diff --git a/.github/workflows.disabled/build-macos-m1-installer.yml b/.github/workflows.disabled/build-macos-m1-installer.yml
index 5998c517f564..3af5038af5cd 100644
--- a/.github/workflows.disabled/build-macos-m1-installer.yml
+++ b/.github/workflows.disabled/build-macos-m1-installer.yml
@@ -1,9 +1,10 @@
-name: MacOS M1 installer on Python 3.9
+name: Build Installer - MacOS arm64

 on:
   push:
     branches:
       - main
+      - 'release/**'
     tags:
       - '**'
   pull_request:
@@ -17,7 +18,7 @@ concurrency:

 jobs:
   build:
-    name: MacOS M1 installer on Python 3.9
+    name: MacOS arm64 installer
     runs-on: [m1]
     timeout-minutes: 40
     strategy:
@@ -102,7 +103,7 @@ jobs:
       env:
         INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
       run: |
-        arch -arm64 sh install.sh
+        arch -arm64 sh install.sh -d

     - name: Install node 16.x
       run: |
@@ -122,7 +123,7 @@ jobs:
         arch -arm64 sh build_macos_m1.sh

     - name: Upload MacOS artifacts
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: chinilla-installers-macos-dmg-arm64
         path: ${{ github.workspace }}/build_scripts/final_installer/
diff --git a/.github/workflows.disabled/build-test-macos-blockchain.yml b/.github/workflows.disabled/build-test-macos-blockchain.yml
index 151c145213fb..2cefe0d5ad27 100644
--- a/.github/workflows.disabled/build-test-macos-blockchain.yml
+++ b/.github/workflows.disabled/build-test-macos-blockchain.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-clvm.yml b/.github/workflows.disabled/build-test-macos-clvm.yml
index 44eb57e3a04e..c11efb68d631 100644
--- a/.github/workflows.disabled/build-test-macos-clvm.yml
+++ b/.github/workflows.disabled/build-test-macos-clvm.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -91,7 +91,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-cmds.yml b/.github/workflows.disabled/build-test-macos-core-cmds.yml
index dbf5bdcad6e2..8aed87dab7ea 100644
--- a/.github/workflows.disabled/build-test-macos-core-cmds.yml
+++ b/.github/workflows.disabled/build-test-macos-core-cmds.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -91,7 +91,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-consensus.yml b/.github/workflows.disabled/build-test-macos-core-consensus.yml
index c7f5fe15eb2f..b50316eaa964 100644
--- a/.github/workflows.disabled/build-test-macos-core-consensus.yml
+++ b/.github/workflows.disabled/build-test-macos-core-consensus.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -91,7 +91,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-custom_types.yml b/.github/workflows.disabled/build-test-macos-core-custom_types.yml
index c3620b7a4312..9f48d059bf34 100644
--- a/.github/workflows.disabled/build-test-macos-core-custom_types.yml
+++ b/.github/workflows.disabled/build-test-macos-core-custom_types.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -91,7 +91,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-daemon.yml b/.github/workflows.disabled/build-test-macos-core-daemon.yml
index c23ad8af52e5..f916f0a33577 100644
--- a/.github/workflows.disabled/build-test-macos-core-daemon.yml
+++ b/.github/workflows.disabled/build-test-macos-core-daemon.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -109,7 +109,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-full_node-full_sync.yml b/.github/workflows.disabled/build-test-macos-core-full_node-full_sync.yml
index 447d0b18095e..ede33db78f60 100644
--- a/.github/workflows.disabled/build-test-macos-core-full_node-full_sync.yml
+++ b/.github/workflows.disabled/build-test-macos-core-full_node-full_sync.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-full_node-stores.yml b/.github/workflows.disabled/build-test-macos-core-full_node-stores.yml
index b7e0b74faee1..601cd13ea82c 100644
--- a/.github/workflows.disabled/build-test-macos-core-full_node-stores.yml
+++ b/.github/workflows.disabled/build-test-macos-core-full_node-stores.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-full_node.yml b/.github/workflows.disabled/build-test-macos-core-full_node.yml
index 372cede22821..0e8f39f49762 100644
--- a/.github/workflows.disabled/build-test-macos-core-full_node.yml
+++ b/.github/workflows.disabled/build-test-macos-core-full_node.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-server.yml b/.github/workflows.disabled/build-test-macos-core-server.yml
index da59334d045d..d960442c9938 100644
--- a/.github/workflows.disabled/build-test-macos-core-server.yml
+++ b/.github/workflows.disabled/build-test-macos-core-server.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-ssl.yml b/.github/workflows.disabled/build-test-macos-core-ssl.yml
index 0cb5f2451741..8121dda464fc 100644
--- a/.github/workflows.disabled/build-test-macos-core-ssl.yml
+++ b/.github/workflows.disabled/build-test-macos-core-ssl.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core-util.yml b/.github/workflows.disabled/build-test-macos-core-util.yml
index a008fcb39324..22727ad44b0b 100644
--- a/.github/workflows.disabled/build-test-macos-core-util.yml
+++ b/.github/workflows.disabled/build-test-macos-core-util.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-core.yml b/.github/workflows.disabled/build-test-macos-core.yml
index 36dd1b51e354..a71d6fc60052 100644
--- a/.github/workflows.disabled/build-test-macos-core.yml
+++ b/.github/workflows.disabled/build-test-macos-core.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-farmer_harvester.yml b/.github/workflows.disabled/build-test-macos-farmer_harvester.yml
index 97736966fcfa..c9383e9dd806 100644
--- a/.github/workflows.disabled/build-test-macos-farmer_harvester.yml
+++ b/.github/workflows.disabled/build-test-macos-farmer_harvester.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-generator.yml b/.github/workflows.disabled/build-test-macos-generator.yml
index 0fe9f0e5378d..f8febf203479 100644
--- a/.github/workflows.disabled/build-test-macos-generator.yml
+++ b/.github/workflows.disabled/build-test-macos-generator.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -91,7 +91,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-plot_sync.yml b/.github/workflows.disabled/build-test-macos-plot_sync.yml
index 367d8a958ee5..57a52d60409a 100644
--- a/.github/workflows.disabled/build-test-macos-plot_sync.yml
+++ b/.github/workflows.disabled/build-test-macos-plot_sync.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-plotting.yml b/.github/workflows.disabled/build-test-macos-plotting.yml
index 9944f96d63e1..72306aa2f9b6 100644
--- a/.github/workflows.disabled/build-test-macos-plotting.yml
+++ b/.github/workflows.disabled/build-test-macos-plotting.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-pools.yml b/.github/workflows.disabled/build-test-macos-pools.yml
index f9f53f256b49..84a5bf517cd8 100644
--- a/.github/workflows.disabled/build-test-macos-pools.yml
+++ b/.github/workflows.disabled/build-test-macos-pools.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-simulation.yml b/.github/workflows.disabled/build-test-macos-simulation.yml
index f13f10c1de1b..228ea926e4ba 100644
--- a/.github/workflows.disabled/build-test-macos-simulation.yml
+++ b/.github/workflows.disabled/build-test-macos-simulation.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -109,7 +109,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-tools.yml b/.github/workflows.disabled/build-test-macos-tools.yml
index 738a3a8054f9..7225abb18e97 100644
--- a/.github/workflows.disabled/build-test-macos-tools.yml
+++ b/.github/workflows.disabled/build-test-macos-tools.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -91,7 +91,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-util.yml b/.github/workflows.disabled/build-test-macos-util.yml
index 46cfb44de6d2..b3f1132b87b3 100644
--- a/.github/workflows.disabled/build-test-macos-util.yml
+++ b/.github/workflows.disabled/build-test-macos-util.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -91,7 +91,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-wallet-cat_wallet.yml b/.github/workflows.disabled/build-test-macos-wallet-cat_wallet.yml
index be39a8267f44..45a65402c436 100644
--- a/.github/workflows.disabled/build-test-macos-wallet-cat_wallet.yml
+++ b/.github/workflows.disabled/build-test-macos-wallet-cat_wallet.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-wallet-did_wallet.yml b/.github/workflows.disabled/build-test-macos-wallet-did_wallet.yml
index deac1f3f07b4..2fcd6b835627 100644
--- a/.github/workflows.disabled/build-test-macos-wallet-did_wallet.yml
+++ b/.github/workflows.disabled/build-test-macos-wallet-did_wallet.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -91,7 +91,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-wallet-rl_wallet.yml b/.github/workflows.disabled/build-test-macos-wallet-rl_wallet.yml
index 13b62fac432c..f528b11ef01a 100644
--- a/.github/workflows.disabled/build-test-macos-wallet-rl_wallet.yml
+++ b/.github/workflows.disabled/build-test-macos-wallet-rl_wallet.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -91,7 +91,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-wallet-rpc.yml b/.github/workflows.disabled/build-test-macos-wallet-rpc.yml
index d77fc608a909..de74de707097 100644
--- a/.github/workflows.disabled/build-test-macos-wallet-rpc.yml
+++ b/.github/workflows.disabled/build-test-macos-wallet-rpc.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-wallet-simple_sync.yml b/.github/workflows.disabled/build-test-macos-wallet-simple_sync.yml
index 597806bff273..1819148e59bc 100644
--- a/.github/workflows.disabled/build-test-macos-wallet-simple_sync.yml
+++ b/.github/workflows.disabled/build-test-macos-wallet-simple_sync.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-wallet-sync.yml b/.github/workflows.disabled/build-test-macos-wallet-sync.yml
index 6c9509a548d6..e371319d3afe 100644
--- a/.github/workflows.disabled/build-test-macos-wallet-sync.yml
+++ b/.github/workflows.disabled/build-test-macos-wallet-sync.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-wallet.yml b/.github/workflows.disabled/build-test-macos-wallet.yml
index e730fced18a7..2617ff64c3b8 100644
--- a/.github/workflows.disabled/build-test-macos-wallet.yml
+++ b/.github/workflows.disabled/build-test-macos-wallet.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-macos-weight_proof.yml b/.github/workflows.disabled/build-test-macos-weight_proof.yml
index c67311808d03..45a83f68ad5a 100644
--- a/.github/workflows.disabled/build-test-macos-weight_proof.yml
+++ b/.github/workflows.disabled/build-test-macos-weight_proof.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.8, 3.9]
+        python-version: ['3.9', '3.10']
         os: [macOS-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -105,7 +105,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-blockchain.yml b/.github/workflows.disabled/build-test-ubuntu-blockchain.yml
index 7cef39a77802..1fbe71657b2e 100644
--- a/.github/workflows.disabled/build-test-ubuntu-blockchain.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-blockchain.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-clvm.yml b/.github/workflows.disabled/build-test-ubuntu-clvm.yml
index cdd1d4bfc1e9..4a34e49441b2 100644
--- a/.github/workflows.disabled/build-test-ubuntu-clvm.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-clvm.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -90,7 +90,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-cmds.yml b/.github/workflows.disabled/build-test-ubuntu-core-cmds.yml
index 36c9ddee442e..0a23d7ea1020 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-cmds.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-cmds.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -90,7 +90,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-consensus.yml b/.github/workflows.disabled/build-test-ubuntu-core-consensus.yml
index d8c7b0b35c55..a6a20155661d 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-consensus.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-consensus.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -90,7 +90,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-custom_types.yml b/.github/workflows.disabled/build-test-ubuntu-core-custom_types.yml
index 051cbd3017a6..356a3daca3b7 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-custom_types.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-custom_types.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -90,7 +90,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-daemon.yml b/.github/workflows.disabled/build-test-ubuntu-core-daemon.yml
index 452fdfafb7f4..13feee58742c 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-daemon.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-daemon.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -108,7 +108,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-full_node-full_sync.yml b/.github/workflows.disabled/build-test-ubuntu-core-full_node-full_sync.yml
index e941dcd28ba5..087b985f6fba 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-full_node-full_sync.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-full_node-full_sync.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-full_node-stores.yml b/.github/workflows.disabled/build-test-ubuntu-core-full_node-stores.yml
index fc604fb2314b..fefb52b0d441 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-full_node-stores.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-full_node-stores.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-full_node.yml b/.github/workflows.disabled/build-test-ubuntu-core-full_node.yml
index 2e5c3b76ee3f..5abd7d4d1d8c 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-full_node.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-full_node.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-server.yml b/.github/workflows.disabled/build-test-ubuntu-core-server.yml
index 050dec16ef60..f02d7f2ec233 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-server.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-server.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-ssl.yml b/.github/workflows.disabled/build-test-ubuntu-core-ssl.yml
index 2cfe9eb9b262..c277b7bf3399 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-ssl.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-ssl.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core-util.yml b/.github/workflows.disabled/build-test-ubuntu-core-util.yml
index d15fc1cc62ae..c4333a5985f4 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core-util.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core-util.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-core.yml b/.github/workflows.disabled/build-test-ubuntu-core.yml
index 82df6bc832d6..11073de36a65 100644
--- a/.github/workflows.disabled/build-test-ubuntu-core.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-core.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-farmer_harvester.yml b/.github/workflows.disabled/build-test-ubuntu-farmer_harvester.yml
index 857835bcbc8b..cae7969862e8 100644
--- a/.github/workflows.disabled/build-test-ubuntu-farmer_harvester.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-farmer_harvester.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-generator.yml b/.github/workflows.disabled/build-test-ubuntu-generator.yml
index 5cdad0e56711..269d1ffbc9b4 100644
--- a/.github/workflows.disabled/build-test-ubuntu-generator.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-generator.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -90,7 +90,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-plot_sync.yml b/.github/workflows.disabled/build-test-ubuntu-plot_sync.yml
index b33a3b1e0624..e22e9e0489f4 100644
--- a/.github/workflows.disabled/build-test-ubuntu-plot_sync.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-plot_sync.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-plotting.yml b/.github/workflows.disabled/build-test-ubuntu-plotting.yml
index ec965f051b6e..abecc96f54aa 100644
--- a/.github/workflows.disabled/build-test-ubuntu-plotting.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-plotting.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-pools.yml b/.github/workflows.disabled/build-test-ubuntu-pools.yml
index 234c7263feb9..03c6877a4a49 100644
--- a/.github/workflows.disabled/build-test-ubuntu-pools.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-pools.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-simulation.yml b/.github/workflows.disabled/build-test-ubuntu-simulation.yml
index 64a6bf1b362c..30ba4341e7fd 100644
--- a/.github/workflows.disabled/build-test-ubuntu-simulation.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-simulation.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -108,7 +108,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-tools.yml b/.github/workflows.disabled/build-test-ubuntu-tools.yml
index 77ac93f7ad61..68ed28d1ca18 100644
--- a/.github/workflows.disabled/build-test-ubuntu-tools.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-tools.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -90,7 +90,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-util.yml b/.github/workflows.disabled/build-test-ubuntu-util.yml
index cedf9c54ab3d..048e75b62abc 100644
--- a/.github/workflows.disabled/build-test-ubuntu-util.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-util.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -90,7 +90,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-wallet-cat_wallet.yml b/.github/workflows.disabled/build-test-ubuntu-wallet-cat_wallet.yml
index 59efe7924803..eb4d0e83929a 100644
--- a/.github/workflows.disabled/build-test-ubuntu-wallet-cat_wallet.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-wallet-cat_wallet.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-wallet-did_wallet.yml b/.github/workflows.disabled/build-test-ubuntu-wallet-did_wallet.yml
index 8ebc2f9255c8..2961995f6b42 100644
--- a/.github/workflows.disabled/build-test-ubuntu-wallet-did_wallet.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-wallet-did_wallet.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -90,7 +90,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-wallet-rl_wallet.yml b/.github/workflows.disabled/build-test-ubuntu-wallet-rl_wallet.yml
index 731c46646b40..e07a3fafb51e 100644
--- a/.github/workflows.disabled/build-test-ubuntu-wallet-rl_wallet.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-wallet-rl_wallet.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -90,7 +90,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-wallet-rpc.yml b/.github/workflows.disabled/build-test-ubuntu-wallet-rpc.yml
index 29f53e1206f4..d9de5860869d 100644
--- a/.github/workflows.disabled/build-test-ubuntu-wallet-rpc.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-wallet-rpc.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-wallet-simple_sync.yml b/.github/workflows.disabled/build-test-ubuntu-wallet-simple_sync.yml
index 09aed3cef84d..6142a8446302 100644
--- a/.github/workflows.disabled/build-test-ubuntu-wallet-simple_sync.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-wallet-simple_sync.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-wallet-sync.yml b/.github/workflows.disabled/build-test-ubuntu-wallet-sync.yml
index 51d38874213e..f0862d9459f2 100644
--- a/.github/workflows.disabled/build-test-ubuntu-wallet-sync.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-wallet-sync.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-wallet.yml b/.github/workflows.disabled/build-test-ubuntu-wallet.yml
index e4db61536b74..996a55c61cbc 100644
--- a/.github/workflows.disabled/build-test-ubuntu-wallet.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-wallet.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows.disabled/build-test-ubuntu-weight_proof.yml b/.github/workflows.disabled/build-test-ubuntu-weight_proof.yml
index 52147cb742dc..f6a3284aa627 100644
--- a/.github/workflows.disabled/build-test-ubuntu-weight_proof.yml
+++ b/.github/workflows.disabled/build-test-ubuntu-weight_proof.yml
@@ -27,7 +27,7 @@ jobs:
       fail-fast: false
       max-parallel: 4
       matrix:
-        python-version: [3.7, 3.8, 3.9]
+        python-version: ['3.7', '3.8', '3.9', '3.10']
         os: [ubuntu-latest]
     env:
       CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet
@@ -104,7 +104,7 @@ jobs:
         venv/bin/coverage report --rcfile=.coveragerc --show-missing

     - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: coverage
         path: coverage_reports/*
diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml
index 8f42a9d879f4..8ca40bc7da9d 100644
--- a/.github/workflows/build-linux-arm64-installer.yml
+++ b/.github/workflows/build-linux-arm64-installer.yml
@@ -1,4 +1,4 @@
-name: Installer - Linux arm64 (Python 3.8)
+name: Build Installer - Linux DEB ARM64

 on:
   workflow_dispatch:
@@ -6,6 +6,7 @@ on:
     branches:
       - main
       - 'release/**'
+      - dev
     tags:
       - '**'
   pull_request:
@@ -32,6 +33,9 @@ jobs:
     steps:
       - uses: Chinilla/actions/clean-workspace@main

+      - name: Add safe git directory
+        uses: Chia-Network/actions/git-mark-workspace-safe@main
+
       - name: Add safe git directory
         uses: Chia-Network/actions/git-mark-workspace-safe@main

@@ -124,6 +128,8 @@ jobs:
         sh build_linux_deb.sh arm64
         mv final_installer/chinilla-blockchain_${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}_arm64.deb final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb
         cp final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb final_installer/chinilla-blockchain-beta-latest-arm64.deb
+        mv final_installer/chinilla-blockchain-cli_${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-1_arm64.deb final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb
+        cp final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb

     - name: Upload Linux artifacts
       uses: actions/upload-artifact@v3
@@ -143,6 +149,18 @@
       out_dir: dev
       cdn_domain: download.chinilla.com

+    - name: Upload CLI to DO Spaces
+      if: steps.check_secrets.outputs.HAS_SECRET
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb
+        out_dir: dev
+        cdn_domain: download.chinilla.com
+
     - name: Create Beta Checksum
       if: github.ref == 'refs/heads/main'
       env:
@@ -150,6 +168,7 @@
       run: |
         ls $GITHUB_WORKSPACE/build_scripts/final_installer/
         sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-beta-latest-arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-beta-latest-arm64.deb.sha256
+        sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb.sha256
         ls $GITHUB_WORKSPACE/build_scripts/final_installer/

     - name: Create Release Checksum
@@ -159,19 +178,7 @@
       env:
         CHINILLA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}
       run: |
         ls $GITHUB_WORKSPACE/build_scripts/final_installer/
         sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-arm64.deb.sha256
-        ls $GITHUB_WORKSPACE/build_scripts/final_installer/
-
-    - name: Install py3createtorrent
-      if: startsWith(github.ref, 'refs/tags/')
-      run: |
-        pip3 install py3createtorrent
-
-    - name: Create torrent
-      if: startsWith(github.ref, 'refs/tags/')
-      env:
-        CHINILLA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}
-      run: |
-        py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-arm64.deb -o $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-arm64.deb.torrent --webseed https://download.chinilla.com/install/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-arm64.deb
+        sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-arm64.deb.sha256
         ls $GITHUB_WORKSPACE/build_scripts/final_installer/

     - name: Upload Beta Installer
@@ -198,6 +205,30 @@
       out_dir: beta
       cdn_domain: download.chinilla.com

+    - name: Upload Beta CLI
+      if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main'
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb
+        out_dir: beta
+        cdn_domain: download.chinilla.com
+
+    - name: Upload Beta CLI Sha256
+      if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main'
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb.sha256
+        out_dir: beta
+        cdn_domain: download.chinilla.com
+
     - name: Upload Release Installer
       if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
       uses: BetaHuhn/do-spaces-action@v2
@@ -222,7 +253,7 @@
       out_dir: install
       cdn_domain: download.chinilla.com

-    - name: Upload Release Installer Torrent
+    - name: Upload Release CLI
       if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
       uses: BetaHuhn/do-spaces-action@v2
       with:
@@ -230,8 +261,21 @@
         secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
         space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
         space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
-        source: /build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb.torrent
-        out_dir: torrents
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb
+        out_dir: install
+        cdn_domain: download.chinilla.com
+
+    - name: Upload Release CLI SHA256
+      if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb.sha256
+        out_dir: install
+        cdn_domain: download.chinilla.com

     - name: Get tag name
       if: startsWith(github.ref, 'refs/tags/')
diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml
index bfd57bfdb1bc..66c85a253c0e 100644
--- a/.github/workflows/build-linux-installer-deb.yml
+++ b/.github/workflows/build-linux-installer-deb.yml
@@ -1,10 +1,12 @@
-name: Installer - Linux .deb (Python 3.8)
+name: Build Installer - Linux DEB AMD64

 on:
   workflow_dispatch:
   push:
     branches:
       - main
+      - dev
+      - 'release/**'
     tags:
       - '**'
   pull_request:
@@ -18,7 +20,7 @@ concurrency:

 jobs:
   build:
-    name: Installer - Linux .deb (Python 3.8)
+    name: Linux amd64 DEB Installer
     runs-on: ${{ matrix.os }}
     timeout-minutes: 40
     strategy:
@@ -140,7 +142,7 @@ jobs:
       env:
         INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
       run: |
-        sh install.sh
+        sh install.sh -d

     - name: Setup Node 16.x
       uses: actions/setup-node@v3
@@ -161,10 +163,12 @@ jobs:
         cd ./build_scripts
         sh build_linux_deb.sh amd64
         mv final_installer/chinilla-blockchain_${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}_amd64.deb final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb
+        mv final_installer/chinilla-blockchain-cli_${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-1_amd64.deb final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb
         cp final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb final_installer/chinilla-blockchain-beta-latest-amd64.deb
+        cp final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb

     - name: Upload Linux artifacts
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: chinilla-installers-linux-deb-intel
         path: ${{ github.workspace }}/build_scripts/final_installer/
@@ -181,6 +185,18 @@
       out_dir: dev
       cdn_domain: download.chinilla.com

+    - name: Upload CLI to DO Spaces
+      if: steps.check_secrets.outputs.HAS_SECRET
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb
+        out_dir: dev
+        cdn_domain: download.chinilla.com
+
     - name: Create Beta Checksum
       if: github.ref == 'refs/heads/main'
       env:
@@ -188,6 +204,7 @@
       run: |
         ls ${{ github.workspace }}/build_scripts/final_installer/
         sha256sum ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-beta-latest-amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-beta-latest-amd64.deb.sha256
+        sha256sum ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb.sha256
         ls ${{ github.workspace }}/build_scripts/final_installer/

     - name: Create Release Checksum
@@ -197,20 +214,9 @@
       env:
         CHINILLA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}
       run: |
         ls ${{ github.workspace }}/build_scripts/final_installer/
         sha256sum ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb.sha256
+        sha256sum ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb.sha256
         ls ${{ github.workspace }}/build_scripts/final_installer/

-    - name: Install py3createtorrent
-      if: startsWith(github.ref, 'refs/tags/')
-      run: |
-        pip3 install py3createtorrent
-
-    - name: Create .deb torrent
-      env:
-        CHINILLA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}
-      if: startsWith(github.ref, 'refs/tags/')
-      run: |
-        py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb -o ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb.torrent --webseed https://download.chinilla.com/install/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-amd64.deb
-        ls
-
     - name: Upload Beta Installer
       if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main'
       uses: BetaHuhn/do-spaces-action@v2
@@ -235,6 +241,30 @@
       out_dir: beta
       cdn_domain: download.chinilla.com

+    - name: Upload Beta CLI
+      if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main'
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb
+        out_dir: beta
+        cdn_domain: download.chinilla.com
+
+    - name: Upload Beta CLI Sha256
+      if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main'
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb.sha256
+        out_dir: beta
+        cdn_domain: download.chinilla.com
+
     - name: Upload Release Installer
       if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
       uses: BetaHuhn/do-spaces-action@v2
@@ -259,7 +289,7 @@
       out_dir: install
       cdn_domain: download.chinilla.com

-    - name: Upload Release Installer Torrent
+    - name: Upload Release CLI
       if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
       uses: BetaHuhn/do-spaces-action@v2
       with:
@@ -267,8 +297,21 @@
         secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
         space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
         space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
-        source: /build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb.torrent
-        out_dir: torrents
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb
+        out_dir: install
+        cdn_domain: download.chinilla.com
+
+    - name: Upload Release CLI SHA256
+      if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb.sha256
+        out_dir: install
+        cdn_domain: download.chinilla.com

     - name: Get tag name
       if: startsWith(github.ref, 'refs/tags/')
diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml
index a2e8b84f4c1a..b11baccf59a3 100644
--- a/.github/workflows/build-linux-installer-rpm.yml
+++ b/.github/workflows/build-linux-installer-rpm.yml
@@ -1,10 +1,12 @@
-name: Installer - Linux .rpm (Python 3.9)
+name: Build Installer - Linux RPM AMD64

 on:
   workflow_dispatch:
   push:
     branches:
       - main
+      - dev
+      - 'release/**'
     tags:
       - '**'
   pull_request:
@@ -18,7 +20,7 @@ concurrency:

 jobs:
   build:
-    name: Installer - Linux .rpm (Python 3.9)
+    name: Linux amd64 RPM Installer
     runs-on: ubuntu-latest
     container:
       image: chianetwork/centos7-builder:latest
@@ -109,7 +111,7 @@ jobs:
       env:
         INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
       run: |
-        sh install.sh
+        sh install.sh -d

     - name: Build .rpm package
       env:
@@ -119,12 +121,14 @@ jobs:
         git -C ./chinilla-blockchain-gui status
         . ./activate
         cd ./build_scripts
-        sh build_linux_rpm.sh amd64
-        cp final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-1.x86_64.rpm final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-x86-64.rpm
-        cp final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-x86-64.rpm final_installer/chinilla-blockchain-beta-latest-x86-64.rpm
+        bash build_linux_rpm.sh amd64
+        cp final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm
+        cp final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm final_installer/chinilla-blockchain-beta-latest-x86-64.rpm
+        cp final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm
+        cp final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-cli-beta-latest-x86-64.rpm

     - name: Upload Linux artifacts
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
       with:
         name: chinilla-installers-linux-rpm-intel
         path: ${{ github.workspace }}/build_scripts/final_installer/
@@ -141,6 +145,18 @@
       out_dir: dev
       cdn_domain: download.chinilla.com

+    - name: Upload CLI to DO Spaces
+      if: steps.check_secrets.outputs.HAS_SECRET
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-x86-64.rpm
+        out_dir: dev
+        cdn_domain: download.chinilla.com
+
     - name: Create Beta Checksum
       if: github.ref == 'refs/heads/main'
       env:
@@ -148,6 +164,7 @@
       run: |
         ls $GITHUB_WORKSPACE/build_scripts/final_installer/
         sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-beta-latest-x86-64.rpm > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-beta-latest-x86-64.rpm.sha256
+        sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-x86-64.rpm > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-x86-64.rpm.sha256
         ls $GITHUB_WORKSPACE/build_scripts/final_installer/

     - name: Create Release Checksum
@@ -157,21 +174,9 @@
       env:
         CHINILLA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}
       run: |
         ls $GITHUB_WORKSPACE/build_scripts/final_installer/
         sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-x86-64.rpm > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm.sha256
+        sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-x86-64.rpm > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm.sha256
         ls $GITHUB_WORKSPACE/build_scripts/final_installer/

-    - name: Install py3createtorrent
-      if: startsWith(github.ref, 'refs/tags/')
-      run: |
-        pip3 install py3createtorrent
-
-    - name: Create .rpm torrent
-      if: startsWith(github.ref, 'refs/tags/')
-      env:
-        CHINILLA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}
-      run: |
-        py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-x86-64.rpm -o $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm.torrent --webseed https://download.chinilla.com/install/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm
-        ls
-
     - name: Upload Beta Installer
       if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main'
       uses: BetaHuhn/do-spaces-action@v2
@@ -196,6 +201,30 @@
       out_dir: beta
       cdn_domain: download.chinilla.com

+    - name: Upload Beta CLI
+      if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main'
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-x86-64.rpm
+        out_dir: beta
+        cdn_domain: download.chinilla.com
+
+    - name: Upload Beta CLI Sha256
+      if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main'
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-x86-64.rpm.sha256
+        out_dir: beta
+        cdn_domain: download.chinilla.com
+
     - name: Upload Release Installer
       if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
       uses: BetaHuhn/do-spaces-action@v2
@@ -220,7 +249,7 @@
       out_dir: install
       cdn_domain: download.chinilla.com

-    - name: Upload Release Installer Torrent
+    - name: Upload Release CLI
       if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
       uses: BetaHuhn/do-spaces-action@v2
       with:
@@ -228,8 +257,21 @@
         secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
         space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
         space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }}
-        source: /build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-x86-64.rpm.torrent
-        out_dir: torrents
+        source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-x86-64.rpm
+        out_dir: install
+        cdn_domain: download.chinilla.com
+
+    - name: Upload Release CLI SHA256
+      if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
+      uses: BetaHuhn/do-spaces-action@v2
+      with:
+        access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}}
+        secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
+        space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }}
+        space_region: ${{
secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-x86-64.rpm.sha256 + out_dir: install + cdn_domain: download.chinilla.com - name: Get tag name if: startsWith(github.ref, 'refs/tags/') diff --git a/.github/workflows/build-macos-installer.yml b/.github/workflows/build-macos-installer.yml index 695e3bccf549..0b8d81f88c1d 100644 --- a/.github/workflows/build-macos-installer.yml +++ b/.github/workflows/build-macos-installer.yml @@ -1,10 +1,12 @@ -name: Installer - MacOS Intel (Python 3.9) +name: Build Installer - MacOS Intel on: workflow_dispatch: push: branches: - main + - dev + - 'release/**' tags: - '**' pull_request: @@ -18,7 +20,7 @@ concurrency: jobs: build: - name: Installer - MacOS Intel (Python 3.9) + name: MacOS Intel Installer runs-on: ${{ matrix.os }} timeout-minutes: 40 strategy: @@ -26,7 +28,7 @@ jobs: max-parallel: 4 matrix: python-version: [3.9] - os: [macOS-latest] + os: [macos-10.15] steps: - name: Checkout Code @@ -129,7 +131,7 @@ jobs: env: INSTALL_PYTHON_VERSION: ${{ matrix.python-version }} run: | - sh install.sh + sh install.sh -d - name: Setup Node 16.x uses: actions/setup-node@v3 @@ -150,7 +152,7 @@ jobs: cp final_installer/Chinilla-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}.dmg final_installer/Chinilla-beta-latest.dmg - name: Upload MacOS artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: chinilla-installers-macos-dmg-intel path: ${{ github.workspace }}/build_scripts/final_installer/ diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index abc7735e71f8..4e085dd7e6f5 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -1,10 +1,12 @@ -name: Installer - Windows 10 (Python 3.9) +name: Build Installer - Windows 10 on: workflow_dispatch: push: branches: - main + - dev + - 'release/**' tags: - '**' pull_request: @@ -18,9 +20,9 @@ concurrency: jobs: build: - name: Installer - Windows 10 (Python 3.9) + name: Windows 10 Installer runs-on: [windows-2019] - timeout-minutes: 40 + timeout-minutes: 50 steps: - name: Checkout Code @@ -63,7 +65,7 @@ jobs: - uses: actions/setup-python@v2 name: Install Python 3.9 with: - python-version: "3.9.11" + python-version: "3.9" - name: Setup Node 16.x uses: actions/setup-node@v3 @@ -156,15 +158,14 @@ jobs: .\build_scripts\build_windows.ps1 Copy-Item "${{ github.workspace }}\chinilla-blockchain-gui\release-builds\windows-installer\ChinillaSetup-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}.exe" -Destination "${{ github.workspace }}\chinilla-blockchain-gui\release-builds\windows-installer\ChinillaSetup-beta-latest.exe" - - name: Upload Windows exe's to artifacts - uses: actions/upload-artifact@v2.2.2 + uses: actions/upload-artifact@v3 with: name: chinilla-installers-windows-exe-intel path: ${{ github.workspace }}\chinilla-blockchain-gui\Chinilla-win32-x64\ - name: Upload Installer to artifacts - uses: actions/upload-artifact@v2.2.2 + uses: actions/upload-artifact@v3 with: name: Windows-Installers path: ${{ github.workspace }}\chinilla-blockchain-gui\release-builds\ @@ -196,17 +197,6 @@ jobs: certutil.exe -hashfile ${{ github.workspace }}\chinilla-blockchain-gui\release-builds\windows-installer\ChinillaSetup-beta-latest.exe SHA256 > ${{ github.workspace 
}}\chinilla-blockchain-gui\release-builds\windows-installer\ChinillaSetup-beta-latest.exe.sha256 ls ${{ github.workspace }}\chinilla-blockchain-gui\release-builds\windows-installer\ - - name: Install py3createtorrent - if: startsWith(github.ref, 'refs/tags/') - run: | - pip3 install py3createtorrent - - - name: Create torrent - if: startsWith(github.ref, 'refs/tags/') - run: | - py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}\chinilla-blockchain-gui\release-builds\windows-installer\ChinillaSetup-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}.exe -o ${{ github.workspace }}\chinilla-blockchain-gui\release-builds\windows-installer\ChinillaSetup-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}.exe.torrent --webseed https://download.chinilla.com/install/ChinillaSetup-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}.exe - ls - - name: Upload Beta Installer if: steps.check_secrets.outputs.HAS_DO_SECRET && github.ref == 'refs/heads/main' uses: BetaHuhn/do-spaces-action@v2 @@ -255,17 +245,6 @@ jobs: out_dir: install cdn_domain: download.chinilla.com - - name: Upload Release Installer Torrent - if: steps.check_secrets.outputs.HAS_DO_SECRET && startsWith(github.ref, 'refs/tags/') - uses: BetaHuhn/do-spaces-action@v2 - with: - access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} - secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} - space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} - space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} - source: \chinilla-blockchain-gui\release-builds\windows-installer\ChinillaSetup-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}.exe.torrent - out_dir: torrents - - name: Get tag name if: startsWith(github.ref, 'refs/tags/') id: tag-name diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml new file mode 100644 index 000000000000..599ee1f56d36 --- /dev/null +++ b/.github/workflows/check_wheel_availability.yaml @@ -0,0 +1,68 @@ +name: Check Dependency Artifacts + +on: + push: + branches: + - main + - dev + tags: + - '**' + pull_request: + branches: + - '**' + +concurrency: + # SHA is added to the end if on `main` to let all main workflows run + group: ${{ github.ref }}-${{ github.workflow }}-${{ github.event_name }}-${{ github.ref == 'refs/heads/main' && github.sha || '' }} + cancel-in-progress: true + +jobs: + check_dependency_artifacts: + name: ${{ matrix.os.name }} ${{ matrix.arch.name }} ${{ matrix.python-version }} + runs-on: ${{ matrix.os.runs-on[matrix.arch.matrix] }} + strategy: + fail-fast: false + matrix: + os: + - name: Linux + matrix: linux + runs-on: + intel: ubuntu-latest + - name: macOS + matrix: macos + runs-on: + intel: macos-latest + - name: Windows + matrix: windows + runs-on: + intel: windows-latest + arch: + - name: Intel + matrix: intel + python-version: ['3.7', '3.8', '3.9', '3.10'] + exclude: + - os: + matrix: macos + python-version: '3.7' + - os: + matrix: macos + arch: + matrix: arm + python-version: '3.8' + - os: + matrix: windows + arch: + matrix: arm + + steps: + - uses: Chia-Network/actions/clean-workspace@main + + - name: Checkout Code + uses: actions/checkout@v3 + + - uses: Chia-Network/actions/setup-python@main + with: + python-version: ${{ matrix.python-version }} + + - name: Check Wheel Availability + run: python build_scripts/check_dependency_artifacts.py diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 7ccc0f79b55d..579167f9cbc4 100644 --- 
a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -3,7 +3,10 @@ name: pre-commit on: pull_request: push: - branches: [main] + branches: + - main + - latest + - dev concurrency: # SHA is added to the end if on `main` to let all main workflows run diff --git a/.github/workflows/super-linter.yml b/.github/workflows/super-linter.yml index 2fa6bf3863a8..3366cfec01f2 100644 --- a/.github/workflows/super-linter.yml +++ b/.github/workflows/super-linter.yml @@ -19,6 +19,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 5ad4f1fdc965..a5ecb0406873 100644 --- a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -4,6 +4,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: @@ -80,7 +81,7 @@ jobs: url: "docker://debian:bullseye" - name: debian:bookworm type: debian - # https://packages.debian.org/bookworm/python/python3 (3.9) + # https://packages.debian.org/bookworm/python/python3 (3.10) url: "docker://debian:bookworm" - name: fedora:33 type: fedora @@ -90,10 +91,10 @@ jobs: type: fedora # (34, 3.9) https://packages.fedoraproject.org/search?query=python3&releases=Fedora+34&start=0 url: "docker://fedora:34" -# - name: fedora:35 -# type: fedora -# # (35, 3.10) https://packages.fedoraproject.org/search?query=python3&releases=Fedora+35&start=0 -# url: "docker://fedora:35" + - name: fedora:35 + type: fedora + # (35, 3.10) https://packages.fedoraproject.org/search?query=python3&releases=Fedora+35&start=0 + url: "docker://fedora:35" - name: rockylinux:8 type: rocky url: "docker://rockylinux:8" @@ -113,6 +114,45 @@ jobs: type: ubuntu # https://packages.ubuntu.com/impish/python3 (21.10, 3.9) url: "docker://ubuntu:impish" + - name: ubuntu:jammy (22.04) + type: ubuntu + # https://packages.ubuntu.com/jammy/python3 (22.04, 3.10) + url: "docker://ubuntu:jammy" + - name: linuxmintd/mint19.1-amd64 (Tessa) + type: mint + # 3.6 default with an option for 3.7 + url: "docker://linuxmintd/mint19.1-amd64" + - name: linuxmintd/mint19.2-amd64 (Tina) + type: mint + # 3.6 default with an option for 3.7 + url: "docker://linuxmintd/mint19.2-amd64" + - name: linuxmintd/mint19.3-amd64 (Tricia) + type: mint + # 3.6 default with an option for 3.7 + url: "docker://linuxmintd/mint19.3-amd64" + - name: linuxmintd/mint20-amd64 (Ulyana) + type: mint + # 3.8 + url: "docker://linuxmintd/mint20-amd64" + - name: linuxmintd/mint20.1-amd64 (Ulyssa) + type: mint + # 3.8 + url: "docker://linuxmintd/mint20.1-amd64" + - name: linuxmintd/mint20.2-amd64 (Uma) + type: mint + # 3.8 + url: "docker://linuxmintd/mint20.2-amd64" + - name: linuxmintd/mint20.3-amd64 (Una) + type: mint + # 3.8 + url: "docker://linuxmintd/mint20.3-amd64" +# The Linux Mint 21 docker image reports as 20.3 but has different Python. +# Uncomment after adapting to handle this or upstream fixing it. +# Also, Linux Mint 21 is not released as of this change. +# - name: linuxmintd/mint21-amd64 +# type: linuxmint +# # 3.10 default with an option for 3.9 +# url: "docker://linuxmintd/mint21-amd64" steps: - name: Prepare Amazon Linux @@ -178,6 +218,25 @@ jobs: apt-get --yes update apt-get install --yes git lsb-release sudo + - name: Prepare Linux Mint + if: ${{ matrix.distribution.type == 'mint' }} + env: + DEBIAN_FRONTEND: noninteractive + run: | + # for 19.* + apt-get --yes update + # for 19.3 to avoid + # Setting up software-properties-common (2.0.0.2) ... 
+ # Traceback (most recent call last): + # File "/usr/lib/linuxmint/mintSources/mintSources.py", line 11, in <module> + # import requests + # ModuleNotFoundError: No module named 'requests' + apt-get install --yes python3-requests + apt-get install --yes software-properties-common + add-apt-repository --yes ppa:git-core/ppa + apt-get --yes update + apt-get install --yes git lsb-release sudo + - name: Add safe git directory run: git config --global --add safe.directory $GITHUB_WORKSPACE diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index bc764ab2d3bf..57fd36b9c01a 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -5,6 +5,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: @@ -66,7 +67,7 @@ jobs: python -m build --sdist --outdir dist . - name: Upload artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: dist path: ./dist diff --git a/.isort.cfg b/.isort.cfg index 94f47b0355e2..7f9237c361c9 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -109,7 +109,6 @@ extend_skip= chinilla/util/profiler.py chinilla/util/service_groups.py chinilla/util/ssl_check.py - chinilla/util/streamable.py chinilla/util/ws_message.py chinilla/wallet/cat_wallet/cat_info.py chinilla/wallet/cat_wallet/cat_utils.py @@ -191,7 +190,6 @@ extend_skip= tests/core/util/test_files.py tests/core/util/test_keychain.py tests/core/util/test_keyring_wrapper.py - tests/core/util/test_streamable.py tests/generator/test_compression.py tests/generator/test_generator_types.py tests/generator/test_list_to_batches.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 59ebd1d6aa6c..a1ec8f2307c3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,37 @@ All notable changes to this project will be documented in this file. +## 1.0.5 Chinilla Blockchain 2022-05-11 + +### Notes + +This release aligns with Chia 1.3.5. + +### Added + +- Added support for Python 3.10 +- Added CLI-only `.rpm` and `.deb` packages to official release channels +- Performance improvements in harvesters during plot refresh. Large farmers likely no longer need to specify a very high plot refresh interval in config.yaml + +### Changed + +- Enabled paginated plot loading and improved plot state reporting +- Updated the farming GUI tab to fix several bugs +- Simplified install.sh Ubuntu version tracking +- Various farmer RPC improvements +- Improvements to the harvester `get_plots` RPC + +### Fixed + +- Fixed an issue where some coins would be missing after a full sync +- Fixed an infinite loop when closing the timelord +- Fixed a memory leak on the farm page +- Fixed the list of plot files "in progress" + +### Known Issues + +There is a known issue where harvesters will not reconnect to the farmer automatically unless the harvester is restarted. This bug was introduced in 1.3.4, and a patch is planned for an upcoming release.
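[Editor's note: the "farmer RPC improvements" above refer to the paginated harvester endpoints added later in this diff (chinilla/rpc/farmer_rpc_api.py and chinilla/rpc/farmer_rpc_client.py). A minimal usage sketch, not part of the diff, assuming a farmer running locally with the default config and certificates and that FarmerRpcClient.create keeps the existing RpcClient signature:

    import asyncio

    from chinilla.rpc.farmer_rpc_api import PlotInfoRequestData
    from chinilla.rpc.farmer_rpc_client import FarmerRpcClient
    from chinilla.types.blockchain_format.sized_bytes import bytes32
    from chinilla.util.config import load_config
    from chinilla.util.default_root import DEFAULT_ROOT_PATH
    from chinilla.util.ints import uint16

    async def main() -> None:
        config = load_config(DEFAULT_ROOT_PATH, "config.yaml")
        client = await FarmerRpcClient.create(
            config["self_hostname"], uint16(config["farmer"]["rpc_port"]), DEFAULT_ROOT_PATH, config
        )
        try:
            # Counts-only overview of all connected harvesters (new get_harvesters_summary endpoint).
            summary = await client.get_harvesters_summary()
            for harvester in summary["harvesters"]:
                node_id = bytes32.from_hexstr(harvester["connection"]["node_id"])
                # First page of this harvester's valid plots, 10 per page; filter and
                # sort_key are left at their defaults (no filter, sorted by filename).
                page = await client.get_harvester_plots_valid(PlotInfoRequestData(node_id, 0, 10))
                print(f"{page['total_count']} plots across {page['page_count']} page(s)")
        finally:
            client.close()
            await client.await_closed()

    asyncio.run(main())
]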
+ ## 1.0.4 Chinilla Blockchain 2022-05-02 ### Added diff --git a/Install.ps1 b/Install.ps1 index 4bf830dd98da..55f3f89ad7f0 100644 --- a/Install.ps1 +++ b/Install.ps1 @@ -43,7 +43,7 @@ if ($null -eq (Get-Command py -ErrorAction SilentlyContinue)) Exit 1 } -$supportedPythonVersions = "3.9", "3.8", "3.7" +$supportedPythonVersions = "3.10", "3.9", "3.8", "3.7" if (Test-Path env:INSTALL_PYTHON_VERSION) { $pythonVersion = $env:INSTALL_PYTHON_VERSION diff --git a/build_scripts/assets/deb/postinst.sh b/build_scripts/assets/deb/postinst.sh new file mode 100644 index 000000000000..c39e00e6308c --- /dev/null +++ b/build_scripts/assets/deb/postinst.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +# Post-install script for the UI .deb that places symlinks so the CLI works the same in both packages + +set -e + +ln -s /usr/lib/chinilla-blockchain/resources/app.asar.unpacked/daemon/chinilla /usr/bin/chinilla || true +ln -s /usr/lib/chinilla-blockchain/resources/app.asar.unpacked/daemon /opt/chinilla || true diff --git a/build_scripts/assets/deb/prerm.sh b/build_scripts/assets/deb/prerm.sh new file mode 100644 index 000000000000..9e34e2602897 --- /dev/null +++ b/build_scripts/assets/deb/prerm.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +# Pre-remove script for the UI .deb to clean up the symlinks from the installer + +set -e + +unlink /usr/bin/chinilla || true +unlink /opt/chinilla || true diff --git a/build_scripts/build_linux_deb.sh b/build_scripts/build_linux_deb.sh index c8c7f40ddf6d..9e096a315174 100644 --- a/build_scripts/build_linux_deb.sh +++ b/build_scripts/build_linux_deb.sh @@ -37,7 +37,6 @@ rm -rf dist mkdir dist echo "Create executables with pyinstaller" -pip install pyinstaller==4.9 SPEC_FILE=$(python -c 'import chinilla; print(chinilla.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" LAST_EXIT_CODE=$? @@ -101,8 +100,10 @@ cd ../../../build_scripts || exit echo "Create chinilla-$CHINILLA_INSTALLER_VERSION.deb" rm -rf final_installer mkdir final_installer -electron-installer-debian --src dist/$DIR_NAME/ --dest final_installer/ \ ---arch "$PLATFORM" --options.version $CHINILLA_INSTALLER_VERSION --options.bin chinilla-blockchain --options.name chinilla-blockchain +electron-installer-debian --src "dist/$DIR_NAME/" \ + --arch "$PLATFORM" \ + --options.version "$CHINILLA_INSTALLER_VERSION" \ + --config deb-options.json LAST_EXIT_CODE=$? if [ "$LAST_EXIT_CODE" -ne 0 ]; then echo >&2 "electron-installer-debian failed!" diff --git a/build_scripts/build_linux_rpm.sh b/build_scripts/build_linux_rpm.sh index 5a8d078928a6..24aa7e342cd9 100644 --- a/build_scripts/build_linux_rpm.sh +++ b/build_scripts/build_linux_rpm.sh @@ -35,7 +35,6 @@ rm -rf dist mkdir dist echo "Create executables with pyinstaller" -pip install pyinstaller==4.9 SPEC_FILE=$(python -c 'import chinilla; print(chinilla.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" LAST_EXIT_CODE=$? @@ -44,6 +43,31 @@ if [ "$LAST_EXIT_CODE" -ne 0 ]; then exit $LAST_EXIT_CODE fi +# Build the CLI-only rpm +CLI_RPM_BASE="chinilla-blockchain-cli-$CHINILLA_INSTALLER_VERSION-1.$REDHAT_PLATFORM" +mkdir -p "dist/$CLI_RPM_BASE/opt/chinilla" +mkdir -p "dist/$CLI_RPM_BASE/usr/bin" +cp -r dist/daemon/* "dist/$CLI_RPM_BASE/opt/chinilla/" +ln -s ../../opt/chinilla/chinilla "dist/$CLI_RPM_BASE/usr/bin/chinilla" +# This is built into the base build image +# shellcheck disable=SC1091 +.
/etc/profile.d/rvm.sh +rvm use ruby-3 +# /usr/lib64/libcrypt.so.1 is marked as a dependency specifically because newer versions of fedora bundle +# libcrypt.so.2 by default, and the libxcrypt-compat package needs to be installed for the other version +# Marking as a dependency allows yum/dnf to automatically install the libxcrypt-compat package as well +fpm -s dir -t rpm \ + -C "dist/$CLI_RPM_BASE" \ + -p "dist/$CLI_RPM_BASE.rpm" \ + --name chinilla-blockchain-cli \ + --license Apache-2.0 \ + --version "$CHINILLA_INSTALLER_VERSION" \ + --architecture "$REDHAT_PLATFORM" \ + --description "Chinilla is a modern cryptocurrency built from scratch, designed to be efficient, decentralized, and secure." \ + --depends /usr/lib64/libcrypt.so.1 \ + . +# CLI-only rpm done + cp -r dist/daemon ../chinilla-blockchain-gui/packages/gui cd .. || exit cd chinilla-blockchain-gui || exit @@ -110,4 +134,7 @@ if [ "$REDHAT_PLATFORM" = "x86_64" ]; then fi fi +# Move the CLI-only rpm into final_installer as well, so it gets uploaded as an artifact +mv "dist/$CLI_RPM_BASE.rpm" final_installer/ + ls final_installer/ diff --git a/build_scripts/build_macos.sh b/build_scripts/build_macos.sh index 9086faa042db..432f953a0a1e 100644 --- a/build_scripts/build_macos.sh +++ b/build_scripts/build_macos.sh @@ -22,7 +22,6 @@ sudo rm -rf dist mkdir dist echo "Create executables with pyinstaller" -pip install pyinstaller==4.9 SPEC_FILE=$(python -c 'import chinilla; print(chinilla.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" LAST_EXIT_CODE=$? diff --git a/build_scripts/build_macos_m1.sh b/build_scripts/build_macos_m1.sh index 032f4f3d6fd5..2baf6c33b59a 100644 --- a/build_scripts/build_macos_m1.sh +++ b/build_scripts/build_macos_m1.sh @@ -21,9 +21,6 @@ echo "Create dist/" sudo rm -rf dist mkdir dist -echo "Install pyinstaller and build bootloaders for M1" -pip install pyinstaller==4.9 - echo "Create executables with pyinstaller" SPEC_FILE=$(python -c 'import chinilla; print(chinilla.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" diff --git a/build_scripts/check_dependency_artifacts.py b/build_scripts/check_dependency_artifacts.py new file mode 100644 index 000000000000..07fe57b834bc --- /dev/null +++ b/build_scripts/check_dependency_artifacts.py @@ -0,0 +1,84 @@ +import os +import pathlib +import platform +import subprocess +import sys +import tempfile + +excepted_packages = { + "keyrings.cryptfile", # pure python + "dnslib", # pure python +} + + +def excepted(path: pathlib.Path) -> bool: + # TODO: This should be implemented with a real file name parser, though I'm + # uncertain at the moment what package that would be.
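[Editor's note, not part of the diff: one candidate for the TODO above is the third-party "packaging" library, whose file name parsers (parse_wheel_filename and parse_sdist_filename, available since roughly packaging 20.9) do this properly. A sketch, assuming that dependency is acceptable in a build script:

    import pathlib

    from packaging.utils import parse_sdist_filename, parse_wheel_filename

    def artifact_project_name(path: pathlib.Path) -> str:
        # Both wheels and sdists encode the (normalized) project name in the file name.
        if path.suffix == ".whl":
            return parse_wheel_filename(path.name)[0]
        return parse_sdist_filename(path.name)[0]

Since these return PEP 503-normalized names ("keyrings-cryptfile"), the excepted_packages entries would need the same normalization.]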
+ + name, dash, rest = path.name.partition("-") + return name in excepted_packages + + +def main() -> int: + with tempfile.TemporaryDirectory() as directory_string: + print(f"Working in: {directory_string}") + print() + directory_path = pathlib.Path(directory_string) + + extras = ["upnp"] + package_path_string = os.fspath(pathlib.Path(__file__).parent.parent) + + if len(extras) > 0: + package_and_extras = f"{package_path_string}[{','.join(extras)}]" + else: + package_and_extras = package_path_string + + print("Downloading packages for Python version:") + lines = [ + *sys.version.splitlines(), + "", + f"machine: {platform.machine()}", + f"platform: {platform.platform()}", + ] + for line in lines: + print(f" {line}") + print(flush=True) + + subprocess.run( + [ + sys.executable, + "-m", + "pip", + "download", + "--dest", + os.fspath(directory_path), + "--extra-index", + "https://pypi.chia.net/simple/", + package_and_extras, + ], + check=True, + ) + + failed_artifacts = [] + + for artifact in directory_path.iterdir(): + if artifact.suffix == ".whl": + # everything being a wheel is the target + continue + + if excepted(artifact): + continue + + failed_artifacts.append(artifact) + + if len(failed_artifacts) > 0: + print("The following unacceptable artifacts were downloaded by pip:") + for artifact in failed_artifacts: + print(f" {artifact.name}") + + return 1 + + return 0 + + +sys.exit(main()) diff --git a/build_scripts/deb-options.json b/build_scripts/deb-options.json new file mode 100644 index 000000000000..102a2237bcb2 --- /dev/null +++ b/build_scripts/deb-options.json @@ -0,0 +1,9 @@ +{ + "dest": "final_installer/", + "bin": "chinilla-blockchain", + "name": "chinilla-blockchain", + "scripts": { + "postinst": "assets/deb/postinst.sh", + "prerm": "assets/deb/prerm.sh" + } +} diff --git a/build_scripts/installer-version.py b/build_scripts/installer-version.py index d9d71e02e7cf..12e13b0d934e 100644 --- a/build_scripts/installer-version.py +++ b/build_scripts/installer-version.py @@ -1,4 +1,3 @@ -import os import sys from setuptools_scm import get_version @@ -10,7 +9,6 @@ def main(): scm_full_version = get_version(root="..", relative_to=__file__) # scm_full_version = "1.0.5.dev22" - os.environ["SCM_VERSION"] = scm_full_version left_full_version = scm_full_version.split("+") version = left_full_version[0].split(".") diff --git a/chinilla-blockchain-gui b/chinilla-blockchain-gui index 7a85008e8db2..04400b1cb381 160000 --- a/chinilla-blockchain-gui +++ b/chinilla-blockchain-gui @@ -1 +1 @@ -Subproject commit 7a85008e8db2e1faf79bbabc5c2849eb3abdf05b +Subproject commit 04400b1cb381c1e928742a1305f9c6e1446f9293 diff --git a/chinilla/farmer/farmer.py b/chinilla/farmer/farmer.py index 64c141ae09f2..168cb11a22d2 100644 --- a/chinilla/farmer/farmer.py +++ b/chinilla/farmer/farmer.py @@ -256,11 +256,14 @@ def on_disconnect(self, connection: ws.WSChinillaConnection): self.state_changed("close_connection", {}) if connection.connection_type is NodeType.HARVESTER: del self.plot_sync_receivers[connection.peer_node_id] + self.state_changed("harvester_removed", {"node_id": connection.peer_node_id}) - async def plot_sync_callback(self, peer_id: bytes32, delta: Delta) -> None: - log.info(f"plot_sync_callback: peer_id {peer_id}, delta {delta}") - if not delta.empty(): - self.state_changed("new_plots", await self.get_harvesters()) + async def plot_sync_callback(self, peer_id: bytes32, delta: Optional[Delta]) -> None: + log.debug(f"plot_sync_callback: peer_id {peer_id}, delta {delta}") + receiver: Receiver = 
self.plot_sync_receivers[peer_id] + harvester_updated: bool = delta is not None and not delta.empty() + if receiver.initial_sync() or harvester_updated: + self.state_changed("harvester_update", receiver.to_dict(True)) async def _pool_get_pool_info(self, pool_config: PoolWalletConfig) -> Optional[Dict]: try: @@ -629,13 +632,13 @@ async def generate_login_link(self, launcher_id: bytes32) -> Optional[str]: return None - async def get_harvesters(self) -> Dict: + async def get_harvesters(self, counts_only: bool = False) -> Dict: harvesters: List = [] for connection in self.server.get_connections(NodeType.HARVESTER): self.log.debug(f"get_harvesters host: {connection.peer_host}, node_id: {connection.peer_node_id}") receiver = self.plot_sync_receivers.get(connection.peer_node_id) if receiver is not None: - harvesters.append(receiver.to_dict()) + harvesters.append(receiver.to_dict(counts_only)) else: self.log.debug( f"get_harvesters invalid peer: {connection.peer_host}, node_id: {connection.peer_node_id}" @@ -643,6 +646,12 @@ async def get_harvesters(self) -> Dict: return {"harvesters": harvesters} + def get_receiver(self, node_id: bytes32) -> Receiver: + receiver: Optional[Receiver] = self.plot_sync_receivers.get(node_id) + if receiver is None: + raise KeyError(f"Receiver missing for {node_id}") + return receiver + async def _periodically_update_pool_state_task(self): time_slept: uint64 = uint64(0) config_path: Path = config_path_for_filename(self._root_path, "config.yaml") diff --git a/chinilla/harvester/harvester.py b/chinilla/harvester/harvester.py index 0e64195620f4..2d7713c1b1fe 100644 --- a/chinilla/harvester/harvester.py +++ b/chinilla/harvester/harvester.py @@ -118,13 +118,12 @@ def get_plots(self) -> Tuple[List[Dict], List[str], List[str]]: { "filename": str(path), "size": prover.get_size(), - "plot-seed": prover.get_id(), # Deprecated "plot_id": prover.get_id(), "pool_public_key": plot_info.pool_public_key, "pool_contract_puzzle_hash": plot_info.pool_contract_puzzle_hash, "plot_public_key": plot_info.plot_public_key, "file_size": plot_info.file_size, - "time_modified": plot_info.time_modified, + "time_modified": int(plot_info.time_modified), } ) self.log.debug( diff --git a/chinilla/plot_sync/receiver.py b/chinilla/plot_sync/receiver.py index 458d13cf006e..7521429ddedc 100644 --- a/chinilla/plot_sync/receiver.py +++ b/chinilla/plot_sync/receiver.py @@ -1,5 +1,6 @@ import logging import time +from dataclasses import dataclass, field from typing import Any, Callable, Collection, Coroutine, Dict, List, Optional from chinilla.plot_sync.delta import Delta, PathListDelta, PlotListDelta @@ -24,74 +25,85 @@ ) from chinilla.server.ws_connection import ProtocolMessageTypes, WSChinillaConnection, make_msg from chinilla.types.blockchain_format.sized_bytes import bytes32 -from chinilla.util.ints import int16, uint64 +from chinilla.util.ints import int16, uint32, uint64 +from chinilla.util.misc import get_list_or_len from chinilla.util.streamable import _T_Streamable log = logging.getLogger(__name__) +@dataclass +class Sync: + state: State = State.idle + sync_id: uint64 = uint64(0) + next_message_id: uint64 = uint64(0) + plots_processed: uint32 = uint32(0) + plots_total: uint32 = uint32(0) + delta: Delta = field(default_factory=Delta) + time_done: Optional[float] = None + + def in_progress(self) -> bool: + return self.sync_id != 0 + + def bump_next_message_id(self) -> None: + self.next_message_id = uint64(self.next_message_id + 1) + + def bump_plots_processed(self) -> None: + self.plots_processed = 
uint32(self.plots_processed + 1) + + class Receiver: _connection: WSChinillaConnection - _sync_state: State - _delta: Delta - _expected_sync_id: uint64 - _expected_message_id: uint64 - _last_sync_id: uint64 - _last_sync_time: float + _current_sync: Sync + _last_sync: Sync _plots: Dict[str, Plot] _invalid: List[str] _keys_missing: List[str] _duplicates: List[str] - _update_callback: Callable[[bytes32, Delta], Coroutine[Any, Any, None]] + _total_plot_size: int + _update_callback: Callable[[bytes32, Optional[Delta]], Coroutine[Any, Any, None]] def __init__( - self, connection: WSChinillaConnection, update_callback: Callable[[bytes32, Delta], Coroutine[Any, Any, None]] + self, + connection: WSChinillaConnection, + update_callback: Callable[[bytes32, Optional[Delta]], Coroutine[Any, Any, None]], ) -> None: self._connection = connection - self._sync_state = State.idle - self._delta = Delta() - self._expected_sync_id = uint64(0) - self._expected_message_id = uint64(0) - self._last_sync_id = uint64(0) - self._last_sync_time = 0 + self._current_sync = Sync() + self._last_sync = Sync() self._plots = {} self._invalid = [] self._keys_missing = [] self._duplicates = [] + self._total_plot_size = 0 self._update_callback = update_callback # type: ignore[assignment, misc] + async def trigger_callback(self, update: Optional[Delta] = None) -> None: + try: + await self._update_callback(self._connection.peer_node_id, update) # type: ignore[misc,call-arg] + except Exception as e: + log.error(f"_update_callback raised: {e}") + def reset(self) -> None: - self._sync_state = State.idle - self._expected_sync_id = uint64(0) - self._expected_message_id = uint64(0) - self._last_sync_id = uint64(0) - self._last_sync_time = 0 + self._current_sync = Sync() + self._last_sync = Sync() self._plots.clear() self._invalid.clear() self._keys_missing.clear() self._duplicates.clear() - self._delta.clear() - - def bump_expected_message_id(self) -> None: - self._expected_message_id = uint64(self._expected_message_id + 1) + self._total_plot_size = 0 def connection(self) -> WSChinillaConnection: return self._connection - def state(self) -> State: - return self._sync_state + def current_sync(self) -> Sync: + return self._current_sync - def expected_sync_id(self) -> uint64: - return self._expected_sync_id + def last_sync(self) -> Sync: + return self._last_sync - def expected_message_id(self) -> uint64: - return self._expected_message_id - - def last_sync_id(self) -> uint64: - return self._last_sync_id - - def last_sync_time(self) -> float: - return self._last_sync_time + def initial_sync(self) -> bool: + return self._last_sync.sync_id == 0 def plots(self) -> Dict[str, Plot]: return self._plots @@ -105,6 +117,9 @@ def keys_missing(self) -> List[str]: def duplicates(self) -> List[str]: return self._duplicates + def total_plot_size(self) -> int: + return self._total_plot_size + async def _process( self, method: Callable[[_T_Streamable], Any], message_type: ProtocolMessageTypes, message: Any ) -> None: @@ -131,12 +146,12 @@ async def send_response(plot_sync_error: Optional[PlotSyncError] = None) -> None await send_response(PlotSyncError(int16(ErrorCodes.unknown), f"{e}", None)) def _validate_identifier(self, identifier: PlotSyncIdentifier, start: bool = False) -> None: - sync_id_match = identifier.sync_id == self._expected_sync_id - message_id_match = identifier.message_id == self._expected_message_id + sync_id_match = identifier.sync_id == self._current_sync.sync_id + message_id_match = identifier.message_id == 
self._current_sync.next_message_id identifier_match = sync_id_match and message_id_match if (start and not message_id_match) or (not start and not identifier_match): expected: PlotSyncIdentifier = PlotSyncIdentifier( - identifier.timestamp, self._expected_sync_id, self._expected_message_id + identifier.timestamp, self._current_sync.sync_id, self._current_sync.next_message_id ) raise InvalidIdentifierError( identifier, @@ -147,14 +162,15 @@ async def _sync_started(self, data: PlotSyncStart) -> None: if data.initial: self.reset() self._validate_identifier(data.identifier, True) - if data.last_sync_id != self.last_sync_id(): - raise InvalidLastSyncIdError(data.last_sync_id, self.last_sync_id()) + if data.last_sync_id != self._last_sync.sync_id: + raise InvalidLastSyncIdError(data.last_sync_id, self._last_sync.sync_id) if data.last_sync_id == data.identifier.sync_id: raise SyncIdsMatchError(State.idle, data.last_sync_id) - self._expected_sync_id = data.identifier.sync_id - self._delta.clear() - self._sync_state = State.loaded - self.bump_expected_message_id() + self._current_sync.sync_id = data.identifier.sync_id + self._current_sync.delta.clear() + self._current_sync.state = State.loaded + self._current_sync.plots_total = data.plot_file_count + self._current_sync.bump_next_message_id() async def sync_started(self, data: PlotSyncStart) -> None: await self._process(self._sync_started, ProtocolMessageTypes.plot_sync_start, data) @@ -163,14 +179,18 @@ async def _process_loaded(self, plot_infos: PlotSyncPlotList) -> None: self._validate_identifier(plot_infos.identifier) for plot_info in plot_infos.data: - if plot_info.filename in self._plots or plot_info.filename in self._delta.valid.additions: + if plot_info.filename in self._plots or plot_info.filename in self._current_sync.delta.valid.additions: raise PlotAlreadyAvailableError(State.loaded, plot_info.filename) - self._delta.valid.additions[plot_info.filename] = plot_info + self._current_sync.delta.valid.additions[plot_info.filename] = plot_info + self._current_sync.bump_plots_processed() + + # Let the callback receiver know about the sync progress updates + await self.trigger_callback() if plot_infos.final: - self._sync_state = State.removed + self._current_sync.state = State.removed - self.bump_expected_message_id() + self._current_sync.bump_next_message_id() async def process_loaded(self, plot_infos: PlotSyncPlotList) -> None: await self._process(self._process_loaded, ProtocolMessageTypes.plot_sync_loaded, plot_infos) @@ -193,18 +213,23 @@ async def process_path_list( if not is_removal and path in delta: raise PlotAlreadyAvailableError(state, path) delta.append(path) + if not is_removal: + self._current_sync.bump_plots_processed() + + # Let the callback receiver know about the sync progress updates + await self.trigger_callback() if paths.final: - self._sync_state = next_state + self._current_sync.state = next_state - self.bump_expected_message_id() + self._current_sync.bump_next_message_id() async def _process_removed(self, paths: PlotSyncPathList) -> None: await self.process_path_list( state=State.removed, next_state=State.invalid, target=self._plots, - delta=self._delta.valid.removals, + delta=self._current_sync.delta.valid.removals, paths=paths, is_removal=True, ) @@ -217,7 +242,7 @@ async def _process_invalid(self, paths: PlotSyncPathList) -> None: state=State.invalid, next_state=State.keys_missing, target=self._invalid, - delta=self._delta.invalid.additions, + delta=self._current_sync.delta.invalid.additions, paths=paths, ) @@ -229,7 
+254,7 @@ async def _process_keys_missing(self, paths: PlotSyncPathList) -> None: state=State.keys_missing, next_state=State.duplicates, target=self._keys_missing, - delta=self._delta.keys_missing.additions, + delta=self._current_sync.delta.keys_missing.additions, paths=paths, ) @@ -241,7 +266,7 @@ async def _process_duplicates(self, paths: PlotSyncPathList) -> None: state=State.duplicates, next_state=State.done, target=self._duplicates, - delta=self._delta.duplicates.additions, + delta=self._current_sync.delta.duplicates.additions, paths=paths, ) @@ -250,55 +275,62 @@ async def process_duplicates(self, paths: PlotSyncPathList) -> None: async def _sync_done(self, data: PlotSyncDone) -> None: self._validate_identifier(data.identifier) - # Update ids - self._last_sync_id = self._expected_sync_id - self._expected_sync_id = uint64(0) - self._expected_message_id = uint64(0) + self._current_sync.time_done = time.time() # First create the update delta (i.e. transform invalid/keys_missing into additions/removals) which we will # send to the callback receiver below - delta_invalid: PathListDelta = PathListDelta.from_lists(self._invalid, self._delta.invalid.additions) + delta_invalid: PathListDelta = PathListDelta.from_lists( + self._invalid, self._current_sync.delta.invalid.additions + ) delta_keys_missing: PathListDelta = PathListDelta.from_lists( - self._keys_missing, self._delta.keys_missing.additions + self._keys_missing, self._current_sync.delta.keys_missing.additions + ) + delta_duplicates: PathListDelta = PathListDelta.from_lists( + self._duplicates, self._current_sync.delta.duplicates.additions ) - delta_duplicates: PathListDelta = PathListDelta.from_lists(self._duplicates, self._delta.duplicates.additions) update = Delta( - PlotListDelta(self._delta.valid.additions.copy(), self._delta.valid.removals.copy()), + PlotListDelta( + self._current_sync.delta.valid.additions.copy(), self._current_sync.delta.valid.removals.copy() + ), delta_invalid, delta_keys_missing, delta_duplicates, ) # Apply delta - self._plots.update(self._delta.valid.additions) - for removal in self._delta.valid.removals: + self._plots.update(self._current_sync.delta.valid.additions) + for removal in self._current_sync.delta.valid.removals: del self._plots[removal] - self._invalid = self._delta.invalid.additions.copy() - self._keys_missing = self._delta.keys_missing.additions.copy() - self._duplicates = self._delta.duplicates.additions.copy() - # Update state and bump last sync time - self._sync_state = State.idle - self._last_sync_time = time.time() + self._invalid = self._current_sync.delta.invalid.additions.copy() + self._keys_missing = self._current_sync.delta.keys_missing.additions.copy() + self._duplicates = self._current_sync.delta.duplicates.additions.copy() + self._total_plot_size = sum(plot.file_size for plot in self._plots.values()) + # Save current sync as last sync and create a new current sync + self._last_sync = self._current_sync + self._current_sync = Sync() # Let the callback receiver know if this sync cycle caused any update - try: - await self._update_callback(self._connection.peer_node_id, update) # type: ignore[misc,call-arg] - except Exception as e: - log.error(f"_update_callback raised: {e}") - self._delta.clear() + await self.trigger_callback(update) async def sync_done(self, data: PlotSyncDone) -> None: await self._process(self._sync_done, ProtocolMessageTypes.plot_sync_done, data) - def to_dict(self) -> Dict[str, Any]: - result: Dict[str, Any] = { + def to_dict(self, counts_only: bool = False) -> 
Dict[str, Any]: + syncing = None + if self._current_sync.in_progress(): + syncing = { + "initial": self.initial_sync(), + "plot_files_processed": self._current_sync.plots_processed, + "plot_files_total": self._current_sync.plots_total, + } + return { "connection": { "node_id": self._connection.peer_node_id, "host": self._connection.peer_host, "port": self._connection.peer_port, }, - "plots": list(self._plots.values()), - "failed_to_open_filenames": self._invalid, - "no_key_filenames": self._keys_missing, - "duplicates": self._duplicates, + "plots": get_list_or_len(list(self._plots.values()), counts_only), + "failed_to_open_filenames": get_list_or_len(self._invalid, counts_only), + "no_key_filenames": get_list_or_len(self._keys_missing, counts_only), + "duplicates": get_list_or_len(self._duplicates, counts_only), + "total_plot_size": self._total_plot_size, + "syncing": syncing, + "last_sync_time": self._last_sync.time_done, } - if self._last_sync_time != 0: - result["last_sync_time"] = self._last_sync_time - return result diff --git a/chinilla/plotting/manager.py b/chinilla/plotting/manager.py index 3b7b86986c95..7aa9a8e89398 100644 --- a/chinilla/plotting/manager.py +++ b/chinilla/plotting/manager.py @@ -232,9 +232,9 @@ def _refresh_task(self, sleep_interval_ms: int): plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(self.root_path) plot_directories: Set[Path] = set(plot_filenames.keys()) - plot_paths: List[Path] = [] + plot_paths: Set[Path] = set() for paths in plot_filenames.values(): - plot_paths += paths + plot_paths.update(paths) total_result: PlotRefreshResult = PlotRefreshResult() total_size = len(plot_paths) @@ -274,7 +274,7 @@ def _refresh_task(self, sleep_interval_ms: int): for filename in filenames_to_remove: del self.plot_filename_paths[filename] - for remaining, batch in list_to_batches(plot_paths, self.refresh_parameter.batch_size): + for remaining, batch in list_to_batches(list(plot_paths), self.refresh_parameter.batch_size): batch_result: PlotRefreshResult = self.refresh_batch(batch, plot_directories) if not self._refreshing_enabled: self.log.debug("refresh_plots: Aborted") diff --git a/chinilla/pyinstaller.spec b/chinilla/pyinstaller.spec index 6648d8ed3257..bf45aa1b2a8d 100644 --- a/chinilla/pyinstaller.spec +++ b/chinilla/pyinstaller.spec @@ -2,6 +2,7 @@ import importlib import pathlib import platform +import sysconfig from pkg_resources import get_distribution @@ -98,7 +99,7 @@ if THIS_IS_WINDOWS: if THIS_IS_WINDOWS: chinilla_mod = importlib.import_module("chinilla") - dll_paths = ROOT / "*.dll" + dll_paths = pathlib.Path(sysconfig.get_path("platlib")) / "*.dll" binaries = [ ( diff --git a/chinilla/rpc/farmer_rpc_api.py b/chinilla/rpc/farmer_rpc_api.py index cbdda8b8c2bc..d824f6912ebe 100644 --- a/chinilla/rpc/farmer_rpc_api.py +++ b/chinilla/rpc/farmer_rpc_api.py @@ -1,11 +1,69 @@ +import dataclasses +import operator from typing import Any, Callable, Dict, List, Optional +from typing_extensions import Protocol + from chinilla.farmer.farmer import Farmer +from chinilla.plot_sync.receiver import Receiver +from chinilla.protocols.harvester_protocol import Plot from chinilla.types.blockchain_format.sized_bytes import bytes32 from chinilla.util.byte_types import hexstr_to_bytes +from chinilla.util.paginator import Paginator +from chinilla.util.streamable import dataclass_from_dict from chinilla.util.ws_message import WsRpcMessage, create_payload_dict +class PaginatedRequestData(Protocol): + node_id: bytes32 + page: int + page_size: int + + +@dataclasses.dataclass 
+class FilterItem: + key: str + value: Optional[str] + + +@dataclasses.dataclass +class PlotInfoRequestData: + node_id: bytes32 + page: int + page_size: int + filter: List[FilterItem] = dataclasses.field(default_factory=list) + sort_key: str = "filename" + reverse: bool = False + + +@dataclasses.dataclass +class PlotPathRequestData: + node_id: bytes32 + page: int + page_size: int + filter: List[str] = dataclasses.field(default_factory=list) + reverse: bool = False + + +def paginated_plot_request(source: List[Any], request: PaginatedRequestData) -> Dict[str, object]: + paginator: Paginator = Paginator(source, request.page_size) + return { + "node_id": request.node_id.hex(), + "page": request.page, + "page_count": paginator.page_count(), + "total_count": len(source), + "plots": paginator.get_page(request.page), + } + + +def plot_matches_filter(plot: Plot, filter_item: FilterItem) -> bool: + plot_attribute = getattr(plot, filter_item.key) + if filter_item.value is None: + return plot_attribute is None + else: + return filter_item.value in str(plot_attribute) + + class FarmerRpcApi: def __init__(self, farmer: Farmer): self.service = farmer @@ -20,6 +78,11 @@ def get_routes(self) -> Dict[str, Callable]: "/get_pool_state": self.get_pool_state, "/set_payout_instructions": self.set_payout_instructions, "/get_harvesters": self.get_harvesters, + "/get_harvesters_summary": self.get_harvesters_summary, + "/get_harvester_plots_valid": self.get_harvester_plots_valid, + "/get_harvester_plots_invalid": self.get_harvester_plots_invalid, + "/get_harvester_plots_keys_missing": self.get_harvester_plots_keys_missing, + "/get_harvester_plots_duplicates": self.get_harvester_plots_duplicates, "/get_pool_login_link": self.get_pool_login_link, } @@ -44,10 +107,19 @@ async def _state_changed(self, change: str, change_data: Dict) -> List[WsRpcMess "wallet_ui", ) ] - elif change == "new_plots": + elif change == "harvester_update": return [ create_payload_dict( - "get_harvesters", + "harvester_update", + change_data, + self.service_name, + "wallet_ui", + ) + ] + elif change == "harvester_removed": + return [ + create_payload_dict( + "harvester_removed", change_data, self.service_name, "wallet_ui", @@ -109,11 +181,20 @@ async def set_reward_targets(self, request: Dict) -> Dict: self.service.set_reward_targets(farmer_target, pool_target) return {} + def get_pool_contract_puzzle_hash_plot_count(self, pool_contract_puzzle_hash: bytes32) -> int: + plot_count: int = 0 + for receiver in self.service.plot_sync_receivers.values(): + plot_count += sum( + plot.pool_contract_puzzle_hash == pool_contract_puzzle_hash for plot in receiver.plots().values() + ) + return plot_count + async def get_pool_state(self, _: Dict) -> Dict: pools_list = [] for p2_singleton_puzzle_hash, pool_dict in self.service.pool_state.items(): pool_state = pool_dict.copy() pool_state["p2_singleton_puzzle_hash"] = p2_singleton_puzzle_hash.hex() + pool_state["plot_count"] = self.get_pool_contract_puzzle_hash_plot_count(p2_singleton_puzzle_hash) pools_list.append(pool_state) return {"pool_state": pools_list} @@ -123,7 +204,48 @@ async def set_payout_instructions(self, request: Dict) -> Dict: return {} async def get_harvesters(self, _: Dict): - return await self.service.get_harvesters() + return await self.service.get_harvesters(False) + + async def get_harvesters_summary(self, _: Dict[str, object]) -> Dict[str, object]: + return await self.service.get_harvesters(True) + + async def get_harvester_plots_valid(self, request_dict: Dict[str, object]) -> Dict[str, 
object]: + # TODO: Consider having an extra List[PlotInfo] in Receiver to avoid rebuilding the list for each call + request = dataclass_from_dict(PlotInfoRequestData, request_dict) + plot_list = list(self.service.get_receiver(request.node_id).plots().values()) + # Apply filter + plot_list = [ + plot for plot in plot_list if all(plot_matches_filter(plot, filter_item) for filter_item in request.filter) + ] + restricted_sort_keys: List[str] = ["pool_contract_puzzle_hash", "pool_public_key", "plot_public_key"] + # Apply sort_key and reverse if sort_key is not restricted + if request.sort_key in restricted_sort_keys: + raise KeyError(f"Can't sort by optional attributes: {restricted_sort_keys}") + # Sort by plot_id also by default since it's unique + plot_list = sorted(plot_list, key=operator.attrgetter(request.sort_key, "plot_id"), reverse=request.reverse) + return paginated_plot_request(plot_list, request) + + def paginated_plot_path_request( + self, source_func: Callable[[Receiver], List[str]], request_dict: Dict[str, object] + ) -> Dict[str, object]: + request: PlotPathRequestData = dataclass_from_dict(PlotPathRequestData, request_dict) + receiver = self.service.get_receiver(request.node_id) + source = source_func(receiver) + # Apply filter + source = [plot for plot in source if all(filter_item in plot for filter_item in request.filter)] + # Apply reverse + source = sorted(source, reverse=request.reverse) + return paginated_plot_request(source, request) + + async def get_harvester_plots_invalid(self, request_dict: Dict[str, object]) -> Dict[str, object]: + return self.paginated_plot_path_request(Receiver.invalid, request_dict) + + async def get_harvester_plots_keys_missing(self, request_dict: Dict[str, object]) -> Dict[str, object]: + return self.paginated_plot_path_request(Receiver.keys_missing, request_dict) + + async def get_harvester_plots_duplicates(self, request_dict: Dict[str, object]) -> Dict[str, object]: + return self.paginated_plot_path_request(Receiver.duplicates, request_dict) async def get_pool_login_link(self, request: Dict) -> Dict: launcher_id: bytes32 = bytes32(hexstr_to_bytes(request["launcher_id"])) diff --git a/chinilla/rpc/farmer_rpc_client.py b/chinilla/rpc/farmer_rpc_client.py index 8a40e76ef84d..d0e556575f90 100644 --- a/chinilla/rpc/farmer_rpc_client.py +++ b/chinilla/rpc/farmer_rpc_client.py @@ -1,7 +1,9 @@ from typing import Dict, List, Optional, Any +from chinilla.rpc.farmer_rpc_api import PlotInfoRequestData, PlotPathRequestData from chinilla.rpc.rpc_client import RpcClient from chinilla.types.blockchain_format.sized_bytes import bytes32 +from chinilla.util.misc import dataclass_to_json_dict class FarmerRpcClient(RpcClient): @@ -52,6 +54,21 @@ async def set_payout_instructions(self, launcher_id: bytes32, payout_instruction async def get_harvesters(self) -> Dict[str, Any]: return await self.fetch("get_harvesters", {}) + async def get_harvesters_summary(self) -> Dict[str, object]: + return await self.fetch("get_harvesters_summary", {}) + + async def get_harvester_plots_valid(self, request: PlotInfoRequestData) -> Dict[str, Any]: + return await self.fetch("get_harvester_plots_valid", dataclass_to_json_dict(request)) + + async def get_harvester_plots_invalid(self, request: PlotPathRequestData) -> Dict[str, Any]: + return await self.fetch("get_harvester_plots_invalid", dataclass_to_json_dict(request)) + + async def get_harvester_plots_keys_missing(self, request: PlotPathRequestData) -> Dict[str, Any]: +
return await self.fetch("get_harvester_plots_keys_missing", dataclass_to_json_dict(request)) + + async def get_harvester_plots_duplicates(self, request: PlotPathRequestData) -> Dict[str, Any]: + return await self.fetch("get_harvester_plots_duplicates", dataclass_to_json_dict(request)) + async def get_pool_login_link(self, launcher_id: bytes32) -> Optional[str]: try: return (await self.fetch("get_pool_login_link", {"launcher_id": launcher_id.hex()}))["login_link"] diff --git a/chinilla/rpc/rpc_server.py b/chinilla/rpc/rpc_server.py index b027721b724c..bd3aa231a545 100644 --- a/chinilla/rpc/rpc_server.py +++ b/chinilla/rpc/rpc_server.py @@ -9,7 +9,7 @@ from chinilla.rpc.util import wrap_http_handler from chinilla.server.outbound_message import NodeType -from chinilla.server.server import ssl_context_for_server +from chinilla.server.server import ssl_context_for_client, ssl_context_for_server from chinilla.types.peer_info import PeerInfo from chinilla.util.byte_types import hexstr_to_bytes from chinilla.util.ints import uint16 @@ -42,6 +42,9 @@ def __init__(self, rpc_api: Any, service_name: str, stop_cb: Callable, root_path self.ssl_context = ssl_context_for_server( self.ca_cert_path, self.ca_key_path, self.crt_path, self.key_path, log=self.log ) + self.ssl_client_context = ssl_context_for_client( + self.ca_cert_path, self.ca_key_path, self.crt_path, self.key_path, log=self.log + ) async def stop(self): self.shut_down = True @@ -278,7 +281,7 @@ async def connect_to_daemon(self, self_hostname: str, daemon_port: uint16): autoclose=True, autoping=True, heartbeat=60, - ssl_context=self.ssl_context, + ssl_context=self.ssl_client_context, max_msg_size=max_message_size, ) await self.connection(self.websocket) diff --git a/chinilla/server/server.py b/chinilla/server/server.py index 3ad5c3b94720..8e2ea8ec4478 100644 --- a/chinilla/server/server.py +++ b/chinilla/server/server.py @@ -48,7 +48,7 @@ def ssl_context_for_server( if check_permissions: verify_ssl_certs_and_keys([ca_cert, private_cert_path], [ca_key, private_key_path], log) - ssl_context = ssl._create_unverified_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=str(ca_cert)) + ssl_context = ssl._create_unverified_context(purpose=ssl.Purpose.CLIENT_AUTH, cafile=str(ca_cert)) ssl_context.check_hostname = False ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2 ssl_context.set_ciphers( diff --git a/chinilla/timelord/timelord.py b/chinilla/timelord/timelord.py index 26500e2d2094..d137f2cf8bf0 100644 --- a/chinilla/timelord/timelord.py +++ b/chinilla/timelord/timelord.py @@ -183,25 +183,22 @@ async def _handle_client(self, reader: asyncio.StreamReader, writer: asyncio.Str async def _stop_chain(self, chain: Chain): try: - while chain not in self.allows_iters: - self.lock.release() - await asyncio.sleep(0.05) - log.error(f"Trying to stop {chain} before its initialization.") - await self.lock.acquire() - if chain not in self.chain_type_to_stream: - log.warning(f"Trying to stop a crashed chain: {chain}.") - return None - stop_ip, _, stop_writer = self.chain_type_to_stream[chain] - stop_writer.write(b"010") - await stop_writer.drain() + _, _, stop_writer = self.chain_type_to_stream[chain] if chain in self.allows_iters: + stop_writer.write(b"010") + await stop_writer.drain() self.allows_iters.remove(chain) + else: + log.error(f"Trying to stop {chain} before its initialization.") + stop_writer.close() + await stop_writer.wait_closed() if chain not in self.unspawned_chains: self.unspawned_chains.append(chain) - if chain in self.chain_type_to_stream: - del 
self.chain_type_to_stream[chain] + del self.chain_type_to_stream[chain] except ConnectionResetError as e: log.error(f"{e}") + except Exception as e: + log.error(f"Exception in stop chain: {type(e)} {e}") def _can_infuse_unfinished_block(self, block: timelord_protocol.NewUnfinishedBlockTimelord) -> Optional[uint64]: assert self.last_state is not None diff --git a/chinilla/util/misc.py b/chinilla/util/misc.py index 3607017c802a..5760d3be061f 100644 --- a/chinilla/util/misc.py +++ b/chinilla/util/misc.py @@ -1,3 +1,9 @@ +import dataclasses +from typing import Any, Dict, Sequence, Union + +from chinilla.util.streamable import recurse_jsonify + + def format_bytes(bytes: int) -> str: if not isinstance(bytes, int) or bytes < 0: @@ -68,3 +74,11 @@ def prompt_yes_no(prompt: str = "(y/n) ") -> bool: return True elif ch == "n": return False + + +def get_list_or_len(list_in: Sequence[object], length: bool) -> Union[int, Sequence[object]]: + return len(list_in) if length else list_in + + +def dataclass_to_json_dict(instance: Any) -> Dict[str, Any]: + return recurse_jsonify(dataclasses.asdict(instance)) diff --git a/chinilla/util/paginator.py b/chinilla/util/paginator.py new file mode 100644 index 000000000000..69bcce5377f4 --- /dev/null +++ b/chinilla/util/paginator.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +import dataclasses +from math import ceil +from typing import Sequence + + +class InvalidPageSizeLimit(Exception): + def __init__(self, page_size_limit: int) -> None: + super().__init__(f"Page size limit must be one or more, not: {page_size_limit}") + + +class InvalidPageSizeError(Exception): + def __init__(self, page_size: int, page_size_limit: int) -> None: + super().__init__(f"Invalid page size {page_size}. Must be between: 1 and {page_size_limit}") + + +class PageOutOfBoundsError(Exception): + def __init__(self, page_size: int, max_page_size: int) -> None: + super().__init__(f"Page {page_size} out of bounds. 
Available pages: 0-{max_page_size}") + + +@dataclasses.dataclass +class Paginator: + _source: Sequence[object] + _page_size: int + + @classmethod + def create(cls, source: Sequence[object], page_size: int, page_size_limit: int = 100) -> Paginator: + if page_size_limit < 1: + raise InvalidPageSizeLimit(page_size_limit) + if page_size > page_size_limit: + raise InvalidPageSizeError(page_size, page_size_limit) + return cls(source, page_size) + + def page_size(self) -> int: + return self._page_size + + def page_count(self) -> int: + return max(1, ceil(len(self._source) / self._page_size)) + + def get_page(self, page: int) -> Sequence[object]: + if page < 0 or page >= self.page_count(): + raise PageOutOfBoundsError(page, self.page_count() - 1) + offset = page * self._page_size + return self._source[offset : offset + self._page_size] diff --git a/chinilla/util/streamable.py b/chinilla/util/streamable.py index 66bb2fec44ab..ae2117a0fb46 100644 --- a/chinilla/util/streamable.py +++ b/chinilla/util/streamable.py @@ -5,7 +5,21 @@ import pprint import sys from enum import Enum -from typing import Any, BinaryIO, Dict, get_type_hints, List, Tuple, Type, TypeVar, Union, Callable, Optional, Iterator +from typing import ( + Any, + BinaryIO, + Callable, + Dict, + Iterator, + List, + Optional, + Tuple, + Type, + TypeVar, + Union, + get_type_hints, + overload, +) from blspy import G1Element, G2Element, PrivateKey from typing_extensions import Literal @@ -58,29 +72,32 @@ class DefinitionError(StreamableError): _T_Streamable = TypeVar("_T_Streamable", bound="Streamable") +ParseFunctionType = Callable[[BinaryIO], object] +StreamFunctionType = Callable[[object, BinaryIO], None] + # Caches to store the fields and (de)serialization methods for all available streamable classes. -FIELDS_FOR_STREAMABLE_CLASS = {} -STREAM_FUNCTIONS_FOR_STREAMABLE_CLASS = {} -PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS = {} +FIELDS_FOR_STREAMABLE_CLASS: Dict[Type[object], Dict[str, Type[object]]] = {} +STREAM_FUNCTIONS_FOR_STREAMABLE_CLASS: Dict[Type[object], List[StreamFunctionType]] = {} +PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS: Dict[Type[object], List[ParseFunctionType]] = {} -def is_type_List(f_type: Type) -> bool: +def is_type_List(f_type: object) -> bool: return get_origin(f_type) == list or f_type == list -def is_type_SpecificOptional(f_type) -> bool: +def is_type_SpecificOptional(f_type: object) -> bool: """ Returns true for types such as Optional[T], but not Optional, or T. """ return get_origin(f_type) == Union and get_args(f_type)[1]() is None -def is_type_Tuple(f_type: Type) -> bool: +def is_type_Tuple(f_type: object) -> bool: return get_origin(f_type) == tuple or f_type == tuple -def dataclass_from_dict(klass, d): +def dataclass_from_dict(klass: Type[Any], d: Any) -> Any: """ Converts a dictionary based on a dataclass, into an instance of that dataclass. Recursively goes through lists, optionals, and dictionaries. 
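For reference, a minimal usage sketch of the Paginator class added in chinilla/util/paginator.py above. This is illustrative only and not part of the patch; the data is made up, but every call matches the class definition shown in the diff:

from chinilla.util.paginator import PageOutOfBoundsError, Paginator

# Ten items at three per page yield pages 0..3, since
# page_count() == max(1, ceil(10 / 3)) == 4.
pager = Paginator.create(list(range(10)), page_size=3)  # page_size_limit defaults to 100
assert pager.page_count() == 4
assert pager.get_page(0) == [0, 1, 2]  # source[0:3]
assert pager.get_page(3) == [9]        # the last page may be partial

try:
    pager.get_page(4)  # pages are 0-indexed, so page 4 does not exist here
except PageOutOfBoundsError as e:
    print(e)  # Page 4 out of bounds. Available pages: 0-3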
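Similarly, the two helpers added to chinilla/util/misc.py earlier in the patch are small but load-bearing: get_list_or_len is what lets the counts-only summary responses (see the get_harvesters_summary assertions in the tests below) return lengths instead of full lists, and dataclass_to_json_dict is the dataclasses.asdict plus recurse_jsonify composition that the new farmer RPC client methods use to serialize their request dataclasses. A rough sketch, with a hypothetical request class standing in for the real ones:

from dataclasses import dataclass

from chinilla.util.misc import dataclass_to_json_dict, get_list_or_len

assert get_list_or_len([1, 2, 3], length=False) == [1, 2, 3]  # full data
assert get_list_or_len([1, 2, 3], length=True) == 3           # counts-only mode

@dataclass(frozen=True)
class DummyRequest:  # hypothetical stand-in for e.g. PlotPathRequestData
    node_id: str
    page: int

# Per the recurse_jsonify docstring, strings and small ints pass through
# unchanged, so this yields a plain JSON-ready dict.
assert dataclass_to_json_dict(DummyRequest("ab" * 32, 0)) == {"node_id": "ab" * 32, "page": 0}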
@@ -100,7 +117,8 @@ def dataclass_from_dict(klass, d): return tuple(klass_properties) elif dataclasses.is_dataclass(klass): # Type is a dataclass, data is a dictionary - fieldtypes = {f.name: f.type for f in dataclasses.fields(klass)} + hints = get_type_hints(klass) + fieldtypes = {f.name: hints.get(f.name, f.type) for f in dataclasses.fields(klass)} return klass(**{f: dataclass_from_dict(fieldtypes[f], d[f]) for f in d}) elif is_type_List(klass): # Type is a list, data is a list @@ -116,7 +134,17 @@ def dataclass_from_dict(klass, d): return klass(d) -def recurse_jsonify(d): +@overload +def recurse_jsonify(d: Union[List[Any], Tuple[Any, ...]]) -> List[Any]: + ... + + +@overload +def recurse_jsonify(d: Dict[str, Any]) -> Dict[str, Any]: + ... + + +def recurse_jsonify(d: Union[List[Any], Tuple[Any, ...], Dict[str, Any]]) -> Union[List[Any], Dict[str, Any]]: """ Makes bytes objects and unhashable types into strings with 0x, and makes large ints into strings. @@ -173,11 +201,11 @@ def parse_uint32(f: BinaryIO, byteorder: Literal["little", "big"] = "big") -> ui return uint32(int.from_bytes(size_bytes, byteorder)) -def write_uint32(f: BinaryIO, value: uint32, byteorder: Literal["little", "big"] = "big"): +def write_uint32(f: BinaryIO, value: uint32, byteorder: Literal["little", "big"] = "big") -> None: f.write(value.to_bytes(4, byteorder)) -def parse_optional(f: BinaryIO, parse_inner_type_f: Callable[[BinaryIO], Any]) -> Optional[Any]: +def parse_optional(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> Optional[object]: is_present_bytes = f.read(1) assert is_present_bytes is not None and len(is_present_bytes) == 1 # Checks for EOF if is_present_bytes == bytes([0]): @@ -195,8 +223,8 @@ def parse_bytes(f: BinaryIO) -> bytes: return bytes_read -def parse_list(f: BinaryIO, parse_inner_type_f: Callable[[BinaryIO], Any]) -> List[Any]: - full_list: List = [] +def parse_list(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> List[object]: + full_list: List[object] = [] # wjb assert inner_type != get_args(List)[0] list_size = parse_uint32(f) for list_index in range(list_size): @@ -204,14 +232,14 @@ def parse_list(f: BinaryIO, parse_inner_type_f: Callable[[BinaryIO], Any]) -> Li return full_list -def parse_tuple(f: BinaryIO, list_parse_inner_type_f: List[Callable[[BinaryIO], Any]]) -> Tuple[Any, ...]: - full_list = [] +def parse_tuple(f: BinaryIO, list_parse_inner_type_f: List[ParseFunctionType]) -> Tuple[object, ...]: + full_list: List[object] = [] for parse_f in list_parse_inner_type_f: full_list.append(parse_f(f)) return tuple(full_list) -def parse_size_hints(f: BinaryIO, f_type: Type, bytes_to_read: int) -> Any: +def parse_size_hints(f: BinaryIO, f_type: Type[Any], bytes_to_read: int) -> Any: bytes_read = f.read(bytes_to_read) assert bytes_read is not None and len(bytes_read) == bytes_to_read return f_type.from_bytes(bytes_read) @@ -224,7 +252,7 @@ def parse_str(f: BinaryIO) -> str: return bytes.decode(str_read_bytes, "utf-8") -def stream_optional(stream_inner_type_func: Callable[[Any, BinaryIO], None], item: Any, f: BinaryIO) -> None: +def stream_optional(stream_inner_type_func: StreamFunctionType, item: Any, f: BinaryIO) -> None: if item is None: f.write(bytes([0])) else: @@ -237,13 +265,13 @@ def stream_bytes(item: Any, f: BinaryIO) -> None: f.write(item) -def stream_list(stream_inner_type_func: Callable[[Any, BinaryIO], None], item: Any, f: BinaryIO) -> None: +def stream_list(stream_inner_type_func: StreamFunctionType, item: Any, f: BinaryIO) -> None: write_uint32(f, uint32(len(item))) 
for element in item: stream_inner_type_func(element, f) -def stream_tuple(stream_inner_type_funcs: List[Callable[[Any, BinaryIO], None]], item: Any, f: BinaryIO) -> None: +def stream_tuple(stream_inner_type_funcs: List[StreamFunctionType], item: Any, f: BinaryIO) -> None: assert len(stream_inner_type_funcs) == len(item) for i in range(len(item)): stream_inner_type_funcs[i](item[i], f) @@ -255,7 +283,19 @@ def stream_str(item: Any, f: BinaryIO) -> None: f.write(str_bytes) -def streamable(cls: Any): +def stream_bool(item: Any, f: BinaryIO) -> None: + f.write(int(item).to_bytes(1, "big")) + + +def stream_streamable(item: object, f: BinaryIO) -> None: + getattr(item, "stream")(f) + + +def stream_byte_convertible(item: object, f: BinaryIO) -> None: + f.write(getattr(item, "__bytes__")()) + + +def streamable(cls: Type[_T_Streamable]) -> Type[_T_Streamable]: """ This decorator forces correct streamable protocol syntax/usage and populates the caches for types hints and (de)serialization methods for all members of the class. The correct usage is: @@ -279,7 +319,9 @@ class Example(Streamable): raise DefinitionError(f"@dataclass(frozen=True) required first. {correct_usage_string}") try: - object.__new__(cls)._streamable_test_if_dataclass_frozen_ = None + # Ignore mypy here because we especially want to access a not available member to test if + # the dataclass is frozen. + object.__new__(cls)._streamable_test_if_dataclass_frozen_ = None # type: ignore[attr-defined] except dataclasses.FrozenInstanceError: pass else: @@ -352,10 +394,10 @@ class Streamable: Make sure to use the streamable decorator when inheriting from the Streamable class to prepare the streaming caches. """ - def post_init_parse(self, item: Any, f_name: str, f_type: Type) -> Any: + def post_init_parse(self, item: Any, f_name: str, f_type: Type[Any]) -> Any: if is_type_List(f_type): - collected_list: List = [] - inner_type: Type = get_args(f_type)[0] + collected_list: List[Any] = [] + inner_type: Type[Any] = get_args(f_type)[0] # wjb assert inner_type != get_args(List)[0] # type: ignore if not is_type_List(type(item)): raise ValueError(f"Wrong type for {f_name}, need a list.") @@ -391,7 +433,7 @@ def post_init_parse(self, item: Any, f_name: str, f_type: Type) -> Any: raise ValueError(f"Wrong type for {f_name}") return item - def __post_init__(self): + def __post_init__(self) -> None: try: fields = FIELDS_FOR_STREAMABLE_CLASS[type(self)] except Exception: @@ -408,12 +450,12 @@ def __post_init__(self): object.__setattr__(self, f_name, self.post_init_parse(data[f_name], f_name, f_type)) @classmethod - def function_to_parse_one_item(cls, f_type: Type) -> Callable[[BinaryIO], Any]: + def function_to_parse_one_item(cls, f_type: Type[Any]) -> ParseFunctionType: """ This function returns a function taking one argument `f: BinaryIO` that parses and returns a value of the given type. 
""" - inner_type: Type + inner_type: Type[Any] if f_type is bool: return parse_bool if is_type_SpecificOptional(f_type): @@ -421,7 +463,8 @@ def function_to_parse_one_item(cls, f_type: Type) -> Callable[[BinaryIO], Any]: parse_inner_type_f = cls.function_to_parse_one_item(inner_type) return lambda f: parse_optional(f, parse_inner_type_f) if hasattr(f_type, "parse"): - return f_type.parse + # Ignoring for now as the proper solution isn't obvious + return f_type.parse # type: ignore[no-any-return] if f_type == bytes: return parse_bytes if is_type_List(f_type): @@ -444,7 +487,7 @@ def parse(cls: Type[_T_Streamable], f: BinaryIO) -> _T_Streamable: # Create the object without calling __init__() to avoid unnecessary post-init checks in strictdataclass obj: _T_Streamable = object.__new__(cls) fields: Iterator[str] = iter(FIELDS_FOR_STREAMABLE_CLASS.get(cls, {})) - values: Iterator = (parse_f(f) for parse_f in PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS[cls]) + values: Iterator[object] = (parse_f(f) for parse_f in PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS[cls]) for field, value in zip(fields, values): object.__setattr__(obj, field, value) @@ -456,8 +499,8 @@ def parse(cls: Type[_T_Streamable], f: BinaryIO) -> _T_Streamable: return obj @classmethod - def function_to_stream_one_item(cls, f_type: Type) -> Callable[[Any, BinaryIO], Any]: - inner_type: Type + def function_to_stream_one_item(cls, f_type: Type[Any]) -> StreamFunctionType: + inner_type: Type[Any] if is_type_SpecificOptional(f_type): inner_type = get_args(f_type)[0] stream_inner_type_func = cls.function_to_stream_one_item(inner_type) @@ -465,9 +508,9 @@ def function_to_stream_one_item(cls, f_type: Type) -> Callable[[Any, BinaryIO], elif f_type == bytes: return stream_bytes elif hasattr(f_type, "stream"): - return lambda item, f: item.stream(f) + return stream_streamable elif hasattr(f_type, "__bytes__"): - return lambda item, f: f.write(bytes(item)) + return stream_byte_convertible elif is_type_List(f_type): inner_type = get_args(f_type)[0] stream_inner_type_func = cls.function_to_stream_one_item(inner_type) @@ -481,7 +524,7 @@ def function_to_stream_one_item(cls, f_type: Type) -> Callable[[Any, BinaryIO], elif f_type is str: return stream_str elif f_type is bool: - return lambda item, f: f.write(int(item).to_bytes(1, "big")) + return stream_bool else: raise NotImplementedError(f"can't stream {f_type}") @@ -518,9 +561,9 @@ def __str__(self: Any) -> str: def __repr__(self: Any) -> str: return pp.pformat(recurse_jsonify(dataclasses.asdict(self))) - def to_json_dict(self) -> Dict: + def to_json_dict(self) -> Dict[str, Any]: return recurse_jsonify(dataclasses.asdict(self)) @classmethod - def from_json_dict(cls: Any, json_dict: Dict) -> Any: + def from_json_dict(cls: Any, json_dict: Dict[str, Any]) -> Any: return dataclass_from_dict(cls, json_dict) diff --git a/chinilla/wallet/util/wallet_sync_utils.py b/chinilla/wallet/util/wallet_sync_utils.py index c5b09e1feb8b..10866179e4cb 100644 --- a/chinilla/wallet/util/wallet_sync_utils.py +++ b/chinilla/wallet/util/wallet_sync_utils.py @@ -57,10 +57,10 @@ async def subscribe_to_phs( Tells full nodes that we are interested in puzzle hashes, and returns the response. 
""" msg = wallet_protocol.RegisterForPhUpdates(puzzle_hashes, uint32(max(min_height, uint32(0)))) - all_coins_state: Optional[RespondToPhUpdates] = await peer.register_interest_in_puzzle_hash(msg) - if all_coins_state is not None: - return all_coins_state.coin_states - return [] + all_coins_state: Optional[RespondToPhUpdates] = await peer.register_interest_in_puzzle_hash(msg, timeout=300) + if all_coins_state is None: + raise ValueError(f"None response from peer {peer.peer_host} for register_interest_in_puzzle_hash") + return all_coins_state.coin_states async def subscribe_to_coin_updates( @@ -72,10 +72,11 @@ async def subscribe_to_coin_updates( Tells full nodes that we are interested in coin ids, and returns the response. """ msg = wallet_protocol.RegisterForCoinUpdates(coin_names, uint32(max(0, min_height))) - all_coins_state: Optional[RespondToCoinUpdates] = await peer.register_interest_in_coin(msg) - if all_coins_state is not None: - return all_coins_state.coin_states - return [] + all_coins_state: Optional[RespondToCoinUpdates] = await peer.register_interest_in_coin(msg, timeout=300) + + if all_coins_state is None: + raise ValueError(f"None response from peer {peer.peer_host} for register_interest_in_coin") + return all_coins_state.coin_states def validate_additions( diff --git a/chinilla/wallet/wallet_node.py b/chinilla/wallet/wallet_node.py index 962af1be0691..5a6ddda16d66 100644 --- a/chinilla/wallet/wallet_node.py +++ b/chinilla/wallet/wallet_node.py @@ -699,9 +699,11 @@ async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: i for states in chunks(items, chunk_size): if self.server is None: self.log.error("No server") + await asyncio.gather(*all_tasks) return False if peer.peer_node_id not in self.server.all_connections: self.log.error(f"Disconnected from peer {peer.peer_node_id} host {peer.peer_host}") + await asyncio.gather(*all_tasks) return False if trusted: async with self.wallet_state_manager.db_wrapper.lock: @@ -726,6 +728,7 @@ async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: i await asyncio.sleep(0.1) if self._shut_down: self.log.info("Terminating receipt and validation due to shut down request") + await asyncio.gather(*all_tasks) return False concurrent_tasks_cs_heights.append(last_change_height_cs(states[0])) all_tasks.append(asyncio.create_task(receive_and_validate(states, idx, concurrent_tasks_cs_heights))) diff --git a/install.sh b/install.sh index 29865e08e510..04e32f948f23 100644 --- a/install.sh +++ b/install.sh @@ -56,7 +56,11 @@ fi # Get submodules git submodule update --init mozilla-ca -UBUNTU_PRE_2004=false +UBUNTU_PRE_2004=0 +UBUNTU_2000=0 +UBUNTU_2100=0 +UBUNTU_2200=0 + if $UBUNTU; then LSB_RELEASE=$(lsb_release -rs) # In case Ubuntu minimal does not come with bc @@ -64,8 +68,15 @@ if $UBUNTU; then sudo apt install bc -y fi # Mint 20.04 responds with 20 here so 20 instead of 20.04 - UBUNTU_PRE_2004=$(echo "$LSB_RELEASE<20" | bc) - UBUNTU_2100=$(echo "$LSB_RELEASE>=21" | bc) + if [ "$(echo "$LSB_RELEASE<20" | bc)" = "1" ]; then + UBUNTU_PRE_2004=1 + elif [ "$(echo "$LSB_RELEASE<21" | bc)" = "1" ]; then + UBUNTU_2000=1 + elif [ "$(echo "$LSB_RELEASE<22" | bc)" = "1" ]; then + UBUNTU_2100=1 + else + UBUNTU_2200=1 + fi fi install_python3_and_sqlite3_from_source_with_yum() { @@ -114,19 +125,23 @@ install_python3_and_sqlite3_from_source_with_yum() { # Manage npm and other install requirements on an OS specific basis if [ "$(uname)" = "Linux" ]; then #LINUX=1 - if [ "$UBUNTU" = "true" ] && [ "$UBUNTU_PRE_2004" = 
"1" ]; then + if [ "$UBUNTU_PRE_2004" = "1" ]; then # Ubuntu echo "Installing on Ubuntu pre 20.04 LTS." sudo apt-get update sudo apt-get install -y python3.7-venv python3.7-distutils openssl - elif [ "$UBUNTU" = "true" ] && [ "$UBUNTU_PRE_2004" = "0" ] && [ "$UBUNTU_2100" = "0" ]; then + elif [ "$UBUNTU_2000" = "1" ]; then echo "Installing on Ubuntu 20.04 LTS." sudo apt-get update sudo apt-get install -y python3.8-venv python3-distutils openssl - elif [ "$UBUNTU" = "true" ] && [ "$UBUNTU_2100" = "1" ]; then - echo "Installing on Ubuntu 21.04 or newer." + elif [ "$UBUNTU_2100" = "1" ]; then + echo "Installing on Ubuntu 21.04." sudo apt-get update sudo apt-get install -y python3.9-venv python3-distutils openssl + elif [ "$UBUNTU_2200" = "1" ]; then + echo "Installing on Ubuntu 22.04 LTS or newer." + sudo apt-get update + sudo apt-get install -y python3.10-venv python3-distutils openssl elif [ "$DEBIAN" = "true" ]; then echo "Installing on Debian." sudo apt-get update @@ -186,14 +201,14 @@ fi find_python() { set +e unset BEST_VERSION - for V in 39 3.9 38 3.8 37 3.7 3; do + for V in 310 3.10 39 3.9 38 3.8 37 3.7 3; do if command -v python$V >/dev/null; then if [ "$BEST_VERSION" = "" ]; then BEST_VERSION=$V if [ "$BEST_VERSION" = "3" ]; then PY3_VERSION=$(python$BEST_VERSION --version | cut -d ' ' -f2) - if [[ "$PY3_VERSION" =~ 3.10.* ]]; then - echo "Chinilla requires Python version <= 3.9.10" + if [[ "$PY3_VERSION" =~ 3.11.* ]]; then + echo "Chinilla requires Python version < 3.11.0" echo "Current Python version = $PY3_VERSION" # If Arch, direct to Arch Wiki if type pacman >/dev/null 2>&1 && [ -f "/etc/arch-release" ]; then diff --git a/mypy.ini b/mypy.ini index fa40db3ce9dc..d851f5f2e965 100644 --- a/mypy.ini +++ b/mypy.ini @@ -17,7 +17,7 @@ no_implicit_reexport = True strict_equality = True # list created by: venv/bin/mypy | sed -n 's/.py:.*//p' | sort | uniq | tr '/' '.' 
| tr '\n' ',' -[mypy-benchmarks.block_ref,benchmarks.block_store,benchmarks.coin_store,benchmarks.utils,build_scripts.installer-version,chinilla.clvm.spend_sim,chinilla.cmds.configure,chinilla.cmds.db,chinilla.cmds.db_upgrade_func,chinilla.cmds.farm_funcs,chinilla.cmds.init,chinilla.cmds.init_funcs,chinilla.cmds.keys,chinilla.cmds.keys_funcs,chinilla.cmds.passphrase,chinilla.cmds.passphrase_funcs,chinilla.cmds.plotnft,chinilla.cmds.plotnft_funcs,chinilla.cmds.plots,chinilla.cmds.plotters,chinilla.cmds.show,chinilla.cmds.start_funcs,chinilla.cmds.wallet,chinilla.cmds.wallet_funcs,chinilla.consensus.block_body_validation,chinilla.consensus.blockchain,chinilla.consensus.blockchain_interface,chinilla.consensus.block_creation,chinilla.consensus.block_header_validation,chinilla.consensus.block_record,chinilla.consensus.block_root_validation,chinilla.consensus.coinbase,chinilla.consensus.constants,chinilla.consensus.difficulty_adjustment,chinilla.consensus.get_block_challenge,chinilla.consensus.multiprocess_validation,chinilla.consensus.pos_quality,chinilla.consensus.vdf_info_computation,chinilla.daemon.client,chinilla.daemon.keychain_proxy,chinilla.daemon.keychain_server,chinilla.daemon.server,chinilla.farmer.farmer,chinilla.farmer.farmer_api,chinilla.full_node.block_height_map,chinilla.full_node.block_store,chinilla.full_node.bundle_tools,chinilla.full_node.coin_store,chinilla.full_node.full_node,chinilla.full_node.full_node_api,chinilla.full_node.full_node_store,chinilla.full_node.generator,chinilla.full_node.hint_store,chinilla.full_node.lock_queue,chinilla.full_node.mempool,chinilla.full_node.mempool_check_conditions,chinilla.full_node.mempool_manager,chinilla.full_node.pending_tx_cache,chinilla.full_node.sync_store,chinilla.full_node.weight_proof,chinilla.harvester.harvester,chinilla.harvester.harvester_api,chinilla.introducer.introducer,chinilla.introducer.introducer_api,chinilla.plotters.bladebit,chinilla.plotters.chiapos,chinilla.plotters.install_plotter,chinilla.plotters.madmax,chinilla.plotters.plotters,chinilla.plotters.plotters_util,chinilla.plotting.check_plots,chinilla.plotting.create_plots,chinilla.plotting.manager,chinilla.plotting.util,chinilla.pools.pool_config,chinilla.pools.pool_puzzles,chinilla.pools.pool_wallet,chinilla.pools.pool_wallet_info,chinilla.protocols.pool_protocol,chinilla.rpc.crawler_rpc_api,chinilla.rpc.farmer_rpc_api,chinilla.rpc.farmer_rpc_client,chinilla.rpc.full_node_rpc_api,chinilla.rpc.full_node_rpc_client,chinilla.rpc.harvester_rpc_api,chinilla.rpc.harvester_rpc_client,chinilla.rpc.rpc_client,chinilla.rpc.rpc_server,chinilla.rpc.timelord_rpc_api,chinilla.rpc.util,chinilla.rpc.wallet_rpc_api,chinilla.rpc.wallet_rpc_client,chinilla.seeder.crawler,chinilla.seeder.crawler_api,chinilla.seeder.crawl_store,chinilla.seeder.dns_server,chinilla.seeder.peer_record,chinilla.seeder.start_crawler,chinilla.server.address_manager,chinilla.server.address_manager_store,chinilla.server.connection_utils,chinilla.server.introducer_peers,chinilla.server.node_discovery,chinilla.server.peer_store_resolver,chinilla.server.rate_limits,chinilla.server.reconnect_task,chinilla.server.server,chinilla.server.ssl_context,chinilla.server.start_farmer,chinilla.server.start_full_node,chinilla.server.start_harvester,chinilla.server.start_introducer,chinilla.server.start_service,chinilla.server.start_timelord,chinilla.server.start_wallet,chinilla.server.upnp,chinilla.server.ws_connection,chinilla.simulator.full_node_simulator,chinilla.simulator.start_simulator,chinilla.ssl.create_ssl,chinill
a.timelord.iters_from_block,chinilla.timelord.timelord,chinilla.timelord.timelord_api,chinilla.timelord.timelord_launcher,chinilla.timelord.timelord_state,chinilla.types.announcement,chinilla.types.blockchain_format.classgroup,chinilla.types.blockchain_format.coin,chinilla.types.blockchain_format.program,chinilla.types.blockchain_format.proof_of_space,chinilla.types.blockchain_format.tree_hash,chinilla.types.blockchain_format.vdf,chinilla.types.full_block,chinilla.types.header_block,chinilla.types.mempool_item,chinilla.types.name_puzzle_condition,chinilla.types.peer_info,chinilla.types.spend_bundle,chinilla.types.transaction_queue_entry,chinilla.types.unfinished_block,chinilla.types.unfinished_header_block,chinilla.util.api_decorators,chinilla.util.block_cache,chinilla.util.byte_types,chinilla.util.cached_bls,chinilla.util.check_fork_next_block,chinilla.util.chinilla_logging,chinilla.util.config,chinilla.util.db_wrapper,chinilla.util.dump_keyring,chinilla.util.file_keyring,chinilla.util.files,chinilla.util.hash,chinilla.util.ints,chinilla.util.json_util,chinilla.util.keychain,chinilla.util.keyring_wrapper,chinilla.util.log_exceptions,chinilla.util.lru_cache,chinilla.util.make_test_constants,chinilla.util.merkle_set,chinilla.util.network,chinilla.util.partial_func,chinilla.util.pip_import,chinilla.util.profiler,chinilla.util.safe_cancel_task,chinilla.util.service_groups,chinilla.util.ssl_check,chinilla.util.streamable,chinilla.util.struct_stream,chinilla.util.validate_alert,chinilla.wallet.block_record,chinilla.wallet.cat_wallet.cat_utils,chinilla.wallet.cat_wallet.cat_wallet,chinilla.wallet.cat_wallet.lineage_store,chinilla.wallet.chinillalisp,chinilla.wallet.did_wallet.did_wallet,chinilla.wallet.did_wallet.did_wallet_puzzles,chinilla.wallet.key_val_store,chinilla.wallet.lineage_proof,chinilla.wallet.payment,chinilla.wallet.puzzles.load_clvm,chinilla.wallet.puzzles.p2_conditions,chinilla.wallet.puzzles.p2_delegated_conditions,chinilla.wallet.puzzles.p2_delegated_puzzle,chinilla.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle,chinilla.wallet.puzzles.p2_m_of_n_delegate_direct,chinilla.wallet.puzzles.p2_puzzle_hash,chinilla.wallet.puzzles.prefarm.spend_prefarm,chinilla.wallet.puzzles.puzzle_utils,chinilla.wallet.puzzles.rom_bootstrap_generator,chinilla.wallet.puzzles.singleton_top_layer,chinilla.wallet.puzzles.tails,chinilla.wallet.rl_wallet.rl_wallet,chinilla.wallet.rl_wallet.rl_wallet_puzzles,chinilla.wallet.secret_key_store,chinilla.wallet.settings.user_settings,chinilla.wallet.trade_manager,chinilla.wallet.trade_record,chinilla.wallet.trading.offer,chinilla.wallet.trading.trade_store,chinilla.wallet.transaction_record,chinilla.wallet.util.debug_spend_bundle,chinilla.wallet.util.new_peak_queue,chinilla.wallet.util.peer_request_cache,chinilla.wallet.util.wallet_sync_utils,chinilla.wallet.wallet,chinilla.wallet.wallet_action_store,chinilla.wallet.wallet_blockchain,chinilla.wallet.wallet_coin_store,chinilla.wallet.wallet_interested_store,chinilla.wallet.wallet_node,chinilla.wallet.wallet_node_api,chinilla.wallet.wallet_pool_store,chinilla.wallet.wallet_puzzle_store,chinilla.wallet.wallet_state_manager,chinilla.wallet.wallet_sync_store,chinilla.wallet.wallet_transaction_store,chinilla.wallet.wallet_user_store,chinilla.wallet.wallet_weight_proof_handler,installhelper,tests.blockchain.blockchain_test_utils,tests.blockchain.test_blockchain,tests.blockchain.test_blockchain_transactions,tests.block_tools,tests.build-init-files,tests.build-workflows,tests.clvm.coin_store,tests.clvm.test_chinillal
isp_deserialization,tests.clvm.test_clvm_compilation,tests.clvm.test_program,tests.clvm.test_puzzle_compression,tests.clvm.test_puzzles,tests.clvm.test_serialized_program,tests.clvm.test_singletons,tests.clvm.test_spend_sim,tests.conftest,tests.connection_utils,tests.core.cmds.test_keys,tests.core.consensus.test_pot_iterations,tests.core.custom_types.test_coin,tests.core.custom_types.test_proof_of_space,tests.core.custom_types.test_spend_bundle,tests.core.daemon.test_daemon,tests.core.full_node.full_sync.test_full_sync,tests.core.full_node.stores.test_block_store,tests.core.full_node.stores.test_coin_store,tests.core.full_node.stores.test_full_node_store,tests.core.full_node.stores.test_hint_store,tests.core.full_node.stores.test_sync_store,tests.core.full_node.test_address_manager,tests.core.full_node.test_block_height_map,tests.core.full_node.test_conditions,tests.core.full_node.test_full_node,tests.core.full_node.test_mempool,tests.core.full_node.test_mempool_performance,tests.core.full_node.test_node_load,tests.core.full_node.test_peer_store_resolver,tests.core.full_node.test_performance,tests.core.full_node.test_transactions,tests.core.make_block_generator,tests.core.node_height,tests.core.server.test_dos,tests.core.server.test_rate_limits,tests.core.ssl.test_ssl,tests.core.test_cost_calculation,tests.core.test_crawler_rpc,tests.core.test_daemon_rpc,tests.core.test_db_conversion,tests.core.test_farmer_harvester_rpc,tests.core.test_filter,tests.core.test_full_node_rpc,tests.core.test_merkle_set,tests.core.test_setproctitle,tests.core.util.test_cached_bls,tests.core.util.test_config,tests.core.util.test_file_keyring_synchronization,tests.core.util.test_files,tests.core.util.test_keychain,tests.core.util.test_keyring_wrapper,tests.core.util.test_lru_cache,tests.core.util.test_significant_bits,tests.core.util.test_streamable,tests.farmer_harvester.test_farmer_harvester,tests.generator.test_compression,tests.generator.test_generator_types,tests.generator.test_list_to_batches,tests.generator.test_rom,tests.generator.test_scan,tests.plotting.test_plot_manager,tests.pools.test_pool_cmdline,tests.pools.test_pool_config,tests.pools.test_pool_puzzles_lifecycle,tests.pools.test_pool_rpc,tests.pools.test_wallet_pool_store,tests.setup_nodes,tests.setup_services,tests.simulation.test_simulation,tests.time_out_assert,tests.tools.test_full_sync,tests.tools.test_run_block,tests.util.alert_server,tests.util.benchmark_cost,tests.util.blockchain,tests.util.build_network_protocol_files,tests.util.db_connection,tests.util.generator_tools_testing,tests.util.keyring,tests.util.key_tool,tests.util.misc,tests.util.network,tests.util.rpc,tests.util.test_full_block_utils,tests.util.test_lock_queue,tests.util.test_network_protocol_files,tests.util.test_struct_stream,tests.wallet.cat_wallet.test_cat_lifecycle,tests.wallet.cat_wallet.test_cat_wallet,tests.wallet.cat_wallet.test_offer_lifecycle,tests.wallet.cat_wallet.test_trades,tests.wallet.did_wallet.test_did,tests.wallet.did_wallet.test_did_rpc,tests.wallet.rl_wallet.test_rl_rpc,tests.wallet.rl_wallet.test_rl_wallet,tests.wallet.rpc.test_wallet_rpc,tests.wallet.simple_sync.test_simple_sync_protocol,tests.wallet.sync.test_wallet_sync,tests.wallet.test_bech32m,tests.wallet.test_chinillalisp,tests.wallet.test_puzzle_store,tests.wallet.test_singleton,tests.wallet.test_singleton_lifecycle,tests.wallet.test_singleton_lifecycle_fast,tests.wallet.test_taproot,tests.wallet.test_wallet,tests.wallet.test_wallet_blockchain,tests.wallet.test_wallet_interested_store,tests.walle
t.test_wallet_key_val_store,tests.wallet.test_wallet_user_store,tests.wallet_tools,tests.weight_proof.test_weight_proof,tools.analyze-chain,tools.run_block,tools.test_full_sync] +[mypy-benchmarks.block_ref,benchmarks.block_store,benchmarks.coin_store,benchmarks.utils,build_scripts.installer-version,chinilla.clvm.spend_sim,chinilla.cmds.configure,chinilla.cmds.db,chinilla.cmds.db_upgrade_func,chinilla.cmds.farm_funcs,chinilla.cmds.init,chinilla.cmds.init_funcs,chinilla.cmds.keys,chinilla.cmds.keys_funcs,chinilla.cmds.passphrase,chinilla.cmds.passphrase_funcs,chinilla.cmds.plotnft,chinilla.cmds.plotnft_funcs,chinilla.cmds.plots,chinilla.cmds.plotters,chinilla.cmds.show,chinilla.cmds.start_funcs,chinilla.cmds.wallet,chinilla.cmds.wallet_funcs,chinilla.consensus.block_body_validation,chinilla.consensus.blockchain,chinilla.consensus.blockchain_interface,chinilla.consensus.block_creation,chinilla.consensus.block_header_validation,chinilla.consensus.block_record,chinilla.consensus.block_root_validation,chinilla.consensus.coinbase,chinilla.consensus.constants,chinilla.consensus.difficulty_adjustment,chinilla.consensus.get_block_challenge,chinilla.consensus.multiprocess_validation,chinilla.consensus.pos_quality,chinilla.consensus.vdf_info_computation,chinilla.daemon.client,chinilla.daemon.keychain_proxy,chinilla.daemon.keychain_server,chinilla.daemon.server,chinilla.farmer.farmer,chinilla.farmer.farmer_api,chinilla.full_node.block_height_map,chinilla.full_node.block_store,chinilla.full_node.bundle_tools,chinilla.full_node.coin_store,chinilla.full_node.full_node,chinilla.full_node.full_node_api,chinilla.full_node.full_node_store,chinilla.full_node.generator,chinilla.full_node.hint_store,chinilla.full_node.lock_queue,chinilla.full_node.mempool,chinilla.full_node.mempool_check_conditions,chinilla.full_node.mempool_manager,chinilla.full_node.pending_tx_cache,chinilla.full_node.sync_store,chinilla.full_node.weight_proof,chinilla.harvester.harvester,chinilla.harvester.harvester_api,chinilla.introducer.introducer,chinilla.introducer.introducer_api,chinilla.plotters.bladebit,chinilla.plotters.chiapos,chinilla.plotters.install_plotter,chinilla.plotters.madmax,chinilla.plotters.plotters,chinilla.plotters.plotters_util,chinilla.plotting.check_plots,chinilla.plotting.create_plots,chinilla.plotting.manager,chinilla.plotting.util,chinilla.pools.pool_config,chinilla.pools.pool_puzzles,chinilla.pools.pool_wallet,chinilla.pools.pool_wallet_info,chinilla.protocols.pool_protocol,chinilla.rpc.crawler_rpc_api,chinilla.rpc.farmer_rpc_api,chinilla.rpc.farmer_rpc_client,chinilla.rpc.full_node_rpc_api,chinilla.rpc.full_node_rpc_client,chinilla.rpc.harvester_rpc_api,chinilla.rpc.harvester_rpc_client,chinilla.rpc.rpc_client,chinilla.rpc.rpc_server,chinilla.rpc.timelord_rpc_api,chinilla.rpc.util,chinilla.rpc.wallet_rpc_api,chinilla.rpc.wallet_rpc_client,chinilla.seeder.crawler,chinilla.seeder.crawler_api,chinilla.seeder.crawl_store,chinilla.seeder.dns_server,chinilla.seeder.peer_record,chinilla.seeder.start_crawler,chinilla.server.address_manager,chinilla.server.address_manager_store,chinilla.server.connection_utils,chinilla.server.introducer_peers,chinilla.server.node_discovery,chinilla.server.peer_store_resolver,chinilla.server.rate_limits,chinilla.server.reconnect_task,chinilla.server.server,chinilla.server.ssl_context,chinilla.server.start_farmer,chinilla.server.start_full_node,chinilla.server.start_harvester,chinilla.server.start_introducer,chinilla.server.start_service,chinilla.server.start_timelord,chinilla.server.start
_wallet,chinilla.server.upnp,chinilla.server.ws_connection,chinilla.simulator.full_node_simulator,chinilla.simulator.start_simulator,chinilla.ssl.create_ssl,chinilla.timelord.iters_from_block,chinilla.timelord.timelord,chinilla.timelord.timelord_api,chinilla.timelord.timelord_launcher,chinilla.timelord.timelord_state,chinilla.types.announcement,chinilla.types.blockchain_format.classgroup,chinilla.types.blockchain_format.coin,chinilla.types.blockchain_format.program,chinilla.types.blockchain_format.proof_of_space,chinilla.types.blockchain_format.tree_hash,chinilla.types.blockchain_format.vdf,chinilla.types.full_block,chinilla.types.header_block,chinilla.types.mempool_item,chinilla.types.name_puzzle_condition,chinilla.types.peer_info,chinilla.types.spend_bundle,chinilla.types.transaction_queue_entry,chinilla.types.unfinished_block,chinilla.types.unfinished_header_block,chinilla.util.api_decorators,chinilla.util.block_cache,chinilla.util.byte_types,chinilla.util.cached_bls,chinilla.util.check_fork_next_block,chinilla.util.chinilla_logging,chinilla.util.config,chinilla.util.db_wrapper,chinilla.util.dump_keyring,chinilla.util.file_keyring,chinilla.util.files,chinilla.util.hash,chinilla.util.ints,chinilla.util.json_util,chinilla.util.keychain,chinilla.util.keyring_wrapper,chinilla.util.log_exceptions,chinilla.util.lru_cache,chinilla.util.make_test_constants,chinilla.util.merkle_set,chinilla.util.network,chinilla.util.partial_func,chinilla.util.pip_import,chinilla.util.profiler,chinilla.util.safe_cancel_task,chinilla.util.service_groups,chinilla.util.ssl_check,chinilla.util.struct_stream,chinilla.util.validate_alert,chinilla.wallet.block_record,chinilla.wallet.cat_wallet.cat_utils,chinilla.wallet.cat_wallet.cat_wallet,chinilla.wallet.cat_wallet.lineage_store,chinilla.wallet.chinillalisp,chinilla.wallet.did_wallet.did_wallet,chinilla.wallet.did_wallet.did_wallet_puzzles,chinilla.wallet.key_val_store,chinilla.wallet.lineage_proof,chinilla.wallet.payment,chinilla.wallet.puzzles.load_clvm,chinilla.wallet.puzzles.p2_conditions,chinilla.wallet.puzzles.p2_delegated_conditions,chinilla.wallet.puzzles.p2_delegated_puzzle,chinilla.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle,chinilla.wallet.puzzles.p2_m_of_n_delegate_direct,chinilla.wallet.puzzles.p2_puzzle_hash,chinilla.wallet.puzzles.prefarm.spend_prefarm,chinilla.wallet.puzzles.puzzle_utils,chinilla.wallet.puzzles.rom_bootstrap_generator,chinilla.wallet.puzzles.singleton_top_layer,chinilla.wallet.puzzles.tails,chinilla.wallet.rl_wallet.rl_wallet,chinilla.wallet.rl_wallet.rl_wallet_puzzles,chinilla.wallet.secret_key_store,chinilla.wallet.settings.user_settings,chinilla.wallet.trade_manager,chinilla.wallet.trade_record,chinilla.wallet.trading.offer,chinilla.wallet.trading.trade_store,chinilla.wallet.transaction_record,chinilla.wallet.util.debug_spend_bundle,chinilla.wallet.util.new_peak_queue,chinilla.wallet.util.peer_request_cache,chinilla.wallet.util.wallet_sync_utils,chinilla.wallet.wallet,chinilla.wallet.wallet_action_store,chinilla.wallet.wallet_blockchain,chinilla.wallet.wallet_coin_store,chinilla.wallet.wallet_interested_store,chinilla.wallet.wallet_node,chinilla.wallet.wallet_node_api,chinilla.wallet.wallet_pool_store,chinilla.wallet.wallet_puzzle_store,chinilla.wallet.wallet_state_manager,chinilla.wallet.wallet_sync_store,chinilla.wallet.wallet_transaction_store,chinilla.wallet.wallet_user_store,chinilla.wallet.wallet_weight_proof_handler,installhelper,tests.blockchain.blockchain_test_utils,tests.blockchain.test_blockchain,tests.blockchain.
test_blockchain_transactions,tests.block_tools,tests.build-init-files,tests.build-workflows,tests.clvm.coin_store,tests.clvm.test_chinillalisp_deserialization,tests.clvm.test_clvm_compilation,tests.clvm.test_program,tests.clvm.test_puzzle_compression,tests.clvm.test_puzzles,tests.clvm.test_serialized_program,tests.clvm.test_singletons,tests.clvm.test_spend_sim,tests.conftest,tests.connection_utils,tests.core.cmds.test_keys,tests.core.consensus.test_pot_iterations,tests.core.custom_types.test_coin,tests.core.custom_types.test_proof_of_space,tests.core.custom_types.test_spend_bundle,tests.core.daemon.test_daemon,tests.core.full_node.full_sync.test_full_sync,tests.core.full_node.stores.test_block_store,tests.core.full_node.stores.test_coin_store,tests.core.full_node.stores.test_full_node_store,tests.core.full_node.stores.test_hint_store,tests.core.full_node.stores.test_sync_store,tests.core.full_node.test_address_manager,tests.core.full_node.test_block_height_map,tests.core.full_node.test_conditions,tests.core.full_node.test_full_node,tests.core.full_node.test_mempool,tests.core.full_node.test_mempool_performance,tests.core.full_node.test_node_load,tests.core.full_node.test_peer_store_resolver,tests.core.full_node.test_performance,tests.core.full_node.test_transactions,tests.core.make_block_generator,tests.core.node_height,tests.core.server.test_dos,tests.core.server.test_rate_limits,tests.core.ssl.test_ssl,tests.core.test_cost_calculation,tests.core.test_crawler_rpc,tests.core.test_daemon_rpc,tests.core.test_db_conversion,tests.core.test_farmer_harvester_rpc,tests.core.test_filter,tests.core.test_full_node_rpc,tests.core.test_merkle_set,tests.core.test_setproctitle,tests.core.util.test_cached_bls,tests.core.util.test_config,tests.core.util.test_file_keyring_synchronization,tests.core.util.test_files,tests.core.util.test_keychain,tests.core.util.test_keyring_wrapper,tests.core.util.test_lru_cache,tests.core.util.test_significant_bits,tests.farmer_harvester.test_farmer_harvester,tests.generator.test_compression,tests.generator.test_generator_types,tests.generator.test_list_to_batches,tests.generator.test_rom,tests.generator.test_scan,tests.plotting.test_plot_manager,tests.pools.test_pool_cmdline,tests.pools.test_pool_config,tests.pools.test_pool_puzzles_lifecycle,tests.pools.test_pool_rpc,tests.pools.test_wallet_pool_store,tests.setup_nodes,tests.setup_services,tests.simulation.test_simulation,tests.time_out_assert,tests.tools.test_full_sync,tests.tools.test_run_block,tests.util.alert_server,tests.util.benchmark_cost,tests.util.blockchain,tests.util.build_network_protocol_files,tests.util.db_connection,tests.util.generator_tools_testing,tests.util.keyring,tests.util.key_tool,tests.util.misc,tests.util.network,tests.util.rpc,tests.util.test_full_block_utils,tests.util.test_lock_queue,tests.util.test_network_protocol_files,tests.util.test_struct_stream,tests.wallet.cat_wallet.test_cat_lifecycle,tests.wallet.cat_wallet.test_cat_wallet,tests.wallet.cat_wallet.test_offer_lifecycle,tests.wallet.cat_wallet.test_trades,tests.wallet.did_wallet.test_did,tests.wallet.did_wallet.test_did_rpc,tests.wallet.rl_wallet.test_rl_rpc,tests.wallet.rl_wallet.test_rl_wallet,tests.wallet.rpc.test_wallet_rpc,tests.wallet.simple_sync.test_simple_sync_protocol,tests.wallet.sync.test_wallet_sync,tests.wallet.test_bech32m,tests.wallet.test_chinillalisp,tests.wallet.test_puzzle_store,tests.wallet.test_singleton,tests.wallet.test_singleton_lifecycle,tests.wallet.test_singleton_lifecycle_fast,tests.wallet.test_taproot,tests.w
allet.test_wallet,tests.wallet.test_wallet_blockchain,tests.wallet.test_wallet_interested_store,tests.wallet.test_wallet_key_val_store,tests.wallet.test_wallet_user_store,tests.wallet_tools,tests.weight_proof.test_weight_proof,tools.analyze-chain,tools.run_block,tools.test_full_sync] disallow_any_generics = False disallow_subclassing_any = False disallow_untyped_calls = False diff --git a/pytest.ini b/pytest.ini index 953e5cbf4d8a..628c1af61be8 100644 --- a/pytest.ini +++ b/pytest.ini @@ -21,3 +21,5 @@ filterwarnings = ignore:Exception ignored in:pytest.PytestUnraisableExceptionWarning ignore:cannot collect test class:pytest.PytestCollectionWarning ignore:The loop argument is deprecated since Python 3\.8, and scheduled for removal in Python 3\.10.:DeprecationWarning + ignore:The distutils package is deprecated:DeprecationWarning + ignore:There is no current event loop:DeprecationWarning diff --git a/setup.py b/setup.py index 62ab8cc3d4f5..c067f86d2be3 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,6 @@ from setuptools import setup dependencies = [ - "multidict==5.1.0", # Avoid 5.2.0 due to Avast "aiofiles==0.7.0", # Async IO for files "blspy==1.0.9", # Signature library "chiavdf==1.0.5", # timelord and vdf verification @@ -11,7 +10,7 @@ "clvm_tools==0.4.4", # Currying, Program.to, other conveniences "chia_rs==0.1.1", "clvm-tools-rs==0.1.7", # Rust implementation of clvm_tools - "aiohttp==3.7.4", # HTTP server for full node rpc + "aiohttp==3.8.1", # HTTP server for full node rpc "aiosqlite==0.17.0", # asyncio wrapper for sqlite, to store blocks "bitstring==3.1.9", # Binary data management library "colorama==0.4.4", # Colorizes terminal output @@ -24,8 +23,8 @@ "keyrings.cryptfile==1.3.4", # Secure storage for keys on Linux (Will be replaced) # "keyrings.cryptfile==1.3.8", # Secure storage for keys on Linux (Will be replaced) # See https://github.com/frispete/keyrings.cryptfile/issues/15 - "PyYAML==5.4.1", # Used for config file format - "setproctitle==1.2.2", # Gives the chinilla processes readable names + "PyYAML==6.0", # Used for config file format + "setproctitle==1.2.3", # Gives the chia processes readable names "sortedcontainers==2.4.0", # For maintaining sorted mempools # TODO: when moving to click 8 remove the pinning of black noted below "click==7.1.2", # For the CLI @@ -57,6 +56,7 @@ "black==21.12b0", "aiohttp_cors", # For blackd "ipython", # For asyncio debugging + "pyinstaller==4.9", "types-aiofiles", "types-click", "types-cryptography", diff --git a/tests/block_tools.py b/tests/block_tools.py index 38aa0e5dcae9..5b936da97e35 100644 --- a/tests/block_tools.py +++ b/tests/block_tools.py @@ -50,7 +50,7 @@ from chinilla.full_node.signage_point import SignagePoint from chinilla.plotting.util import PlotsRefreshParameter, PlotRefreshResult, PlotRefreshEvents, parse_plot_info from chinilla.plotting.manager import PlotManager -from chinilla.server.server import ssl_context_for_server +from chinilla.server.server import ssl_context_for_client from chinilla.types.blockchain_format.classgroup import ClassgroupElement from chinilla.types.blockchain_format.coin import Coin, hash_coin_list from chinilla.types.blockchain_format.foliage import ( @@ -369,7 +369,7 @@ def get_daemon_ssl_context(self) -> ssl.SSLContext: key_path = self.root_path / self.config["daemon_ssl"]["private_key"] ca_cert_path = self.root_path / self.config["private_ssl_ca"]["crt"] ca_key_path = self.root_path / self.config["private_ssl_ca"]["key"] - return ssl_context_for_server(ca_cert_path, ca_key_path, crt_path, 
key_path) + return ssl_context_for_client(ca_cert_path, ca_key_path, crt_path, key_path) def get_plot_signature(self, m: bytes32, plot_pk: G1Element) -> G2Element: """ diff --git a/tests/core/ssl/test_ssl.py b/tests/core/ssl/test_ssl.py index 50877295e5fb..3a47faa4fde4 100644 --- a/tests/core/ssl/test_ssl.py +++ b/tests/core/ssl/test_ssl.py @@ -46,8 +46,8 @@ async def establish_connection(server: ChinillaServer, self_hostname: str, ssl_c @pytest_asyncio.fixture(scope="function") -async def harvester_farmer(bt): - async for _ in setup_harvester_farmer(bt, test_constants, start_services=True): +async def harvester_farmer(bt, tmp_path): + async for _ in setup_harvester_farmer(bt, tmp_path, test_constants, start_services=True): yield _ diff --git a/tests/core/test_farmer_harvester_rpc.py b/tests/core/test_farmer_harvester_rpc.py index 97302f64342a..23a53b474f64 100644 --- a/tests/core/test_farmer_harvester_rpc.py +++ b/tests/core/test_farmer_harvester_rpc.py @@ -1,12 +1,28 @@ import logging +import operator import time +from math import ceil +from os import mkdir +from pathlib import Path +from shutil import copy +from typing import Any, Awaitable, Callable, Dict, List, Union, cast import pytest import pytest_asyncio from chinilla.consensus.coinbase import create_puzzlehash_for_pk +from chinilla.plot_sync.receiver import Receiver +from chinilla.plotting.util import add_plot_directory from chinilla.protocols import farmer_protocol -from chinilla.rpc.farmer_rpc_api import FarmerRpcApi +from chinilla.protocols.harvester_protocol import Plot +from chinilla.rpc.farmer_rpc_api import ( + FarmerRpcApi, + FilterItem, + PaginatedRequestData, + PlotInfoRequestData, + PlotPathRequestData, + plot_matches_filter, +) from chinilla.rpc.farmer_rpc_client import FarmerRpcClient from chinilla.rpc.harvester_rpc_api import HarvesterRpcApi from chinilla.rpc.harvester_rpc_client import HarvesterRpcClient @@ -17,7 +33,11 @@ from chinilla.util.config import load_config, lock_and_load_config, save_config from chinilla.util.hash import std_hash from chinilla.util.ints import uint8, uint16, uint32, uint64 +from chinilla.util.misc import get_list_or_len +from chinilla.util.streamable import dataclass_from_dict from chinilla.wallet.derive_keys import master_sk_to_wallet_sk +from tests.block_tools import get_plot_dir +from tests.plot_sync.test_delta import dummy_plot from tests.setup_nodes import setup_harvester_farmer, test_constants from tests.time_out_assert import time_out_assert, time_out_assert_custom_interval from tests.util.rpc import validate_get_routes @@ -26,9 +46,17 @@ log = logging.getLogger(__name__) +async def wait_for_plot_sync(receiver: Receiver, previous_last_sync_id: uint64) -> None: + def wait(): + current_last_sync_id = receiver.last_sync().sync_id + return current_last_sync_id != 0 and current_last_sync_id != previous_last_sync_id + + await time_out_assert(30, wait) + + @pytest_asyncio.fixture(scope="function") -async def harvester_farmer_simulation(bt): - async for _ in setup_harvester_farmer(bt, test_constants, start_services=True): +async def harvester_farmer_simulation(bt, tmp_path): + async for _ in setup_harvester_farmer(bt, tmp_path, test_constants, start_services=True): yield _ @@ -102,8 +130,9 @@ async def test_get_routes(harvester_farmer_environment): await validate_get_routes(harvester_rpc_client, harvester_rpc_api) +@pytest.mark.parametrize("endpoint", ["get_harvesters", "get_harvesters_summary"]) @pytest.mark.asyncio -async def test_farmer_get_harvesters(harvester_farmer_environment): 
+async def test_farmer_get_harvesters_and_summary(harvester_farmer_environment, endpoint: str): ( farmer_service, farmer_rpc_api, @@ -114,26 +143,42 @@ async def test_farmer_get_harvesters(harvester_farmer_environment): ) = harvester_farmer_environment harvester = harvester_service._node - num_plots = 0 + harvester_plots = [] async def non_zero_plots() -> bool: res = await harvester_rpc_client.get_plots() - nonlocal num_plots - num_plots = len(res["plots"]) - return num_plots > 0 + nonlocal harvester_plots + harvester_plots = res["plots"] + return len(harvester_plots) > 0 await time_out_assert(10, non_zero_plots) async def test_get_harvesters(): + nonlocal harvester_plots harvester.plot_manager.trigger_refresh() await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False) - farmer_res = await farmer_rpc_client.get_harvesters() + farmer_res = await getattr(farmer_rpc_client, endpoint)() + if len(list(farmer_res["harvesters"])) != 1: log.error(f"test_get_harvesters: invalid harvesters {list(farmer_res['harvesters'])}") return False - if len(list(farmer_res["harvesters"][0]["plots"])) != num_plots: - log.error(f"test_get_harvesters: invalid plots {list(farmer_res['harvesters'])}") + + if farmer_res["harvesters"][0]["last_sync_time"] is None: + log.error(f"test_get_harvesters: sync not done {list(farmer_res['harvesters'])}") return False + + harvester_dict = farmer_res["harvesters"][0] + counts_only: bool = endpoint == "get_harvesters_summary" + + if not counts_only: + harvester_dict["plots"] = sorted(harvester_dict["plots"], key=lambda item: item["filename"]) + harvester_plots = sorted(harvester_plots, key=lambda item: item["filename"]) + + assert harvester_dict["plots"] == get_list_or_len(harvester_plots, counts_only) + assert harvester_dict["failed_to_open_filenames"] == get_list_or_len([], counts_only) + assert harvester_dict["no_key_filenames"] == get_list_or_len([], counts_only) + assert harvester_dict["duplicates"] == get_list_or_len([], counts_only) + return True await time_out_assert_custom_interval(30, 1, test_get_harvesters) @@ -275,3 +320,218 @@ async def test_farmer_get_pool_state(harvester_farmer_environment, self_hostname for pool_dict in client_pool_state["pool_state"]: for key in ["points_found_24h", "points_acknowledged_24h"]: assert pool_dict[key][0] == list(since_24h) + + +@pytest.mark.asyncio +async def test_farmer_get_pool_state_plot_count(harvester_farmer_environment, self_hostname: str) -> None: + ( + farmer_service, + farmer_rpc_api, + farmer_rpc_client, + harvester_service, + harvester_rpc_api, + harvester_rpc_client, + ) = harvester_farmer_environment + farmer_api = farmer_service._api + + async def wait_for_plot_sync() -> bool: + try: + return (await farmer_rpc_client.get_harvesters_summary())["harvesters"][0]["plots"] > 0 + except Exception: + return False + + await time_out_assert(15, wait_for_plot_sync, True) + + assert len((await farmer_rpc_client.get_pool_state())["pool_state"]) == 0 + + pool_contract_puzzle_hash: bytes32 = bytes32.from_hexstr( + "1b9d1eaa3c6a9b27cd90ad9070eb012794a74b277446417bc7b904145010c087" + ) + pool_list = [ + { + "launcher_id": "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa", + "owner_public_key": "aa11e92274c0f6a2449fd0c7cfab4a38f943289dbe2214c808b36390c34eacfaa1d4c8f3c6ec582ac502ff32228679a0", # noqa + "payout_instructions": "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8", + "pool_url": self_hostname, + "p2_singleton_puzzle_hash": pool_contract_puzzle_hash.hex(), + 
"target_puzzle_hash": "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58", + } + ] + + root_path = farmer_api.farmer._root_path + with lock_and_load_config(root_path, "config.yaml") as config: + config["pool"]["pool_list"] = pool_list + save_config(root_path, "config.yaml", config) + await farmer_api.farmer.update_pool_state() + + pool_plot_count = (await farmer_rpc_client.get_pool_state())["pool_state"][0]["plot_count"] + assert pool_plot_count == 5 + + # TODO: Maybe improve this to not remove from Receiver directly but instead from the harvester and then wait for + # plot sync event. + async def remove_all_and_validate() -> bool: + nonlocal pool_plot_count + receiver = farmer_api.farmer.plot_sync_receivers[harvester_service._server.node_id] + for path, plot in receiver.plots().copy().items(): + if plot.pool_contract_puzzle_hash == pool_contract_puzzle_hash: + del receiver.plots()[path] + pool_plot_count -= 1 + plot_count = (await farmer_rpc_client.get_pool_state())["pool_state"][0]["plot_count"] + assert plot_count == pool_plot_count + return plot_count + + await time_out_assert(15, remove_all_and_validate, False) + assert (await farmer_rpc_client.get_pool_state())["pool_state"][0]["plot_count"] == 0 + + +@pytest.mark.parametrize( + "filter_item, match", + [ + (FilterItem("filename", "1"), True), + (FilterItem("filename", "12"), True), + (FilterItem("filename", "123"), True), + (FilterItem("filename", "1234"), False), + (FilterItem("filename", "23"), True), + (FilterItem("filename", "3"), True), + (FilterItem("filename", "0123"), False), + (FilterItem("pool_contract_puzzle_hash", None), True), + (FilterItem("pool_contract_puzzle_hash", "1"), False), + ], +) +def test_plot_matches_filter(filter_item: FilterItem, match: bool): + assert plot_matches_filter(dummy_plot("123"), filter_item) == match + + +@pytest.mark.parametrize( + "endpoint, filtering, sort_key, reverse, expected_plot_count", + [ + (FarmerRpcClient.get_harvester_plots_valid, [], "filename", False, 20), + (FarmerRpcClient.get_harvester_plots_valid, [], "size", True, 20), + ( + FarmerRpcClient.get_harvester_plots_valid, + [FilterItem("pool_contract_puzzle_hash", None)], + "file_size", + True, + 15, + ), + ( + FarmerRpcClient.get_harvester_plots_valid, + [FilterItem("size", "20"), FilterItem("filename", "81")], + "plot_id", + False, + 4, + ), + (FarmerRpcClient.get_harvester_plots_invalid, [], None, True, 13), + (FarmerRpcClient.get_harvester_plots_invalid, ["invalid_0"], None, False, 6), + (FarmerRpcClient.get_harvester_plots_invalid, ["inval", "lid_1/"], None, False, 2), + (FarmerRpcClient.get_harvester_plots_keys_missing, [], None, True, 3), + (FarmerRpcClient.get_harvester_plots_keys_missing, ["keys_missing_1"], None, False, 2), + (FarmerRpcClient.get_harvester_plots_duplicates, [], None, True, 7), + (FarmerRpcClient.get_harvester_plots_duplicates, ["duplicates_0"], None, False, 3), + ], +) +@pytest.mark.asyncio +async def test_farmer_get_harvester_plots_endpoints( + harvester_farmer_environment: Any, + endpoint: Callable[[FarmerRpcClient, PaginatedRequestData], Awaitable[Dict[str, Any]]], + filtering: Union[List[FilterItem], List[str]], + sort_key: str, + reverse: bool, + expected_plot_count: int, +) -> None: + ( + farmer_service, + farmer_rpc_api, + farmer_rpc_client, + harvester_service, + harvester_rpc_api, + harvester_rpc_client, + ) = harvester_farmer_environment + + harvester = harvester_service._node + harvester_id = harvester_service._server.node_id + receiver = 
farmer_service._api.farmer.plot_sync_receivers[harvester_id] + + if receiver.initial_sync(): + await wait_for_plot_sync(receiver, receiver.last_sync().sync_id) + + harvester_plots = (await harvester_rpc_client.get_plots())["plots"] + plots = [] + + request: PaginatedRequestData + if endpoint == FarmerRpcClient.get_harvester_plots_valid: + request = PlotInfoRequestData(harvester_id, 0, -1, cast(List[FilterItem], filtering), sort_key, reverse) + else: + request = PlotPathRequestData(harvester_id, 0, -1, cast(List[str], filtering), reverse) + + def add_plot_directories(prefix: str, count: int) -> List[Path]: + new_paths = [] + for i in range(count): + new_paths.append(harvester.root_path / f"{prefix}_{i}") + mkdir(new_paths[-1]) + add_plot_directory(harvester.root_path, str(new_paths[-1])) + return new_paths + + # Generate the plot data and + if endpoint == FarmerRpcClient.get_harvester_plots_valid: + plots = harvester_plots + elif endpoint == FarmerRpcClient.get_harvester_plots_invalid: + invalid_paths = add_plot_directories("invalid", 3) + for dir_index, r in [(0, range(0, 6)), (1, range(6, 8)), (2, range(8, 13))]: + plots += [str(invalid_paths[dir_index] / f"{i}.plot") for i in r] + for plot in plots: + with open(plot, "w"): + pass + elif endpoint == FarmerRpcClient.get_harvester_plots_keys_missing: + keys_missing_plots = [path for path in (Path(get_plot_dir()) / "not_in_keychain").iterdir() if path.is_file()] + keys_missing_paths = add_plot_directories("keys_missing", 2) + for dir_index, copy_plots in [(0, keys_missing_plots[:1]), (1, keys_missing_plots[1:3])]: + for plot in copy_plots: + copy(plot, keys_missing_paths[dir_index]) + plots.append(str(keys_missing_paths[dir_index] / plot.name)) + + elif endpoint == FarmerRpcClient.get_harvester_plots_duplicates: + duplicate_paths = add_plot_directories("duplicates", 2) + for dir_index, r in [(0, range(0, 3)), (1, range(3, 7))]: + for i in r: + plot_path = Path(harvester_plots[i]["filename"]) + plots.append(str(duplicate_paths[dir_index] / plot_path.name)) + copy(plot_path, plots[-1]) + + # Sort and filter the data + if endpoint == FarmerRpcClient.get_harvester_plots_valid: + for filter_item in filtering: + assert isinstance(filter_item, FilterItem) + plots = [plot for plot in plots if plot_matches_filter(dataclass_from_dict(Plot, plot), filter_item)] + plots.sort(key=operator.itemgetter(sort_key, "plot_id"), reverse=reverse) + else: + for filter_item in filtering: + plots = [plot for plot in plots if filter_item in plot] + plots.sort(reverse=reverse) + + total_count = len(plots) + assert total_count == expected_plot_count + + last_sync_id = receiver.last_sync().sync_id + + harvester.plot_manager.trigger_refresh() + harvester.plot_manager.start_refreshing() + + await wait_for_plot_sync(receiver, last_sync_id) + + for page_size in [1, int(total_count / 2), total_count - 1, total_count, total_count + 1, 100]: + request.page_size = page_size + expected_page_count = ceil(total_count / page_size) + for page in range(expected_page_count): + request.page = page + page_result = await endpoint(farmer_rpc_client, request) + offset = page * page_size + expected_plots = plots[offset : offset + page_size] + assert page_result == { + "success": True, + "node_id": harvester_id.hex(), + "page": page, + "page_count": expected_page_count, + "total_count": total_count, + "plots": expected_plots, + } diff --git a/tests/core/util/test_config.py b/tests/core/util/test_config.py index 2397c607edfd..14e146f25846 100644 --- a/tests/core/util/test_config.py +++ 
b/tests/core/util/test_config.py @@ -138,7 +138,7 @@ def run_reader_and_writer_tasks(root_path: Path, default_config: Dict): Subprocess entry point. This function spins off threads to perform read/write tasks concurrently, possibly leading to synchronization issues accessing config data. """ - asyncio.get_event_loop().run_until_complete(create_reader_and_writer_tasks(root_path, default_config)) + asyncio.run(create_reader_and_writer_tasks(root_path, default_config)) @pytest.fixture(scope="function") diff --git a/tests/core/util/test_streamable.py b/tests/core/util/test_streamable.py index dc424d1d49b8..1201f9987252 100644 --- a/tests/core/util/test_streamable.py +++ b/tests/core/util/test_streamable.py @@ -1,10 +1,13 @@ +from __future__ import annotations + +import io from dataclasses import dataclass from typing import Dict, List, Optional, Tuple -import io -import pytest +import pytest from clvm_tools import binutils from pytest import raises +from typing_extensions import Literal from chinilla.protocols.wallet_protocol import RespondRemovals from chinilla.types.blockchain_format.coin import Coin @@ -16,19 +19,20 @@ from chinilla.util.streamable import ( DefinitionError, Streamable, - streamable, + is_type_List, + is_type_SpecificOptional, parse_bool, - parse_uint32, - write_uint32, - parse_optional, parse_bytes, parse_list, - parse_tuple, + parse_optional, parse_size_hints, parse_str, - is_type_List, - is_type_SpecificOptional, + parse_tuple, + parse_uint32, + streamable, + write_uint32, ) +from tests.block_tools import BlockTools from tests.setup_nodes import test_constants @@ -59,22 +63,26 @@ class TestClassDict(Streamable): a: Dict[str, str] +@dataclass(frozen=True) +class DataclassOnly: + a: uint8 + + def test_pure_dataclass_not_supported() -> None: - @dataclass(frozen=True) - class DataClassOnly: - a: uint8 with raises(NotImplementedError): @streamable @dataclass(frozen=True) class TestClassDataclass(Streamable): - a: DataClassOnly + a: DataclassOnly + + +class PlainClass: + a: uint8 def test_plain_class_not_supported() -> None: - class PlainClass: - a: uint8 with raises(NotImplementedError): @@ -84,74 +92,81 @@ class TestClassPlain(Streamable): a: PlainClass -def test_basic_list(): +def test_basic_list() -> None: a = [1, 2, 3] assert is_type_List(type(a)) assert is_type_List(List) assert is_type_List(List[int]) assert is_type_List(List[uint8]) assert is_type_List(list) - assert not is_type_List(Tuple) + assert not is_type_List(type(Tuple)) assert not is_type_List(tuple) assert not is_type_List(dict) -def test_not_lists(): +def test_not_lists() -> None: assert not is_type_List(Dict) -def test_basic_optional(): +def test_basic_optional() -> None: assert is_type_SpecificOptional(Optional[int]) assert is_type_SpecificOptional(Optional[Optional[int]]) assert not is_type_SpecificOptional(List[int]) -def test_StrictDataClass(): +def test_StrictDataClass() -> None: @streamable @dataclass(frozen=True) class TestClass1(Streamable): a: uint8 b: str - good: TestClass1 = TestClass1(24, "!@12") + # we want to test invalid here, hence the ignore. + good: TestClass1 = TestClass1(24, "!@12") # type: ignore[arg-type] assert TestClass1.__name__ == "TestClass1" assert good assert good.a == 24 assert good.b == "!@12" - good2 = TestClass1(52, bytes([1, 2, 3])) + # we want to test invalid here, hence the ignore. 
+ good2 = TestClass1(52, bytes([1, 2, 3])) # type: ignore[arg-type] assert good2.b == str(bytes([1, 2, 3])) -def test_StrictDataClassBad(): +def test_StrictDataClassBad() -> None: @streamable @dataclass(frozen=True) class TestClass2(Streamable): a: uint8 b = 0 - assert TestClass2(25) + # we want to test invalid here, hence the ignore. + assert TestClass2(25) # type: ignore[arg-type] + # we want to test invalid here, hence the ignore. with raises(TypeError): - TestClass2(1, 2) # pylint: disable=too-many-function-args + TestClass2(1, 2) # type: ignore[call-arg,arg-type] # pylint: disable=too-many-function-args -def test_StrictDataClassLists(): +def test_StrictDataClassLists() -> None: @streamable @dataclass(frozen=True) class TestClass(Streamable): a: List[uint8] b: List[List[uint8]] - assert TestClass([1, 2, 3], [[uint8(200), uint8(25)], [uint8(25)]]) + # we want to test invalid here, hence the ignore. + assert TestClass([1, 2, 3], [[uint8(200), uint8(25)], [uint8(25)]]) # type: ignore[list-item] + # we want to test invalid here, hence the ignore. with raises(ValueError): - TestClass({"1": 1}, [[uint8(200), uint8(25)], [uint8(25)]]) + TestClass({"1": 1}, [[uint8(200), uint8(25)], [uint8(25)]]) # type: ignore[arg-type] + # we want to test invalid here, hence the ignore. with raises(ValueError): - TestClass([1, 2, 3], [uint8(200), uint8(25)]) + TestClass([1, 2, 3], [uint8(200), uint8(25)]) # type: ignore[list-item] -def test_StrictDataClassOptional(): +def test_StrictDataClassOptional() -> None: @streamable @dataclass(frozen=True) class TestClass(Streamable): @@ -160,11 +175,12 @@ class TestClass(Streamable): c: Optional[Optional[uint8]] d: Optional[Optional[uint8]] - good = TestClass(12, None, 13, None) + # we want to test invalid here, hence the ignore. + good = TestClass(12, None, 13, None) # type: ignore[arg-type] assert good -def test_basic(): +def test_basic() -> None: @streamable @dataclass(frozen=True) class TestClass(Streamable): @@ -176,13 +192,14 @@ class TestClass(Streamable): f: Optional[uint32] g: Tuple[uint32, str, bytes] - a = TestClass(24, 352, [1, 2, 4], [[1, 2, 3], [3, 4]], 728, None, (383, "hello", b"goodbye")) + # we want to test invalid here, hence the ignore. 
+ a = TestClass(24, 352, [1, 2, 4], [[1, 2, 3], [3, 4]], 728, None, (383, "hello", b"goodbye")) # type: ignore[arg-type,list-item] # noqa: E501 b: bytes = bytes(a) assert a == TestClass.from_bytes(b) -def test_variable_size(): +def test_variable_size() -> None: @streamable @dataclass(frozen=True) class TestClass2(Streamable): @@ -201,7 +218,7 @@ class TestClass3(Streamable): a: int -def test_json(bt): +def test_json(bt: BlockTools) -> None: block = bt.create_genesis_block(test_constants, bytes32([0] * 32), uint64(0)) dict_block = block.to_json_dict() assert FullBlock.from_json_dict(dict_block) == block @@ -226,42 +243,44 @@ class OptionalTestClass(Streamable): (None, None, None), ], ) -def test_optional_json(a: Optional[str], b: Optional[bool], c: Optional[List[Optional[str]]]): +def test_optional_json(a: Optional[str], b: Optional[bool], c: Optional[List[Optional[str]]]) -> None: obj: OptionalTestClass = OptionalTestClass.from_json_dict({"a": a, "b": b, "c": c}) assert obj.a == a assert obj.b == b assert obj.c == c -def test_recursive_json(): - @streamable - @dataclass(frozen=True) - class TestClass1(Streamable): - a: List[uint32] +@streamable +@dataclass(frozen=True) +class TestClassRecursive1(Streamable): + a: List[uint32] + + +@streamable +@dataclass(frozen=True) +class TestClassRecursive2(Streamable): + a: uint32 + b: List[Optional[List[TestClassRecursive1]]] + c: bytes32 - @streamable - @dataclass(frozen=True) - class TestClass2(Streamable): - a: uint32 - b: List[Optional[List[TestClass1]]] - c: bytes32 - tc1_a = TestClass1([uint32(1), uint32(2)]) - tc1_b = TestClass1([uint32(4), uint32(5)]) - tc1_c = TestClass1([uint32(7), uint32(8)]) +def test_recursive_json() -> None: + tc1_a = TestClassRecursive1([uint32(1), uint32(2)]) + tc1_b = TestClassRecursive1([uint32(4), uint32(5)]) + tc1_c = TestClassRecursive1([uint32(7), uint32(8)]) - tc2 = TestClass2(uint32(5), [[tc1_a], [tc1_b, tc1_c], None], bytes32(bytes([1] * 32))) - assert TestClass2.from_json_dict(tc2.to_json_dict()) == tc2 + tc2 = TestClassRecursive2(uint32(5), [[tc1_a], [tc1_b, tc1_c], None], bytes32(bytes([1] * 32))) + assert TestClassRecursive2.from_json_dict(tc2.to_json_dict()) == tc2 -def test_recursive_types(): +def test_recursive_types() -> None: coin: Optional[Coin] = None l1 = [(bytes32([2] * 32), coin)] rr = RespondRemovals(uint32(1), bytes32([1] * 32), l1, None) RespondRemovals(rr.height, rr.header_hash, rr.coins, rr.proofs) -def test_ambiguous_deserialization_optionals(): +def test_ambiguous_deserialization_optionals() -> None: with raises(AssertionError): SubEpochChallengeSegment.from_bytes(b"\x00\x00\x00\x03\xff\xff\xff\xff") @@ -278,7 +297,7 @@ class TestClassOptional(Streamable): TestClassOptional.from_bytes(bytes([1, 2])) -def test_ambiguous_deserialization_int(): +def test_ambiguous_deserialization_int() -> None: @streamable @dataclass(frozen=True) class TestClassUint(Streamable): @@ -289,7 +308,7 @@ class TestClassUint(Streamable): TestClassUint.from_bytes(b"\x00\x00") -def test_ambiguous_deserialization_list(): +def test_ambiguous_deserialization_list() -> None: @streamable @dataclass(frozen=True) class TestClassList(Streamable): @@ -300,7 +319,7 @@ class TestClassList(Streamable): TestClassList.from_bytes(bytes([0, 0, 100, 24])) -def test_ambiguous_deserialization_tuple(): +def test_ambiguous_deserialization_tuple() -> None: @streamable @dataclass(frozen=True) class TestClassTuple(Streamable): @@ -311,7 +330,7 @@ class TestClassTuple(Streamable): TestClassTuple.from_bytes(bytes([0, 0, 100, 24])) -def 
test_ambiguous_deserialization_str(): +def test_ambiguous_deserialization_str() -> None: @streamable @dataclass(frozen=True) class TestClassStr(Streamable): @@ -322,7 +341,7 @@ class TestClassStr(Streamable): TestClassStr.from_bytes(bytes([0, 0, 100, 24, 52])) -def test_ambiguous_deserialization_bytes(): +def test_ambiguous_deserialization_bytes() -> None: @streamable @dataclass(frozen=True) class TestClassBytes(Streamable): @@ -339,7 +358,7 @@ class TestClassBytes(Streamable): TestClassBytes.from_bytes(bytes([0, 0, 0, 2, 52, 21])) -def test_ambiguous_deserialization_bool(): +def test_ambiguous_deserialization_bool() -> None: @streamable @dataclass(frozen=True) class TestClassBool(Streamable): @@ -353,13 +372,13 @@ class TestClassBool(Streamable): TestClassBool.from_bytes(bytes([1])) -def test_ambiguous_deserialization_program(): +def test_ambiguous_deserialization_program() -> None: @streamable @dataclass(frozen=True) class TestClassProgram(Streamable): a: Program - program = Program.to(binutils.assemble("()")) + program = Program.to(binutils.assemble("()")) # type: ignore[no-untyped-call] # TODO, add typing in clvm_tools TestClassProgram.from_bytes(bytes(program)) @@ -367,7 +386,7 @@ class TestClassProgram(Streamable): TestClassProgram.from_bytes(bytes(program) + b"9") -def test_streamable_empty(): +def test_streamable_empty() -> None: @streamable @dataclass(frozen=True) class A(Streamable): @@ -376,7 +395,7 @@ class A(Streamable): assert A.from_bytes(bytes(A())) == A() -def test_parse_bool(): +def test_parse_bool() -> None: assert not parse_bool(io.BytesIO(b"\x00")) assert parse_bool(io.BytesIO(b"\x01")) @@ -391,7 +410,7 @@ def test_parse_bool(): parse_bool(io.BytesIO(b"\x02")) -def test_uint32(): +def test_uint32() -> None: assert parse_uint32(io.BytesIO(b"\x00\x00\x00\x00")) == 0 assert parse_uint32(io.BytesIO(b"\x00\x00\x00\x01")) == 1 assert parse_uint32(io.BytesIO(b"\x00\x00\x00\x01"), "little") == 16777216 @@ -399,7 +418,7 @@ def test_uint32(): assert parse_uint32(io.BytesIO(b"\x01\x00\x00\x00"), "little") == 1 assert parse_uint32(io.BytesIO(b"\xff\xff\xff\xff"), "little") == 4294967295 - def test_write(value, byteorder): + def test_write(value: int, byteorder: Literal["little", "big"]) -> None: f = io.BytesIO() write_uint32(f, uint32(value), byteorder) f.seek(0) @@ -420,7 +439,7 @@ def test_write(value, byteorder): parse_uint32(io.BytesIO(b"\x00\x00\x00")) -def test_parse_optional(): +def test_parse_optional() -> None: assert parse_optional(io.BytesIO(b"\x00"), parse_bool) is None assert parse_optional(io.BytesIO(b"\x01\x01"), parse_bool) assert not parse_optional(io.BytesIO(b"\x01\x00"), parse_bool) @@ -437,7 +456,7 @@ def test_parse_optional(): parse_optional(io.BytesIO(b"\xff\x00"), parse_bool) -def test_parse_bytes(): +def test_parse_bytes() -> None: assert parse_bytes(io.BytesIO(b"\x00\x00\x00\x00")) == b"" assert parse_bytes(io.BytesIO(b"\x00\x00\x00\x01\xff")) == b"\xff" @@ -463,7 +482,7 @@ def test_parse_bytes(): parse_bytes(io.BytesIO(b"\x00\x00\x02\x01" + b"a" * 512)) -def test_parse_list(): +def test_parse_list() -> None: assert parse_list(io.BytesIO(b"\x00\x00\x00\x00"), parse_bool) == [] assert parse_list(io.BytesIO(b"\x00\x00\x00\x01\x01"), parse_bool) == [True] @@ -484,7 +503,7 @@ def test_parse_list(): parse_list(io.BytesIO(b"\x00\x00\x00\x01\x02"), parse_bool) -def test_parse_tuple(): +def test_parse_tuple() -> None: assert parse_tuple(io.BytesIO(b""), []) == () assert parse_tuple(io.BytesIO(b"\x00\x00"), [parse_bool, parse_bool]) == (False, False) @@ -499,33 
+518,35 @@ def test_parse_tuple(): parse_tuple(io.BytesIO(b"\x00"), [parse_bool, parse_bool]) -def test_parse_size_hints(): - class TestFromBytes: - b: bytes +class TestFromBytes: + b: bytes - @classmethod - def from_bytes(cls, b): - ret = TestFromBytes() - ret.b = b - return ret + @classmethod + def from_bytes(cls, b: bytes) -> TestFromBytes: + ret = TestFromBytes() + ret.b = b + return ret + +class FailFromBytes: + @classmethod + def from_bytes(cls, b: bytes) -> FailFromBytes: + raise ValueError() + + +def test_parse_size_hints() -> None: assert parse_size_hints(io.BytesIO(b"1337"), TestFromBytes, 4).b == b"1337" # EOF with raises(AssertionError): parse_size_hints(io.BytesIO(b"133"), TestFromBytes, 4) - class FailFromBytes: - @classmethod - def from_bytes(cls, b): - raise ValueError() - # error in underlying type with raises(ValueError): parse_size_hints(io.BytesIO(b"1337"), FailFromBytes, 4) -def test_parse_str(): +def test_parse_str() -> None: assert parse_str(io.BytesIO(b"\x00\x00\x00\x00")) == "" assert parse_str(io.BytesIO(b"\x00\x00\x00\x01a")) == "a" @@ -551,7 +572,7 @@ def test_parse_str(): parse_str(io.BytesIO(b"\x00\x00\x02\x01" + b"a" * 512)) -def test_wrong_decorator_order(): +def test_wrong_decorator_order() -> None: with raises(DefinitionError): @@ -561,7 +582,7 @@ class WrongDecoratorOrder(Streamable): pass -def test_dataclass_not_frozen(): +def test_dataclass_not_frozen() -> None: with raises(DefinitionError): @@ -571,7 +592,7 @@ class DataclassNotFrozen(Streamable): pass -def test_dataclass_missing(): +def test_dataclass_missing() -> None: with raises(DefinitionError): @@ -580,11 +601,11 @@ class DataclassMissing(Streamable): pass -def test_streamable_inheritance_missing(): +def test_streamable_inheritance_missing() -> None: with raises(DefinitionError): - + # we want to test invalid here, hence the ignore. 
@streamable @dataclass(frozen=True) - class StreamableInheritanceMissing: + class StreamableInheritanceMissing: # type: ignore[type-var] pass diff --git a/tests/farmer_harvester/test_farmer_harvester.py b/tests/farmer_harvester/test_farmer_harvester.py index 4a3796e8bf21..5c79d06c61ac 100644 --- a/tests/farmer_harvester/test_farmer_harvester.py +++ b/tests/farmer_harvester/test_farmer_harvester.py @@ -14,8 +14,8 @@ def farmer_is_started(farmer): @pytest_asyncio.fixture(scope="function") -async def harvester_farmer_environment_no_start(bt): - async for _ in setup_harvester_farmer(bt, test_constants, start_services=False): +async def harvester_farmer_environment_no_start(bt, tmp_path): + async for _ in setup_harvester_farmer(bt, tmp_path, test_constants, start_services=False): yield _ diff --git a/tests/plot_sync/test_plot_sync.py b/tests/plot_sync/test_plot_sync.py index 3768a1bce4bd..df44f9a3ec84 100644 --- a/tests/plot_sync/test_plot_sync.py +++ b/tests/plot_sync/test_plot_sync.py @@ -18,7 +18,7 @@ from chinilla.protocols.harvester_protocol import Plot from chinilla.server.start_service import Service from chinilla.types.blockchain_format.sized_bytes import bytes32 -from chinilla.util.config import create_default_chinilla_config +from chinilla.util.config import create_default_chinilla_config, lock_and_load_config, save_config from chinilla.util.ints import uint8, uint64 from tests.block_tools import BlockTools from tests.plot_sync.util import start_harvester_service @@ -30,8 +30,8 @@ def synced(sender: Sender, receiver: Receiver, previous_last_sync_id: int) -> bool: return ( sender._last_sync_id != previous_last_sync_id - and sender._last_sync_id == receiver._last_sync_id != 0 - and receiver.state() == State.idle + and sender._last_sync_id == receiver._last_sync.sync_id != 0 + and receiver.current_sync().state == State.idle and not sender._lock.locked() ) @@ -211,7 +211,7 @@ async def run_sync_test(self) -> None: # Make sure to reset the passed flag always before a new run self.expected[self.harvesters.index(harvester)].callback_passed = False receiver._update_callback = self.plot_sync_callback - assert harvester.plot_sync_sender._last_sync_id == receiver._last_sync_id + assert harvester.plot_sync_sender._last_sync_id == receiver._last_sync.sync_id last_sync_ids.append(harvester.plot_sync_sender._last_sync_id) plot_manager.start_refreshing() plot_manager.trigger_refresh() @@ -293,6 +293,10 @@ def new_test_dir(name: str, plot_list: List[Path]) -> TestDirectory: farmer: Farmer = farmer_service._node harvesters: List[Harvester] = [await start_harvester_service(service) for service in harvester_services] for harvester in harvesters: + # Remove the default plot directory for these tests + with lock_and_load_config(harvester.root_path, "config.yaml") as config: + config["harvester"]["plot_directories"] = [] + save_config(harvester.root_path, "config.yaml", config) harvester.plot_manager.set_public_keys( bt.plot_manager.farmer_public_keys.copy(), bt.plot_manager.pool_public_keys.copy() ) diff --git a/tests/plot_sync/test_receiver.py b/tests/plot_sync/test_receiver.py index 7c7aa62d9329..6f7e336ba067 100644 --- a/tests/plot_sync/test_receiver.py +++ b/tests/plot_sync/test_receiver.py @@ -1,4 +1,6 @@ +import dataclasses import logging +import random import time from secrets import token_bytes from typing import Any, Callable, List, Tuple, Type, Union @@ -7,7 +9,7 @@ from blspy import G1Element from chinilla.plot_sync.delta import Delta -from chinilla.plot_sync.receiver import Receiver +from 
chinilla.plot_sync.receiver import Receiver, Sync from chinilla.plot_sync.util import ErrorCodes, State from chinilla.protocols.harvester_protocol import ( Plot, @@ -21,6 +23,7 @@ from chinilla.server.ws_connection import NodeType from chinilla.types.blockchain_format.sized_bytes import bytes32 from chinilla.util.ints import uint8, uint32, uint64 +from chinilla.util.misc import get_list_or_len from chinilla.util.streamable import _T_Streamable from tests.plot_sync.util import get_dummy_connection @@ -30,11 +33,8 @@ def assert_default_values(receiver: Receiver) -> None: - assert receiver.state() == State.idle - assert receiver.expected_sync_id() == 0 - assert receiver.expected_message_id() == 0 - assert receiver.last_sync_id() == 0 - assert receiver.last_sync_time() == 0 + assert receiver.current_sync() == Sync() + assert receiver.last_sync() == Sync() assert receiver.plots() == {} assert receiver.invalid() == [] assert receiver.keys_missing() == [] @@ -106,25 +106,25 @@ def post_function_validate(receiver: Receiver, data: Union[List[Plot], List[str] if expected_state == State.loaded: for plot_info in data: assert type(plot_info) == Plot - assert plot_info.filename in receiver._delta.valid.additions + assert plot_info.filename in receiver._current_sync.delta.valid.additions elif expected_state == State.removed: for path in data: - assert path in receiver._delta.valid.removals + assert path in receiver._current_sync.delta.valid.removals elif expected_state == State.invalid: for path in data: - assert path in receiver._delta.invalid.additions + assert path in receiver._current_sync.delta.invalid.additions elif expected_state == State.keys_missing: for path in data: - assert path in receiver._delta.keys_missing.additions + assert path in receiver._current_sync.delta.keys_missing.additions elif expected_state == State.duplicates: for path in data: - assert path in receiver._delta.duplicates.additions + assert path in receiver._current_sync.delta.duplicates.additions @pytest.mark.asyncio async def run_sync_step(receiver: Receiver, sync_step: SyncStepData, expected_state: State) -> None: - assert receiver.state() == expected_state - last_sync_time_before = receiver._last_sync_time + assert receiver.current_sync().state == expected_state + last_sync_time_before = receiver._last_sync.time_done # For the list types, invoke the trigger function in batches if sync_step.payload_type == PlotSyncPlotList or sync_step.payload_type == PlotSyncPathList: step_data, _ = sync_step.args @@ -132,21 +132,27 @@ async def run_sync_step(receiver: Receiver, sync_step: SyncStepData, expected_st # Invoke batches of: 1, 2, 3, 4 items and validate the data against plot store before and after indexes = [0, 1, 3, 6, 10] for i in range(0, len(indexes) - 1): + plots_processed_before = receiver.current_sync().plots_processed invoke_data = step_data[indexes[i] : indexes[i + 1]] pre_function_validate(receiver, invoke_data, expected_state) await sync_step.function( create_payload(sync_step.payload_type, False, invoke_data, i == (len(indexes) - 2)) ) post_function_validate(receiver, invoke_data, expected_state) + if expected_state == State.removed: + assert receiver.current_sync().plots_processed == plots_processed_before + else: + assert receiver.current_sync().plots_processed == plots_processed_before + len(invoke_data) else: # For Start/Done just invoke it.
await sync_step.function(create_payload(sync_step.payload_type, sync_step.state == State.idle, *sync_step.args)) # Make sure we moved to the next state - assert receiver.state() != expected_state + assert receiver.current_sync().state != expected_state if sync_step.payload_type == PlotSyncDone: - assert receiver._last_sync_time != last_sync_time_before + assert receiver._last_sync.time_done != last_sync_time_before + assert receiver.last_sync().plots_processed == receiver.last_sync().plots_total else: - assert receiver._last_sync_time == last_sync_time_before + assert receiver._last_sync.time_done == last_sync_time_before def plot_sync_setup() -> Tuple[Receiver, List[SyncStepData]]: @@ -163,7 +169,7 @@ def plot_sync_setup() -> Tuple[Receiver, List[SyncStepData]]: pool_contract_puzzle_hash=None, pool_public_key=None, plot_public_key=G1Element(), - file_size=uint64(0), + file_size=uint64(random.randint(0, 100)), time_modified=uint64(0), ) for x in path_list @@ -171,6 +177,7 @@ def plot_sync_setup() -> Tuple[Receiver, List[SyncStepData]]: # Manually add the plots we want to remove in tests receiver._plots = {plot_info.filename: plot_info for plot_info in plot_info_list[0:10]} + receiver._total_plot_size = sum(plot.file_size for plot in receiver._plots.values()) sync_steps: List[SyncStepData] = [ SyncStepData(State.idle, receiver.sync_started, PlotSyncStart, False, uint64(0), uint32(len(plot_info_list))), @@ -194,37 +201,45 @@ async def test_reset() -> None: receiver, sync_steps = plot_sync_setup() connection_before = receiver.connection() # Assign some dummy values - receiver._sync_state = State.done - receiver._expected_sync_id = uint64(1) - receiver._expected_message_id = uint64(1) - receiver._last_sync_id = uint64(1) - receiver._last_sync_time = time.time() + receiver._current_sync.state = State.done + receiver._current_sync.sync_id = uint64(1) + receiver._current_sync.next_message_id = uint64(1) + receiver._current_sync.plots_processed = uint32(1) + receiver._current_sync.plots_total = uint32(1) + receiver._current_sync.delta.valid.additions = receiver.plots().copy() + receiver._current_sync.delta.valid.removals = ["1"] + receiver._current_sync.delta.invalid.additions = ["1"] + receiver._current_sync.delta.invalid.removals = ["1"] + receiver._current_sync.delta.keys_missing.additions = ["1"] + receiver._current_sync.delta.keys_missing.removals = ["1"] + receiver._current_sync.delta.duplicates.additions = ["1"] + receiver._current_sync.delta.duplicates.removals = ["1"] + receiver._current_sync.time_done = time.time() + receiver._last_sync = dataclasses.replace(receiver._current_sync) receiver._invalid = ["1"] receiver._keys_missing = ["1"] - receiver._delta.valid.additions = receiver.plots().copy() - receiver._delta.valid.removals = ["1"] - receiver._delta.invalid.additions = ["1"] - receiver._delta.invalid.removals = ["1"] - receiver._delta.keys_missing.additions = ["1"] - receiver._delta.keys_missing.removals = ["1"] - receiver._delta.duplicates.additions = ["1"] - receiver._delta.duplicates.removals = ["1"] + + receiver._last_sync.sync_id = uint64(1) # Call `reset` and make sure all expected values are set back to their defaults. 
receiver.reset() assert_default_values(receiver) - assert receiver._delta == Delta() + assert receiver._current_sync.delta == Delta() # Connection should remain assert receiver.connection() == connection_before +@pytest.mark.parametrize("counts_only", [True, False]) @pytest.mark.asyncio -async def test_to_dict() -> None: +async def test_to_dict(counts_only: bool) -> None: receiver, sync_steps = plot_sync_setup() - plot_sync_dict_1 = receiver.to_dict() - assert "plots" in plot_sync_dict_1 and len(plot_sync_dict_1["plots"]) == 10 - assert "failed_to_open_filenames" in plot_sync_dict_1 and len(plot_sync_dict_1["failed_to_open_filenames"]) == 0 - assert "no_key_filenames" in plot_sync_dict_1 and len(plot_sync_dict_1["no_key_filenames"]) == 0 - assert "last_sync_time" not in plot_sync_dict_1 + plot_sync_dict_1 = receiver.to_dict(counts_only) + + assert get_list_or_len(plot_sync_dict_1["plots"], not counts_only) == 10 + assert get_list_or_len(plot_sync_dict_1["failed_to_open_filenames"], not counts_only) == 0 + assert get_list_or_len(plot_sync_dict_1["no_key_filenames"], not counts_only) == 0 + assert plot_sync_dict_1["total_plot_size"] == sum(plot.file_size for plot in receiver.plots().values()) + assert plot_sync_dict_1["syncing"] is None + assert plot_sync_dict_1["last_sync_time"] is None assert plot_sync_dict_1["connection"] == { "node_id": receiver.connection().peer_node_id, "host": receiver.connection().peer_host, @@ -232,35 +247,57 @@ async def test_to_dict() -> None: } # We should get equal dicts - plot_sync_dict_2 = receiver.to_dict() - assert plot_sync_dict_1 == plot_sync_dict_2 + assert plot_sync_dict_1 == receiver.to_dict(counts_only) + # But unequal dicts with the opposite counts_only value + assert plot_sync_dict_1 != receiver.to_dict(not counts_only) - dict_2_paths = [x.filename for x in plot_sync_dict_2["plots"]] - for plot_info in sync_steps[State.loaded].args[0]: - assert plot_info.filename not in dict_2_paths + expected_plot_files_processed: int = 0 + expected_plot_files_total: int = sync_steps[State.idle].args[2] - # Walk through all states from idle to done and run them with the test data + # Walk through all states from idle to done, run them with the test data and validate the sync progress for state in State: await run_sync_step(receiver, sync_steps[state], state) - plot_sync_dict_3 = receiver.to_dict() - dict_3_paths = [x.filename for x in plot_sync_dict_3["plots"]] - for plot_info in sync_steps[State.loaded].args[0]: - assert plot_info.filename in dict_3_paths - - for path in sync_steps[State.removed].args[0]: - assert path not in plot_sync_dict_3["plots"] - - for path in sync_steps[State.invalid].args[0]: - assert path in plot_sync_dict_3["failed_to_open_filenames"] - - for path in sync_steps[State.keys_missing].args[0]: - assert path in plot_sync_dict_3["no_key_filenames"] - - for path in sync_steps[State.duplicates].args[0]: - assert path in plot_sync_dict_3["duplicates"] + if state != State.idle and state != State.removed and state != State.done: + expected_plot_files_processed += len(sync_steps[state].args[0]) + sync_data = receiver.to_dict()["syncing"] + if state == State.done: + expected_sync_data = None + else: + expected_sync_data = { + "initial": True, + "plot_files_processed": expected_plot_files_processed, + "plot_files_total": expected_plot_files_total, + } + assert sync_data == expected_sync_data + + plot_sync_dict_3 = receiver.to_dict(counts_only) + assert get_list_or_len(sync_steps[State.loaded].args[0], counts_only) == plot_sync_dict_3["plots"] + 
assert ( + get_list_or_len(sync_steps[State.invalid].args[0], counts_only) == plot_sync_dict_3["failed_to_open_filenames"] + ) + assert get_list_or_len(sync_steps[State.keys_missing].args[0], counts_only) == plot_sync_dict_3["no_key_filenames"] + assert get_list_or_len(sync_steps[State.duplicates].args[0], counts_only) == plot_sync_dict_3["duplicates"] + + assert plot_sync_dict_3["total_plot_size"] == sum(plot.file_size for plot in receiver.plots().values()) assert plot_sync_dict_3["last_sync_time"] > 0 + assert plot_sync_dict_3["syncing"] is None + + # Trigger a repeated plot sync + await receiver.sync_started( + PlotSyncStart( + PlotSyncIdentifier(uint64(time.time()), uint64(receiver.last_sync().sync_id + 1), uint64(0)), + False, + receiver.last_sync().sync_id, + uint32(1), + ) + ) + assert receiver.to_dict()["syncing"] == { + "initial": False, + "plot_files_processed": 0, + "plot_files_total": 1, + } @pytest.mark.asyncio @@ -302,18 +339,18 @@ async def test_sync_flow() -> None: assert path in receiver.duplicates() # We should be in idle state again - assert receiver.state() == State.idle + assert receiver.current_sync().state == State.idle @pytest.mark.asyncio async def test_invalid_ids() -> None: receiver, sync_steps = plot_sync_setup() for state in State: - assert receiver.state() == state + assert receiver.current_sync().state == state current_step = sync_steps[state] - if receiver.state() == State.idle: + if receiver.current_sync().state == State.idle: # Set last_sync_id for the tests below - receiver._last_sync_id = uint64(1) + receiver._last_sync.sync_id = uint64(1) # Test "sync_started last doesn't match" invalid_last_sync_id_param = PlotSyncStart( plot_sync_identifier(uint64(0), uint64(0)), False, uint64(2), uint32(0) @@ -327,17 +364,19 @@ async def test_invalid_ids() -> None: await current_step.function(invalid_sync_id_match_param) assert_error_response(receiver, ErrorCodes.sync_ids_match) # Reset the last_sync_id to the default - receiver._last_sync_id = uint64(0) + receiver._last_sync.sync_id = uint64(0) else: # Test invalid sync_id invalid_sync_id_param = current_step.payload_type( - plot_sync_identifier(uint64(10), uint64(receiver.expected_message_id())), *current_step.args + plot_sync_identifier(uint64(10), uint64(receiver.current_sync().next_message_id)), *current_step.args ) await current_step.function(invalid_sync_id_param) assert_error_response(receiver, ErrorCodes.invalid_identifier) # Test invalid message_id invalid_message_id_param = current_step.payload_type( - plot_sync_identifier(receiver.expected_sync_id(), uint64(receiver.expected_message_id() + 1)), + plot_sync_identifier( + receiver.current_sync().sync_id, uint64(receiver.current_sync().next_message_id + 1) + ), *current_step.args, ) await current_step.function(invalid_message_id_param) @@ -358,12 +397,12 @@ async def test_invalid_ids() -> None: async def test_plot_errors(state_to_fail: State, expected_error_code: ErrorCodes) -> None: receiver, sync_steps = plot_sync_setup() for state in State: - assert receiver.state() == state + assert receiver.current_sync().state == state current_step = sync_steps[state] if state == state_to_fail: plot_infos, _ = current_step.args await current_step.function(create_payload(current_step.payload_type, False, plot_infos, False)) - identifier = plot_sync_identifier(receiver.expected_sync_id(), receiver.expected_message_id()) + identifier = plot_sync_identifier(receiver.current_sync().sync_id, receiver.current_sync().next_message_id) invalid_payload = 
current_step.payload_type(identifier, plot_infos, True) await current_step.function(invalid_payload) if state == state_to_fail: diff --git a/tests/plot_sync/test_sync_simulated.py b/tests/plot_sync/test_sync_simulated.py index e5784967a7cf..0451fbbe2a93 100644 --- a/tests/plot_sync/test_sync_simulated.py +++ b/tests/plot_sync/test_sync_simulated.py @@ -110,7 +110,7 @@ def run_internal() -> None: async def sync_done() -> bool: assert sync_id is not None - return self.plot_sync_receiver.last_sync_id() == self.plot_sync_sender._last_sync_id == sync_id + return self.plot_sync_receiver.last_sync().sync_id == self.plot_sync_sender._last_sync_id == sync_id await time_out_assert(60, sync_done) @@ -415,7 +415,7 @@ async def wait_for_reset() -> bool: async def sync_done() -> bool: assert started_sync_id != 0 - return test_data.plot_sync_receiver.last_sync_id() == sender._last_sync_id == started_sync_id + return test_data.plot_sync_receiver.last_sync().sync_id == sender._last_sync_id == started_sync_id # Send start and capture the sync_id sender.sync_start(len(plots), True) diff --git a/tests/runner_templates/build-test-macos b/tests/runner_templates/build-test-macos index 916c5fb03674..e9986dc77214 100644 --- a/tests/runner_templates/build-test-macos +++ b/tests/runner_templates/build-test-macos @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet @@ -91,7 +91,7 @@ INSTALL_TIMELORD venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/tests/runner_templates/build-test-ubuntu b/tests/runner_templates/build-test-ubuntu index c51817db419f..65eed80bf48a 100644 --- a/tests/runner_templates/build-test-ubuntu +++ b/tests/runner_templates/build-test-ubuntu @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHINILLA_ROOT: ${{ github.workspace }}/.chinilla/vanillanet @@ -90,7 +90,7 @@ INSTALL_TIMELORD venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/tests/setup_nodes.py b/tests/setup_nodes.py index fed1825f1e4c..66ae430c6495 100644 --- a/tests/setup_nodes.py +++ b/tests/setup_nodes.py @@ -5,11 +5,9 @@ from pathlib import Path from chinilla.consensus.constants import ConsensusConstants -from chinilla.cmds.init_funcs import init from chinilla.full_node.full_node_api import FullNodeAPI from chinilla.server.start_service import Service from chinilla.server.start_wallet import service_kwargs_for_wallet -from chinilla.util.config import load_config, save_config from chinilla.util.hash import std_hash from chinilla.util.ints import uint16, uint32 from chinilla.util.keychain import bytes_to_mnemonic @@ -290,14 +288,17 @@ async def setup_simulators_and_wallets( await _teardown_nodes(node_iters) -async def setup_harvester_farmer(bt: BlockTools, consensus_constants: ConsensusConstants, *, start_services: bool): +async def setup_harvester_farmer( + bt: BlockTools, tmp_path: Path, consensus_constants: ConsensusConstants, *, start_services: bool +): farmer_port = find_available_listen_port("farmer") farmer_rpc_port = 
find_available_listen_port("farmer rpc") harvester_port = find_available_listen_port("harvester") harvester_rpc_port = find_available_listen_port("harvester rpc") node_iters = [ setup_harvester( - bt.root_path, + bt, + tmp_path / "harvester", bt.config["self_hostname"], harvester_port, harvester_rpc_port, @@ -307,6 +308,7 @@ async def setup_harvester_farmer(bt: BlockTools, consensus_constants: ConsensusC ), setup_farmer( bt, + tmp_path / "farmer", bt.config["self_hostname"], farmer_port, farmer_rpc_port, @@ -334,23 +336,22 @@ async def setup_farmer_multi_harvester( node_iterators = [ setup_farmer( - block_tools, block_tools.config["self_hostname"], farmer_port, farmer_rpc_port, consensus_constants + block_tools, + temp_dir / "farmer", + block_tools.config["self_hostname"], + farmer_port, + farmer_rpc_port, + consensus_constants, ) ] for i in range(0, harvester_count): - root_path: Path = temp_dir / str(i) - init(None, root_path) - init(block_tools.root_path / "config" / "ssl" / "ca", root_path) - config = load_config(root_path, "config.yaml") - config["logging"]["log_stdout"] = True - config["selected_network"] = "testnet0" - config["harvester"]["selected_network"] = "testnet0" + root_path: Path = temp_dir / f"harvester_{i}" harvester_port = find_available_listen_port("harvester") harvester_rpc_port = find_available_listen_port("harvester rpc") - save_config(root_path, "config.yaml", config) node_iterators.append( setup_harvester( + block_tools, root_path, block_tools.config["self_hostname"], harvester_port, @@ -412,7 +413,8 @@ async def setup_full_system( node_iters = [ setup_introducer(shared_b_tools, introducer_port), setup_harvester( - shared_b_tools.root_path, + shared_b_tools, + shared_b_tools.root_path / "harvester", shared_b_tools.config["self_hostname"], harvester_port, harvester_rpc_port, @@ -421,6 +423,7 @@ async def setup_full_system( ), setup_farmer( shared_b_tools, + shared_b_tools.root_path / "harvester", shared_b_tools.config["self_hostname"], farmer_port, farmer_rpc_port, diff --git a/tests/setup_services.py b/tests/setup_services.py index e384153815e4..30aad7b39afd 100644 --- a/tests/setup_services.py +++ b/tests/setup_services.py @@ -6,6 +6,7 @@ from secrets import token_bytes from typing import AsyncGenerator, Optional +from chinilla.cmds.init_funcs import init from chinilla.consensus.constants import ConsensusConstants from chinilla.daemon.server import WebSocketServer, daemon_launch_lock_path, singleton from chinilla.server.start_farmer import service_kwargs_for_farmer @@ -185,6 +186,7 @@ async def setup_wallet_node( async def setup_harvester( + b_tools: BlockTools, root_path: Path, self_hostname: str, port, @@ -193,11 +195,17 @@ async def setup_harvester( consensus_constants: ConsensusConstants, start_service: bool = True, ): + init(None, root_path) + init(b_tools.root_path / "config" / "ssl" / "ca", root_path) config = load_config(root_path, "config.yaml") + config["logging"]["log_stdout"] = True + config["selected_network"] = "testnet0" + config["harvester"]["selected_network"] = "testnet0" config["harvester"]["port"] = port config["harvester"]["rpc_port"] = rpc_port config["harvester"]["farmer_peer"]["host"] = self_hostname config["harvester"]["farmer_peer"]["port"] = farmer_port + config["harvester"]["plot_directories"] = [str(b_tools.plot_dir.resolve())] save_config(root_path, "config.yaml", config) kwargs = service_kwargs_for_harvester(root_path, config["harvester"], consensus_constants) kwargs.update( @@ -219,6 +227,7 @@ async def setup_harvester( async def 
setup_farmer( b_tools: BlockTools, + root_path: Path, self_hostname: str, port, rpc_port, @@ -226,8 +235,15 @@ async def setup_farmer( full_node_port: Optional[uint16] = None, start_service: bool = True, ): - config = b_tools.config["farmer"] - config_pool = b_tools.config["pool"] + init(None, root_path) + init(b_tools.root_path / "config" / "ssl" / "ca", root_path) + root_config = load_config(root_path, "config.yaml") + root_config["logging"]["log_stdout"] = True + root_config["selected_network"] = "testnet0" + root_config["farmer"]["selected_network"] = "testnet0" + save_config(root_path, "config.yaml", root_config) + config = root_config["farmer"] + config_pool = root_config["pool"] config["hcx_target_address"] = encode_puzzle_hash(b_tools.farmer_ph, "hcx") config["pool_public_keys"] = [bytes(pk).hex() for pk in b_tools.pool_pubkeys] @@ -241,9 +257,7 @@ async def setup_farmer( else: del config["full_node_peer"] - kwargs = service_kwargs_for_farmer( - b_tools.root_path, config, config_pool, consensus_constants, b_tools.local_keychain - ) + kwargs = service_kwargs_for_farmer(root_path, config, config_pool, consensus_constants, b_tools.local_keychain) kwargs.update( parse_cli_args=False, connect_to_daemon=False, diff --git a/tests/util/alert_server.py b/tests/util/alert_server.py index 6b5df98ea7e0..e5ee0ec1dfff 100644 --- a/tests/util/alert_server.py +++ b/tests/util/alert_server.py @@ -77,7 +77,7 @@ def main(): ) quit() - return asyncio.get_event_loop().run_until_complete(run_and_wait(file_path, port)) + return asyncio.run(run_and_wait(file_path, port)) if __name__ == "__main__": diff --git a/tests/util/test_paginator.py b/tests/util/test_paginator.py new file mode 100644 index 000000000000..69085181cee1 --- /dev/null +++ b/tests/util/test_paginator.py @@ -0,0 +1,70 @@ +from math import ceil +from typing import List, Type + +import pytest + +from chinilla.util.paginator import InvalidPageSizeError, InvalidPageSizeLimit, PageOutOfBoundsError, Paginator + + +@pytest.mark.parametrize( + "source, page_size, page_size_limit", + [([], 1, 1), ([1], 1, 2), ([1, 2], 2, 2), ([], 10, 100), ([1, 2, 10], 1000, 1000)], +) +def test_constructor_valid_inputs(source: List[int], page_size: int, page_size_limit: int) -> None: + paginator: Paginator = Paginator.create(source, page_size, page_size_limit) + assert paginator.page_size() == page_size + assert paginator.page_count() == 1 + assert paginator.get_page(0) == source + + +@pytest.mark.parametrize( + "page_size, page_size_limit, exception", + [ + (5, -1, InvalidPageSizeLimit), + (5, 0, InvalidPageSizeLimit), + (2, 1, InvalidPageSizeError), + (100, 1, InvalidPageSizeError), + (1001, 1000, InvalidPageSizeError), + ], +) +def test_constructor_invalid_inputs(page_size: int, page_size_limit: int, exception: Type[Exception]) -> None: + with pytest.raises(exception): + Paginator.create([], page_size, page_size_limit) + + +def test_page_count() -> None: + for page_size in range(1, 10): + for i in range(0, 10): + assert Paginator.create(range(0, i), page_size).page_count() == max(1, ceil(i / page_size)) + + +@pytest.mark.parametrize( + "length, page_size, page, expected_data", + [ + (17, 5, 0, [0, 1, 2, 3, 4]), + (17, 5, 1, [5, 6, 7, 8, 9]), + (17, 5, 2, [10, 11, 12, 13, 14]), + (17, 5, 3, [15, 16]), + (3, 4, 0, [0, 1, 2]), + (3, 3, 0, [0, 1, 2]), + (3, 2, 0, [0, 1]), + (3, 2, 1, [2]), + (3, 1, 0, [0]), + (3, 1, 1, [1]), + (3, 1, 2, [2]), + (2, 2, 0, [0, 1]), + (2, 1, 0, [0]), + (2, 1, 1, [1]), + (1, 2, 0, [0]), + (0, 2, 0, []), + (0, 10, 0, []), + ], +) +def 
test_get_page_valid(length: int, page: int, page_size: int, expected_data: List[int]) -> None: + assert Paginator.create(list(range(0, length)), page_size).get_page(page) == expected_data + + +@pytest.mark.parametrize("page", [-1000, -10, -1, 5, 10, 1000]) +def test_get_page_invalid(page: int) -> None: + with pytest.raises(PageOutOfBoundsError): + Paginator.create(range(0, 17), 5).get_page(page) diff --git a/tests/wallet/test_singleton_lifecycle.py b/tests/wallet/test_singleton_lifecycle.py index 069a2033b4ef..f76cc165ed54 100644 --- a/tests/wallet/test_singleton_lifecycle.py +++ b/tests/wallet/test_singleton_lifecycle.py @@ -113,8 +113,7 @@ def test_only_odd_coins_0(): conditions = Program.to(condition_list) coin_spend = CoinSpend(farmed_coin, ANYONE_CAN_SPEND_PUZZLE, conditions) spend_bundle = SpendBundle.aggregate([launcher_spend_bundle, SpendBundle([coin_spend], G2Element())]) - run = asyncio.get_event_loop().run_until_complete - coins_added, coins_removed = run(check_spend_bundle_validity(bt.constants, blocks, spend_bundle)) + coins_added, coins_removed = asyncio.run(check_spend_bundle_validity(bt.constants, blocks, spend_bundle)) coin_set_added = set([_.coin for _ in coins_added]) coin_set_removed = set([_.coin for _ in coins_removed])
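Two of the recurring changes above deserve a short illustration.

First, the diff swaps asyncio.get_event_loop().run_until_complete(...) for asyncio.run(...) in test_config.py, alert_server.py and test_singleton_lifecycle.py. A minimal sketch of the difference, using only the standard library: asyncio.run() creates a fresh event loop, runs the coroutine to completion, and closes the loop again, while get_event_loop() relies on an implicit global loop and emits a DeprecationWarning on Python 3.10+ when no loop is running, which presumably matters now that '3.10' is being added to the CI matrices elsewhere in this PR.

import asyncio

async def check() -> str:
    await asyncio.sleep(0)  # stand-in for the real coroutine under test
    return "ok"

# Old pattern, relies on an implicit global event loop (deprecated on 3.10+
# when called with no running loop):
#   result = asyncio.get_event_loop().run_until_complete(check())

# New pattern used throughout this diff: create, run, and close a dedicated loop.
result = asyncio.run(check())
assert result == "ok"

Second, the new tests/util/test_paginator.py pins down the behaviour of chinilla.util.paginator.Paginator without showing the class itself. The sketch below is only an illustration consistent with what the tests assert; the internal layout and the default page_size_limit value are assumptions, not the actual implementation:

from dataclasses import dataclass
from math import ceil
from typing import Any, List, Sequence


class InvalidPageSizeLimit(Exception):
    pass


class InvalidPageSizeError(Exception):
    pass


class PageOutOfBoundsError(Exception):
    pass


@dataclass
class Paginator:
    _source: Sequence[Any]
    _page_size: int

    @classmethod
    def create(cls, source: Sequence[Any], page_size: int, page_size_limit: int = 100) -> "Paginator":
        # The tests call create() with and without an explicit limit, so a
        # default must exist; 100 is an assumed value.
        if page_size_limit < 1:
            raise InvalidPageSizeLimit()
        if page_size > page_size_limit:
            raise InvalidPageSizeError()
        return cls(source, page_size)

    def page_size(self) -> int:
        return self._page_size

    def page_count(self) -> int:
        # An empty source still yields one (empty) page, matching test_page_count.
        return max(1, ceil(len(self._source) / self._page_size))

    def get_page(self, page: int) -> List[Any]:
        if page < 0 or page >= self.page_count():
            raise PageOutOfBoundsError()
        offset = page * self._page_size
        return list(self._source[offset : offset + self._page_size])

With this sketch, Paginator.create(list(range(17)), 5).get_page(3) returns [15, 16] and get_page(-1) raises PageOutOfBoundsError, matching the parametrized cases above.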