diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml
index 61ad71d55030..56a3b5e7f0d8 100644
--- a/.github/workflows/build-linux-arm64-installer.yml
+++ b/.github/workflows/build-linux-arm64-installer.yml
@@ -68,11 +68,11 @@ jobs:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           result-encoding: string
           script: |
-            const releases = await github.rest.repos.listReleases({
+            const release = await github.rest.repos.getLatestRelease({
              owner: 'Chia-Network',
              repo: 'chia-plotter-madmax',
            });
-            return releases.data[0].tag_name;
+            return release.data.tag_name;

      - name: Get latest madmax plotter
        run: |
@@ -89,11 +89,11 @@ jobs:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           result-encoding: string
           script: |
-            const releases = await github.rest.repos.listReleases({
+            const release = await github.rest.repos.getLatestRelease({
              owner: 'Chia-Network',
              repo: 'bladebit',
            });
-            return releases.data[0].tag_name;
+            return release.data.tag_name;

      - name: Get latest bladebit plotter
        run: |
@@ -106,7 +106,7 @@ jobs:
        env:
          INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
-          sh install.sh
+          sh install.sh -d

      - name: Build arm64 .deb package
        env:
@@ -119,7 +119,7 @@ jobs:
          sh build_linux_deb.sh arm64

      - name: Upload Linux artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
          name: chia-installers-linux-deb-arm64
          path: ${{ github.workspace }}/build_scripts/final_installer/
@@ -137,10 +137,11 @@ jobs:
          CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
        if: steps.check_secrets.outputs.HAS_SECRET
        run: |
-          GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
-          CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
-          echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV
-          aws s3 cp "$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb" "s3://download.chia.net/dev/chia-blockchain_${CHIA_DEV_BUILD}_arm64.deb"
+          GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
+          CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
+          echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV
+          aws s3 cp "$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb" "s3://download.chia.net/dev/chia-blockchain_${CHIA_DEV_BUILD}_arm64.deb"
+          aws s3 cp "$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb" "s3://download.chia.net/dev/chia-blockchain-cli_${CHIA_DEV_BUILD}-1_arm64.deb"

      - name: Create Checksums
        if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
@@ -149,6 +150,7 @@ jobs:
        run: |
          ls $GITHUB_WORKSPACE/build_scripts/final_installer/
          sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.sha256
+          sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb.sha256
          ls $GITHUB_WORKSPACE/build_scripts/final_installer/

      - name: Install py3createtorrent
@@ -162,6 +164,7 @@ jobs:
        env:
          CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
        run: |
          py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb -o $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.torrent --webseed https://download.chia.net/install/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb
+          py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb -o $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb.torrent --webseed https://download.chia.net/install/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb
          ls $GITHUB_WORKSPACE/build_scripts/final_installer/

      - name: Upload Beta Installer
@@ -171,16 +174,21 @@ jobs:
        run: |
          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb s3://download.chia.net/beta/chia-blockchain_arm64_latest_beta.deb
          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.sha256 s3://download.chia.net/beta/chia-blockchain_arm64_latest_beta.deb.sha256
+          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb s3://download.chia.net/beta/chia-blockchain-cli_arm64_latest_beta.deb
+          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb.sha256 s3://download.chia.net/beta/chia-blockchain-cli_arm64_latest_beta.deb.sha256

      - name: Upload Release Files
        if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
        env:
          CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
        run: |
-          ls $GITHUB_WORKSPACE/build_scripts/final_installer/
-          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb s3://download.chia.net/install/
-          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.sha256 s3://download.chia.net/install/
-          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.torrent s3://download.chia.net/torrents/
+          ls $GITHUB_WORKSPACE/build_scripts/final_installer/
+          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb s3://download.chia.net/install/
+          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.sha256 s3://download.chia.net/install/
+          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_arm64.deb.torrent s3://download.chia.net/torrents/
+          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb s3://download.chia.net/install/
+          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb.sha256 s3://download.chia.net/install/
+          aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_arm64.deb.torrent s3://download.chia.net/torrents/

      - name: Get tag name
        if: startsWith(github.ref, 'refs/tags/')
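A note on the `listReleases` → `getLatestRelease` swap that recurs throughout this PR: `repos.listReleases` returns the release list in reverse-chronological order, including pre-releases, so `releases.data[0]` can resolve to a pre-release tag, while `repos.getLatestRelease` returns only the most recent published, non-prerelease, non-draft release. The difference is easy to confirm against the public REST API (plain curl, no token needed for public repos):

```sh
# What getLatestRelease returns: the latest published, non-prerelease release
curl -s https://api.github.com/repos/Chia-Network/chia-plotter-madmax/releases/latest \
  | grep '"tag_name"'

# What the old releases.data[0] returned: the first entry of the full list,
# which may be a pre-release
curl -s https://api.github.com/repos/Chia-Network/chia-plotter-madmax/releases \
  | grep -m1 '"tag_name"'
```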
diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml
index a9feddfe1441..e1ff9de5670b 100644
--- a/.github/workflows/build-linux-installer-deb.yml
+++ b/.github/workflows/build-linux-installer-deb.yml
@@ -102,11 +102,11 @@ jobs:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           result-encoding: string
           script: |
-            const releases = await github.rest.repos.listReleases({
+            const release = await github.rest.repos.getLatestRelease({
              owner: 'Chia-Network',
              repo: 'chia-plotter-madmax',
            });
-            return releases.data[0].tag_name;
+            return release.data.tag_name;

      - name: Get latest madmax plotter
        run: |
@@ -123,11 +123,11 @@ jobs:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           result-encoding: string
           script: |
-            const releases = await github.rest.repos.listReleases({
+            const release = await github.rest.repos.getLatestRelease({
              owner: 'Chia-Network',
              repo: 'bladebit',
            });
-            return releases.data[0].tag_name;
+            return release.data.tag_name;

      - name: Get latest bladebit plotter
        run: |
@@ -140,7 +140,7 @@ jobs:
        env:
          INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
-          sh install.sh
+          sh install.sh -d

      - name: Setup Node 16.x
        uses: actions/setup-node@v3
@@ -162,7 +162,7 @@ jobs:
          sh build_linux_deb.sh amd64

      - name: Upload Linux artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
          name: chia-installers-linux-deb-intel
          path: ${{ github.workspace }}/build_scripts/final_installer/
@@ -178,22 +178,24 @@ jobs:
      - name: Upload to s3
        if: steps.check_secrets.outputs.HAS_SECRET
        env:
-          CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
+          CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
        run: |
-          GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
-          CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
-          echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV
-          ls ${{ github.workspace }}/build_scripts/final_installer/
-          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download.chia.net/dev/chia-blockchain_${CHIA_DEV_BUILD}_amd64.deb
+          GIT_SHORT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-8)
+          CHIA_DEV_BUILD=${CHIA_INSTALLER_VERSION}-$GIT_SHORT_HASH
+          echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV
+          ls ${{ github.workspace }}/build_scripts/final_installer/
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download.chia.net/dev/chia-blockchain_${CHIA_DEV_BUILD}_amd64.deb
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb s3://download.chia.net/dev/chia-blockchain-cli_${CHIA_DEV_BUILD}-1_amd64.deb

      - name: Create Checksums
        if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
        env:
          CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
        run: |
-          ls ${{ github.workspace }}/build_scripts/final_installer/
-          sha256sum ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.sha256
-          ls ${{ github.workspace }}/build_scripts/final_installer/
+          ls ${{ github.workspace }}/build_scripts/final_installer/
+          sha256sum ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.sha256
+          sha256sum ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb.sha256
+          ls ${{ github.workspace }}/build_scripts/final_installer/

      - name: Install py3createtorrent
        if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main'
@@ -206,6 +208,7 @@ jobs:
        if: startsWith(github.ref, 'refs/tags/')
        run: |
          py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb -o ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.torrent --webseed https://download.chia.net/install/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb
+          py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb -o ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb.torrent --webseed https://download.chia.net/install/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb
          ls

      - name: Upload Beta Installer
@@ -215,15 +218,20 @@ jobs:
        run: |
          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download.chia.net/beta/chia-blockchain_amd64_latest_beta.deb
          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.sha256 s3://download.chia.net/beta/chia-blockchain_amd64_latest_beta.deb.sha256
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb s3://download.chia.net/beta/chia-blockchain-cli_amd64_latest_beta.deb
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb.sha256 s3://download.chia.net/beta/chia-blockchain-cli_amd64_latest_beta.deb.sha256

      - name: Upload Release Files
        env:
          CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}
        if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/')
        run: |
-          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download.chia.net/install/
-          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.sha256 s3://download.chia.net/install/
-          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.torrent s3://download.chia.net/torrents/
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb s3://download.chia.net/install/
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.sha256 s3://download.chia.net/install/
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain_${CHIA_INSTALLER_VERSION}_amd64.deb.torrent s3://download.chia.net/torrents/
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb s3://download.chia.net/install/
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb.sha256 s3://download.chia.net/install/
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_amd64.deb.torrent s3://download.chia.net/torrents/

      - name: Get tag name
        if: startsWith(github.ref, 'refs/tags/')
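Both Debian-package workflows above now publish a second artifact set (`chia-blockchain-cli_<version>-1_<arch>.deb` plus its `.sha256` and `.torrent`) alongside the GUI installer. One detail reviewers may want to flag: the checksum files are generated with the absolute runner-workspace path as the filename, so `sha256sum -c` on a downloaded `.sha256` won't match the local filename out of the box; consumers have to compare digests directly. A minimal verification sketch, with a placeholder version string:

```sh
VERSION=1.3.4   # placeholder; substitute a real release version
curl -sO "https://download.chia.net/install/chia-blockchain-cli_${VERSION}-1_amd64.deb"
curl -sO "https://download.chia.net/install/chia-blockchain-cli_${VERSION}-1_amd64.deb.sha256"

# The .sha256 embeds the CI workspace path, so compare the digests by hand:
sha256sum "chia-blockchain-cli_${VERSION}-1_amd64.deb" | cut -d' ' -f1
cut -d' ' -f1 "chia-blockchain-cli_${VERSION}-1_amd64.deb.sha256"
```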
diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml
index c97c85526079..36c5795aae70 100644
--- a/.github/workflows/build-linux-installer-rpm.yml
+++ b/.github/workflows/build-linux-installer-rpm.yml
@@ -71,11 +71,11 @@ jobs:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           result-encoding: string
           script: |
-            const releases = await github.rest.repos.listReleases({
+            const release = await github.rest.repos.getLatestRelease({
              owner: 'Chia-Network',
              repo: 'chia-plotter-madmax',
            });
-            return releases.data[0].tag_name;
+            return release.data.tag_name;

      - name: Get latest madmax plotter
        run: |
@@ -92,11 +92,11 @@ jobs:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           result-encoding: string
           script: |
-            const releases = await github.rest.repos.listReleases({
+            const release = await github.rest.repos.getLatestRelease({
              owner: 'Chia-Network',
              repo: 'bladebit',
            });
-            return releases.data[0].tag_name;
+            return release.data.tag_name;

      - name: Get latest bladebit plotter
        run: |
@@ -109,7 +109,7 @@ jobs:
        env:
          INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
-          sh install.sh
+          sh install.sh -d

      - name: Build .rpm package
        env:
@@ -119,10 +119,10 @@ jobs:
          git -C ./chia-blockchain-gui status
          . ./activate
          cd ./build_scripts
-          sh build_linux_rpm.sh amd64
+          bash build_linux_rpm.sh amd64

      - name: Upload Linux artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
          name: chia-installers-linux-rpm-intel
          path: ${{ github.workspace }}/build_scripts/final_installer/
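The `sh build_linux_rpm.sh` → `bash build_linux_rpm.sh` change is easy to miss but meaningful: on Debian-family runners `/bin/sh` is dash, so any bashism in the script only works when bash is the interpreter. A trivial repro of the failure mode (the script content here is illustrative, not from the repo):

```sh
cat > demo.sh <<'EOF'
arr=(one two)      # bash array syntax, not POSIX sh
echo "${arr[1]}"
EOF
sh demo.sh    # under dash this fails with a syntax error on "("
bash demo.sh  # prints "two"
```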
diff --git a/.github/workflows/build-macos-installer.yml b/.github/workflows/build-macos-installer.yml
index fa9f7d12dc98..ef3b41c58a83 100644
--- a/.github/workflows/build-macos-installer.yml
+++ b/.github/workflows/build-macos-installer.yml
@@ -25,7 +25,7 @@ jobs:
      max-parallel: 4
      matrix:
        python-version: [3.9]
-        os: [macOS-latest]
+        os: [macos-10.15]

    steps:
      - name: Checkout Code
@@ -110,11 +110,11 @@ jobs:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           result-encoding: string
           script: |
-            const releases = await github.rest.repos.listReleases({
+            const release = await github.rest.repos.getLatestRelease({
              owner: 'Chia-Network',
              repo: 'chia-plotter-madmax',
            });
-            return releases.data[0].tag_name;
+            return release.data.tag_name;

      - name: Get latest madmax plotter
        run: |
@@ -128,7 +128,7 @@ jobs:
        env:
          INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
-          sh install.sh
+          sh install.sh -d

      - name: Setup Node 16.x
        uses: actions/setup-node@v3
@@ -148,7 +148,7 @@ jobs:
          sh build_macos.sh

      - name: Upload MacOS artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
          name: chia-installers-macos-dmg-intel
          path: ${{ github.workspace }}/build_scripts/final_installer/
@@ -190,8 +190,8 @@ jobs:
          AWS_SECRET_ACCESS_KEY: ${{ secrets.INSTALLER_UPLOAD_SECRET }}
          AWS_REGION: us-west-2
        run: |
-          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg s3://download.chia.net/beta/Chia_latest_beta.dmg
-          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg.sha256 s3://download.chia.net/beta/Chia_latest_beta.dmg.sha256
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg s3://download.chia.net/beta/Chia-intel_latest_beta.dmg
+          aws s3 cp ${{ github.workspace }}/build_scripts/final_installer/Chia-${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }}.dmg.sha256 s3://download.chia.net/beta/Chia-intel_latest_beta.dmg.sha256

      - name: Upload Release Files
        if: steps.check_secrets.outputs.HAS_AWS_SECRET && startsWith(github.ref, 'refs/tags/')
diff --git a/.github/workflows/build-macos-m1-installer.yml b/.github/workflows/build-macos-m1-installer.yml
index 323e636311cf..17cc3b70cb76 100644
--- a/.github/workflows/build-macos-m1-installer.yml
+++ b/.github/workflows/build-macos-m1-installer.yml
@@ -84,11 +84,11 @@ jobs:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           result-encoding: string
           script: |
-            const releases = await github.rest.repos.listReleases({
+            const release = await github.rest.repos.getLatestRelease({
              owner: 'Chia-Network',
              repo: 'chia-plotter-madmax',
            });
-            return releases.data[0].tag_name;
+            return release.data.tag_name;

      - name: Get latest madmax plotter
        run: |
@@ -102,7 +102,7 @@ jobs:
        env:
          INSTALL_PYTHON_VERSION: ${{ matrix.python-version }}
        run: |
-          arch -arm64 sh install.sh
+          arch -arm64 sh install.sh -d

      - name: Install node 16.x
        run: |
@@ -122,7 +122,7 @@ jobs:
          arch -arm64 sh build_macos_m1.sh

      - name: Upload MacOS artifacts
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
        with:
          name: chia-installers-macos-dmg-arm64
          path: ${{ github.workspace }}/build_scripts/final_installer/
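Two macOS-specific choices above are worth calling out. Pinning the Intel job to `macos-10.15` instead of `macOS-latest` presumably freezes the build on a known OS/Xcode image rather than whatever GitHub migrates the `latest` label to next. And the beta DMG now uploads as `Chia-intel_latest_beta.dmg`, which keeps the Intel artifact from colliding with the M1 build's beta object under a single `Chia_latest_beta.dmg` key. A quick in-job sanity check for the runner, using standard macOS tools:

```sh
sw_vers -productVersion   # expect a 10.15.x image on the pinned runner
uname -m                  # x86_64 for the Intel job, arm64 for the M1 job
```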
diff --git a/.github/workflows/build-test-macos-blockchain.yml b/.github/workflows/build-test-macos-blockchain.yml
index a816459ff7b7..01dd685889e4 100644
--- a/.github/workflows/build-test-macos-blockchain.yml
+++ b/.github/workflows/build-test-macos-blockchain.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test blockchain code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/blockchain/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/blockchain/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-clvm.yml b/.github/workflows/build-test-macos-clvm.yml
index f2d496da9384..f27f9e6513b4 100644
--- a/.github/workflows/build-test-macos-clvm.yml
+++ b/.github/workflows/build-test-macos-clvm.yml
@@ -79,7 +79,7 @@ jobs:
    - name: Test clvm code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/clvm/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/clvm/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -91,7 +91,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-core-cmds.yml b/.github/workflows/build-test-macos-core-cmds.yml
index 687ae6c94900..f67ad25e35f6 100644
--- a/.github/workflows/build-test-macos-core-cmds.yml
+++ b/.github/workflows/build-test-macos-core-cmds.yml
@@ -79,7 +79,7 @@ jobs:
    - name: Test core-cmds code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/cmds/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/cmds/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -91,7 +91,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-core-consensus.yml b/.github/workflows/build-test-macos-core-consensus.yml
index f5cd798982e3..a5aad616941d 100644
--- a/.github/workflows/build-test-macos-core-consensus.yml
+++ b/.github/workflows/build-test-macos-core-consensus.yml
@@ -79,7 +79,7 @@ jobs:
    - name: Test core-consensus code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/consensus/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/consensus/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -91,7 +91,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-core-custom_types.yml b/.github/workflows/build-test-macos-core-custom_types.yml
index 130d15420c70..f2dad4ff3d76 100644
--- a/.github/workflows/build-test-macos-core-custom_types.yml
+++ b/.github/workflows/build-test-macos-core-custom_types.yml
@@ -79,7 +79,7 @@ jobs:
    - name: Test core-custom_types code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/custom_types/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/custom_types/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -91,7 +91,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
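The test-step change repeated across all of these workflows swaps `coverage run ... ./venv/bin/py.test ...` for `coverage run ... --module pytest ...`. `coverage run <path>` executes a script file, while `--module` (coverage.py's equivalent of `python -m`) imports the named module as `__main__`; that drops the dependency on the `py.test` console-script spelling (long deprecated in favor of `pytest`) and makes the measured invocation identical to `python -m pytest`. Side by side, with a hypothetical test path:

```sh
# old: execute the console-script file under coverage
venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/foo/test_bar.py

# new: import pytest as __main__ under coverage (same as `python -m pytest`)
venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/foo/test_bar.py
```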
diff --git a/.github/workflows/build-test-macos-core-daemon.yml b/.github/workflows/build-test-macos-core-daemon.yml
index 7ab9dc765a25..a679447ef06f 100644
--- a/.github/workflows/build-test-macos-core-daemon.yml
+++ b/.github/workflows/build-test-macos-core-daemon.yml
@@ -97,7 +97,7 @@ jobs:
    - name: Test core-daemon code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/daemon/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/daemon/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -109,7 +109,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-core-full_node-full_sync.yml b/.github/workflows/build-test-macos-core-full_node-full_sync.yml
index fd8e5da8d28d..c613bcb87e06 100644
--- a/.github/workflows/build-test-macos-core-full_node-full_sync.yml
+++ b/.github/workflows/build-test-macos-core-full_node-full_sync.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test core-full_node-full_sync code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/full_sync/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/full_node/full_sync/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-core-full_node-stores.yml b/.github/workflows/build-test-macos-core-full_node-stores.yml
index 72a06b6ca0b7..b6cd2c45dfa0 100644
--- a/.github/workflows/build-test-macos-core-full_node-stores.yml
+++ b/.github/workflows/build-test-macos-core-full_node-stores.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test core-full_node-stores code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/stores/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/full_node/stores/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-core-full_node.yml b/.github/workflows/build-test-macos-core-full_node.yml
index 83d6cb01568c..2fa7e3c51e57 100644
--- a/.github/workflows/build-test-macos-core-full_node.yml
+++ b/.github/workflows/build-test-macos-core-full_node.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test core-full_node code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/full_node/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-core-server.yml b/.github/workflows/build-test-macos-core-server.yml
index e93f23b2f8b9..0fa88e48fbf1 100644
--- a/.github/workflows/build-test-macos-core-server.yml
+++ b/.github/workflows/build-test-macos-core-server.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test core-server code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/server/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/server/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-core-ssl.yml b/.github/workflows/build-test-macos-core-ssl.yml
index db9903634ac1..4a11ccd2ebef 100644
--- a/.github/workflows/build-test-macos-core-ssl.yml
+++ b/.github/workflows/build-test-macos-core-ssl.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test core-ssl code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/ssl/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/ssl/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-core-util.yml b/.github/workflows/build-test-macos-core-util.yml
index 0935d2d546b4..a17bb909b672 100644
--- a/.github/workflows/build-test-macos-core-util.yml
+++ b/.github/workflows/build-test-macos-core-util.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test core-util code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/util/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/util/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
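On the `upload-artifact@v2` → `@v3` bump that accompanies each of these: all of the `Publish coverage` steps upload into the single artifact name `coverage`, and v3 (like v2) still merges files uploaded under an existing name, so this fan-in pattern keeps working while the action moves onto the newer Node 16 runtime. Downstream, one download fetches every job's reports (the run id below is a placeholder):

```sh
gh run download 1234567890 -n coverage -D coverage_reports
```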
diff --git a/.github/workflows/build-test-macos-core.yml b/.github/workflows/build-test-macos-core.yml
index 4fe7cab2494f..be7d8fcaba39 100644
--- a/.github/workflows/build-test-macos-core.yml
+++ b/.github/workflows/build-test-macos-core.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test core code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-farmer_harvester.yml b/.github/workflows/build-test-macos-farmer_harvester.yml
index 9c9f2b4736a2..0286aba80caa 100644
--- a/.github/workflows/build-test-macos-farmer_harvester.yml
+++ b/.github/workflows/build-test-macos-farmer_harvester.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test farmer_harvester code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/farmer_harvester/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/farmer_harvester/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-generator.yml b/.github/workflows/build-test-macos-generator.yml
index e695c65e96c8..ccce30af66ef 100644
--- a/.github/workflows/build-test-macos-generator.yml
+++ b/.github/workflows/build-test-macos-generator.yml
@@ -79,7 +79,7 @@ jobs:
    - name: Test generator code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/generator/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/generator/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -91,7 +91,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-plot_sync.yml b/.github/workflows/build-test-macos-plot_sync.yml
index 315c909bb476..1acd030f9243 100644
--- a/.github/workflows/build-test-macos-plot_sync.yml
+++ b/.github/workflows/build-test-macos-plot_sync.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test plot_sync code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/plot_sync/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/plot_sync/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-plotting.yml b/.github/workflows/build-test-macos-plotting.yml
index 30d44782fd4b..561f1fa81b52 100644
--- a/.github/workflows/build-test-macos-plotting.yml
+++ b/.github/workflows/build-test-macos-plotting.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test plotting code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/plotting/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/plotting/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-pools.yml b/.github/workflows/build-test-macos-pools.yml
index 9279455daaf5..8b36cbc7cd38 100644
--- a/.github/workflows/build-test-macos-pools.yml
+++ b/.github/workflows/build-test-macos-pools.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test pools code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/pools/test_*.py --durations=10 -n 2 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/pools/test_*.py --durations=10 -n 2 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-simulation.yml b/.github/workflows/build-test-macos-simulation.yml
index 9d8e1f57eb7c..b1e2e79f677b 100644
--- a/.github/workflows/build-test-macos-simulation.yml
+++ b/.github/workflows/build-test-macos-simulation.yml
@@ -97,7 +97,7 @@ jobs:
    - name: Test simulation code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/simulation/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/simulation/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -109,7 +109,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-tools.yml b/.github/workflows/build-test-macos-tools.yml
index e0a4396b6e1e..4be349499dd1 100644
--- a/.github/workflows/build-test-macos-tools.yml
+++ b/.github/workflows/build-test-macos-tools.yml
@@ -79,7 +79,7 @@ jobs:
    - name: Test tools code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/tools/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/tools/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -91,7 +91,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-util.yml b/.github/workflows/build-test-macos-util.yml
index f0f2334a0d0c..136cbfe8f99a 100644
--- a/.github/workflows/build-test-macos-util.yml
+++ b/.github/workflows/build-test-macos-util.yml
@@ -79,7 +79,7 @@ jobs:
    - name: Test util code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/util/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/util/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -91,7 +91,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-wallet-cat_wallet.yml b/.github/workflows/build-test-macos-wallet-cat_wallet.yml
index 70858e93b721..5bceb86a7101 100644
--- a/.github/workflows/build-test-macos-wallet-cat_wallet.yml
+++ b/.github/workflows/build-test-macos-wallet-cat_wallet.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test wallet-cat_wallet code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/cat_wallet/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/cat_wallet/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-wallet-did_wallet.yml b/.github/workflows/build-test-macos-wallet-did_wallet.yml
index 4f97d0ea2f19..c182bc72cf67 100644
--- a/.github/workflows/build-test-macos-wallet-did_wallet.yml
+++ b/.github/workflows/build-test-macos-wallet-did_wallet.yml
@@ -79,7 +79,7 @@ jobs:
    - name: Test wallet-did_wallet code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/did_wallet/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/did_wallet/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -91,7 +91,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-wallet-rl_wallet.yml b/.github/workflows/build-test-macos-wallet-rl_wallet.yml
index 1e19516260a4..3045f4be03c8 100644
--- a/.github/workflows/build-test-macos-wallet-rl_wallet.yml
+++ b/.github/workflows/build-test-macos-wallet-rl_wallet.yml
@@ -79,7 +79,7 @@ jobs:
    - name: Test wallet-rl_wallet code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/rl_wallet/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/rl_wallet/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -91,7 +91,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-wallet-rpc.yml b/.github/workflows/build-test-macos-wallet-rpc.yml
index 68a95ea7a772..12a24bd1ff00 100644
--- a/.github/workflows/build-test-macos-wallet-rpc.yml
+++ b/.github/workflows/build-test-macos-wallet-rpc.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test wallet-rpc code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/rpc/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/rpc/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-wallet-simple_sync.yml b/.github/workflows/build-test-macos-wallet-simple_sync.yml
index 0a1a359b3621..d8cf60ed0f33 100644
--- a/.github/workflows/build-test-macos-wallet-simple_sync.yml
+++ b/.github/workflows/build-test-macos-wallet-simple_sync.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test wallet-simple_sync code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/simple_sync/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/simple_sync/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-wallet-sync.yml b/.github/workflows/build-test-macos-wallet-sync.yml
index c92ca8e21efe..ad4c852c3a19 100644
--- a/.github/workflows/build-test-macos-wallet-sync.yml
+++ b/.github/workflows/build-test-macos-wallet-sync.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test wallet-sync code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/sync/test_*.py --durations=10 -n 0 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/sync/test_*.py --durations=10 -n 0 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-wallet.yml b/.github/workflows/build-test-macos-wallet.yml
index 0a71248808d7..c4be58dd0b9e 100644
--- a/.github/workflows/build-test-macos-wallet.yml
+++ b/.github/workflows/build-test-macos-wallet.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test wallet code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-macos-weight_proof.yml b/.github/workflows/build-test-macos-weight_proof.yml
index 9b737c31e4c3..2690434d817c 100644
--- a/.github/workflows/build-test-macos-weight_proof.yml
+++ b/.github/workflows/build-test-macos-weight_proof.yml
@@ -93,7 +93,7 @@ jobs:
    - name: Test weight_proof code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/weight_proof/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/weight_proof/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -105,7 +105,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
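From here the same two edits repeat across the Ubuntu variants of the test workflows; the only incidental difference is the pre-existing `-p no:monitor` flag on most Ubuntu jobs, which disables a pytest plugin by entry-point name (`-p no:NAME` is stock pytest). The `-n` values visible throughout are pytest-xdist worker counts, which is why they survive the command rewrite unchanged:

```sh
pytest -n 4 tests/util   # pytest-xdist: fan out across 4 worker processes
pytest -n 0 tests/util   # no workers; run in the main process (the suites
                         # using -n 0 here are the daemon/server/rpc/simulation ones)
```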
diff --git a/.github/workflows/build-test-ubuntu-blockchain.yml b/.github/workflows/build-test-ubuntu-blockchain.yml
index 60dbb451694e..a670a85e01e1 100644
--- a/.github/workflows/build-test-ubuntu-blockchain.yml
+++ b/.github/workflows/build-test-ubuntu-blockchain.yml
@@ -92,7 +92,7 @@ jobs:
    - name: Test blockchain code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/blockchain/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/blockchain/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -104,7 +104,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-clvm.yml b/.github/workflows/build-test-ubuntu-clvm.yml
index 35275d29e874..0c8bcd74dbdd 100644
--- a/.github/workflows/build-test-ubuntu-clvm.yml
+++ b/.github/workflows/build-test-ubuntu-clvm.yml
@@ -78,7 +78,7 @@ jobs:
    - name: Test clvm code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/clvm/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/clvm/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -90,7 +90,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-cmds.yml b/.github/workflows/build-test-ubuntu-core-cmds.yml
index f5f8ed214dbc..1e99104ffe8f 100644
--- a/.github/workflows/build-test-ubuntu-core-cmds.yml
+++ b/.github/workflows/build-test-ubuntu-core-cmds.yml
@@ -78,7 +78,7 @@ jobs:
    - name: Test core-cmds code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/cmds/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/cmds/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -90,7 +90,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-consensus.yml b/.github/workflows/build-test-ubuntu-core-consensus.yml
index f9045c31ba30..8f589ae91b6f 100644
--- a/.github/workflows/build-test-ubuntu-core-consensus.yml
+++ b/.github/workflows/build-test-ubuntu-core-consensus.yml
@@ -78,7 +78,7 @@ jobs:
    - name: Test core-consensus code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/consensus/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/consensus/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -90,7 +90,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-custom_types.yml b/.github/workflows/build-test-ubuntu-core-custom_types.yml
index 447a28627335..14a7817de474 100644
--- a/.github/workflows/build-test-ubuntu-core-custom_types.yml
+++ b/.github/workflows/build-test-ubuntu-core-custom_types.yml
@@ -78,7 +78,7 @@ jobs:
    - name: Test core-custom_types code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/custom_types/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/custom_types/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -90,7 +90,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-daemon.yml b/.github/workflows/build-test-ubuntu-core-daemon.yml
index 1992c859f152..3f9d4328f379 100644
--- a/.github/workflows/build-test-ubuntu-core-daemon.yml
+++ b/.github/workflows/build-test-ubuntu-core-daemon.yml
@@ -96,7 +96,7 @@ jobs:
    - name: Test core-daemon code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/daemon/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/daemon/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -108,7 +108,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml b/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml
index a2295de7ebb7..9cb55ba9a735 100644
--- a/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml
+++ b/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml
@@ -92,7 +92,7 @@ jobs:
    - name: Test core-full_node-full_sync code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/full_sync/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/full_node/full_sync/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -104,7 +104,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-full_node-stores.yml b/.github/workflows/build-test-ubuntu-core-full_node-stores.yml
index fbcf34703727..03661c5ca7fa 100644
--- a/.github/workflows/build-test-ubuntu-core-full_node-stores.yml
+++ b/.github/workflows/build-test-ubuntu-core-full_node-stores.yml
@@ -92,7 +92,7 @@ jobs:
    - name: Test core-full_node-stores code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/stores/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/full_node/stores/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -104,7 +104,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-full_node.yml b/.github/workflows/build-test-ubuntu-core-full_node.yml
index 97208996a1fa..cbfbcf5db199 100644
--- a/.github/workflows/build-test-ubuntu-core-full_node.yml
+++ b/.github/workflows/build-test-ubuntu-core-full_node.yml
@@ -92,7 +92,7 @@ jobs:
    - name: Test core-full_node code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/full_node/test_*.py --durations=10 -n 4 -m "not benchmark"
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/full_node/test_*.py --durations=10 -n 4 -m "not benchmark"

    - name: Process coverage data
      run: |
@@ -104,7 +104,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-server.yml b/.github/workflows/build-test-ubuntu-core-server.yml
index c96f2e13f107..2e29e7c5f4b6 100644
--- a/.github/workflows/build-test-ubuntu-core-server.yml
+++ b/.github/workflows/build-test-ubuntu-core-server.yml
@@ -92,7 +92,7 @@ jobs:
    - name: Test core-server code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/server/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/server/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -104,7 +104,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-ssl.yml b/.github/workflows/build-test-ubuntu-core-ssl.yml
index 0edc22b104e5..931892d87558 100644
--- a/.github/workflows/build-test-ubuntu-core-ssl.yml
+++ b/.github/workflows/build-test-ubuntu-core-ssl.yml
@@ -92,7 +92,7 @@ jobs:
    - name: Test core-ssl code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/ssl/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/ssl/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -104,7 +104,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core-util.yml b/.github/workflows/build-test-ubuntu-core-util.yml
index 2e5c699cb98d..6b40c42a71cc 100644
--- a/.github/workflows/build-test-ubuntu-core-util.yml
+++ b/.github/workflows/build-test-ubuntu-core-util.yml
@@ -92,7 +92,7 @@ jobs:
    - name: Test core-util code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/util/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/util/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -104,7 +104,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-core.yml b/.github/workflows/build-test-ubuntu-core.yml
index ed717b529b72..49c5c21e4ff9 100644
--- a/.github/workflows/build-test-ubuntu-core.yml
+++ b/.github/workflows/build-test-ubuntu-core.yml
@@ -92,7 +92,7 @@ jobs:
    - name: Test core code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/core/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/core/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -104,7 +104,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
diff --git a/.github/workflows/build-test-ubuntu-farmer_harvester.yml b/.github/workflows/build-test-ubuntu-farmer_harvester.yml
index 03a3050232a7..2304dadab59f 100644
--- a/.github/workflows/build-test-ubuntu-farmer_harvester.yml
+++ b/.github/workflows/build-test-ubuntu-farmer_harvester.yml
@@ -92,7 +92,7 @@ jobs:
    - name: Test farmer_harvester code with pytest
      run: |
        . ./activate
-        venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/farmer_harvester/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor
+        venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/farmer_harvester/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor

    - name: Process coverage data
      run: |
@@ -104,7 +104,7 @@ jobs:
        venv/bin/coverage report --rcfile=.coveragerc --show-missing

    - name: Publish coverage
-      uses: actions/upload-artifact@v2
+      uses: actions/upload-artifact@v3
      with:
        name: coverage
        path: coverage_reports/*
./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/generator/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/generator/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-plot_sync.yml b/.github/workflows/build-test-ubuntu-plot_sync.yml index fddf0cd9c2ab..2877a4c1998e 100644 --- a/.github/workflows/build-test-ubuntu-plot_sync.yml +++ b/.github/workflows/build-test-ubuntu-plot_sync.yml @@ -92,7 +92,7 @@ jobs: - name: Test plot_sync code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/plot_sync/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/plot_sync/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-plotting.yml b/.github/workflows/build-test-ubuntu-plotting.yml index 882d0b0c113a..32bf337eaceb 100644 --- a/.github/workflows/build-test-ubuntu-plotting.yml +++ b/.github/workflows/build-test-ubuntu-plotting.yml @@ -92,7 +92,7 @@ jobs: - name: Test plotting code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/plotting/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/plotting/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-pools.yml b/.github/workflows/build-test-ubuntu-pools.yml index d2671e3a18de..bfbe02312e22 100644 --- a/.github/workflows/build-test-ubuntu-pools.yml +++ b/.github/workflows/build-test-ubuntu-pools.yml @@ -92,7 +92,7 @@ jobs: - name: Test pools code with pytest run: | . 
./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/pools/test_*.py --durations=10 -n 2 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/pools/test_*.py --durations=10 -n 2 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-simulation.yml b/.github/workflows/build-test-ubuntu-simulation.yml index 06435f947ea9..b286ebdb24b3 100644 --- a/.github/workflows/build-test-ubuntu-simulation.yml +++ b/.github/workflows/build-test-ubuntu-simulation.yml @@ -96,7 +96,7 @@ jobs: - name: Test simulation code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/simulation/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/simulation/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -108,7 +108,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-tools.yml b/.github/workflows/build-test-ubuntu-tools.yml index 97877660be2d..b07714020fc2 100644 --- a/.github/workflows/build-test-ubuntu-tools.yml +++ b/.github/workflows/build-test-ubuntu-tools.yml @@ -78,7 +78,7 @@ jobs: - name: Test tools code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/tools/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/tools/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-util.yml b/.github/workflows/build-test-ubuntu-util.yml index 1d0567e628d2..ff9f5dbf37b1 100644 --- a/.github/workflows/build-test-ubuntu-util.yml +++ b/.github/workflows/build-test-ubuntu-util.yml @@ -78,7 +78,7 @@ jobs: - name: Test util code with pytest run: | . 
./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/util/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/util/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml b/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml index af979fc0fd9c..1c04603e0cf6 100644 --- a/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml @@ -92,7 +92,7 @@ jobs: - name: Test wallet-cat_wallet code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/cat_wallet/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/cat_wallet/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml b/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml index 06bf1ad15d55..8bf4e33a3933 100644 --- a/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml @@ -78,7 +78,7 @@ jobs: - name: Test wallet-did_wallet code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/did_wallet/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/did_wallet/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml b/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml index c791e3f32c1e..2eefca2d09f7 100644 --- a/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml @@ -78,7 +78,7 @@ jobs: - name: Test wallet-rl_wallet code with pytest run: | . 
./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/rl_wallet/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/rl_wallet/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-rpc.yml b/.github/workflows/build-test-ubuntu-wallet-rpc.yml index bdeb794df439..36a38b740317 100644 --- a/.github/workflows/build-test-ubuntu-wallet-rpc.yml +++ b/.github/workflows/build-test-ubuntu-wallet-rpc.yml @@ -92,7 +92,7 @@ jobs: - name: Test wallet-rpc code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/rpc/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/rpc/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml b/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml index 7a9585c713fd..bd6925631a9f 100644 --- a/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml +++ b/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml @@ -92,7 +92,7 @@ jobs: - name: Test wallet-simple_sync code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/simple_sync/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/simple_sync/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-sync.yml b/.github/workflows/build-test-ubuntu-wallet-sync.yml index ceb86d8b4234..c607a7028601 100644 --- a/.github/workflows/build-test-ubuntu-wallet-sync.yml +++ b/.github/workflows/build-test-ubuntu-wallet-sync.yml @@ -92,7 +92,7 @@ jobs: - name: Test wallet-sync code with pytest run: | . 
./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/sync/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/sync/test_*.py --durations=10 -n 0 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet.yml b/.github/workflows/build-test-ubuntu-wallet.yml index 0e88ee66d539..349879eda849 100644 --- a/.github/workflows/build-test-ubuntu-wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet.yml @@ -92,7 +92,7 @@ jobs: - name: Test wallet code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/wallet/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/wallet/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-weight_proof.yml b/.github/workflows/build-test-ubuntu-weight_proof.yml index 041aaef86bfc..2616c1d402e9 100644 --- a/.github/workflows/build-test-ubuntu-weight_proof.yml +++ b/.github/workflows/build-test-ubuntu-weight_proof.yml @@ -92,7 +92,7 @@ jobs: - name: Test weight_proof code with pytest run: | . 
./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test tests/weight_proof/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor + venv/bin/coverage run --rcfile=.coveragerc --module pytest tests/weight_proof/test_*.py --durations=10 -n 4 -m "not benchmark" -p no:monitor - name: Process coverage data run: | @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index ae2161f0b3b3..b74fefd1bd10 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -19,7 +19,7 @@ jobs: build: name: Windows Installer on Windows 10 and Python 3.9 runs-on: [windows-2019] - timeout-minutes: 40 + timeout-minutes: 50 steps: - name: Checkout Code @@ -62,7 +62,7 @@ jobs: - uses: actions/setup-python@v2 name: Install Python 3.9 with: - python-version: "3.9.11" + python-version: "3.9" - name: Setup Node 16.x uses: actions/setup-node@v3 @@ -111,11 +111,11 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} result-encoding: string script: | - const releases = await github.rest.repos.listReleases({ + const release = await github.rest.repos.getLatestRelease({ owner: 'Chia-Network', repo: 'chia-plotter-madmax', }); - return releases.data[0].tag_name; + return release.data.tag_name; - name: Get latest madmax plotter run: | @@ -130,11 +130,11 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} result-encoding: string script: | - const releases = await github.rest.repos.listReleases({ + const release = await github.rest.repos.getLatestRelease({ owner: 'Chia-Network', repo: 'bladebit', }); - return releases.data[0].tag_name; + return release.data.tag_name; - name: Get latest bladebit plotter run: | @@ -155,13 +155,13 @@ jobs: .\build_scripts\build_windows.ps1 - name: Upload Windows exe's to artifacts - uses: actions/upload-artifact@v2.2.2 + uses: actions/upload-artifact@v3 with: name: chia-installers-windows-exe-intel path: ${{ github.workspace }}\chia-blockchain-gui\Chia-win32-x64\ - name: Upload Installer to artifacts - uses: actions/upload-artifact@v2.2.2 + uses: actions/upload-artifact@v3 with: name: Windows-Installers path: ${{ github.workspace }}\chia-blockchain-gui\release-builds\ diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 9b2a7e859673..7d27cec94d1a 100644 --- a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -156,6 +156,16 @@ jobs: apt-get --yes update apt-get install --yes git lsb-release sudo + # @TODO this step can be removed once Python 3.10 is supported + # Python 3.10 is now the default in bookworm, so install 3.9 specifically so install does not fail + - name: Prepare debian:bookworm + if: ${{ matrix.distribution.name == 'debian:bookworm' }} + env: + DEBIAN_FRONTEND: noninteractive + run: | + apt-get update -y + apt-get install -y python3.9-venv + - name: Prepare Fedora if: ${{ matrix.distribution.type == 'fedora' }} run: | @@ -178,6 +188,9 @@ jobs: apt-get --yes update apt-get install --yes git lsb-release sudo + - name: Add safe git directory + run: git config --global --add safe.directory $GITHUB_WORKSPACE + # after installing git so we use that copy - name: Checkout Code uses: actions/checkout@v3 diff --git 
a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index 9114d3498665..c059199495d6 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -65,7 +65,7 @@ jobs: python -m build --sdist --outdir dist . - name: Upload artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: dist path: ./dist diff --git a/.gitignore b/.gitignore index c5082e8b5f64..f70408d1fec5 100644 --- a/.gitignore +++ b/.gitignore @@ -298,3 +298,7 @@ tags [._]*.un~ # End of https://www.toptal.com/developers/gitignore/api/python,git,vim + +# Ignore the binaries that are pulled for the installer +/bladebit/ +/madmax/ diff --git a/.isort.cfg b/.isort.cfg index 9ed754a63a73..011e9d61b846 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -31,8 +31,6 @@ extend_skip= chia/daemon/keychain_proxy.py chia/daemon/keychain_server.py chia/daemon/server.py - chia/farmer/farmer_api.py - chia/farmer/farmer.py chia/full_node/block_height_map.py chia/full_node/block_store.py chia/full_node/bundle_tools.py @@ -45,8 +43,6 @@ extend_skip= chia/full_node/mempool_check_conditions.py chia/full_node/mempool_manager.py chia/full_node/weight_proof.py - chia/harvester/harvester_api.py - chia/harvester/harvester.py chia/introducer/introducer.py chia/plotters/bladebit.py chia/plotters/chiapos.py @@ -60,10 +56,8 @@ extend_skip= chia/pools/pool_puzzles.py chia/pools/pool_wallet_info.py chia/pools/pool_wallet.py - chia/protocols/harvester_protocol.py chia/protocols/pool_protocol.py chia/protocols/protocol_state_machine.py - chia/rpc/farmer_rpc_client.py chia/rpc/full_node_rpc_client.py chia/rpc/rpc_client.py chia/rpc/wallet_rpc_api.py @@ -109,7 +103,6 @@ extend_skip= chia/util/profiler.py chia/util/service_groups.py chia/util/ssl_check.py - chia/util/streamable.py chia/util/ws_message.py chia/wallet/cat_wallet/cat_info.py chia/wallet/cat_wallet/cat_utils.py @@ -191,7 +184,6 @@ extend_skip= tests/core/util/test_files.py tests/core/util/test_keychain.py tests/core/util/test_keyring_wrapper.py - tests/core/util/test_streamable.py tests/generator/test_compression.py tests/generator/test_generator_types.py tests/generator/test_list_to_batches.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 343909b93ca3..e8a9a7844f94 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,76 @@ for setuptools_scm/PEP 440 reasons. 
## [Unreleased] +## 1.3.4 Chia blockchain 2022-4-19 + +## What's Changed + +### Added + +- Creating an offer now allows you to edit the exchange between two tokens, auto-calculating either the sending token amount or the receiving token amount +- When making an offer, makers can now create an offer including a fee to help get the transaction into the mempool when an offer is accepted +- Implemented `chia rpc` command +- New RPC `get_coin_records_by_hint` - Get coins for a given hint (Thanks @freddiecoleman) +- Add maker fee to remaining offer RPCs +- Add healthcheck endpoint to RPC services +- Optional wallet type parameter for `get_wallets` and `wallet show` +- Add `select_coins` RPC method (Thanks @ftruzzi) +- Added `-n`/`--new-address` option to `chia wallet get_address` +- New DBWrapper supporting concurrent readers +- Added `config.yaml` option to run the `full_node` in single-threaded mode +- Build CLI-only version of debs +- Add `/get_stray_cats` API for accessing unknown CATs + +### Changed + +- Left navigation bar in the GUI has been reorganized and icons have been updated +- Settings has been moved to the new left hand nav bar +- Token selection has been changed to a permanent column in the GUI instead of the drop-down list +- Manage token option has been added at the bottom of the Token column to allow users to show/hide token wallets +- Users can show/hide token wallets. If you have auto-discover cats in config.yaml turned off, new tokens will still show up there, but those wallets won’t get created until the token has been toggled on for the first time +- CATs now have a link to Taildatabase.com to look up the Asset ID +- Ongoing improvements to the internal test framework for speed and reliability. +- Significant harvester protocol update: You will need to update your farmer and all your harvesters as this is a breaking change in the harvester protocol. The new protocol solves many scaling issues. In particular, the protocol supports sending delta changes to the farmer - so for example, adding plots to a farm results in only the new plots being reported. We recommend you update your farmer first. +- Updated clvm_tools to 0.4.4 +- Updated clvm_tools_rs to 0.1.7 +- Changed code to use the Rust implementation of clvm_tools (clvm_tools_rs) by default +- Consolidated socket library to aiohttp and removed websockets dependency +- During node startup, missing blocks in the DB will throw an exception +- Updated cryptography to 36.0.2 +- The rust implementation of CLVM is now called `chia_rs` instead of `clvm_rs`.
+- Updated code to use improved rust interface `run_generator2` +- Code improvements to prefer connecting to a local trusted node over untrusted nodes + +### Fixed + +- Fixed issues with claiming self-pool rewards with and without a fee +- Fixed wallet creation in edge cases around chain reorgs +- Harvester: Reuse legacy refresh interval if new params aren't available +- Fixed typos `lastest` > `latest` (Thanks @daverof) +- Fixed typo in command line argument parsing for `chia db validate` +- Improved backwards compatibility for node RPC calls `get_blockchain_state` and `get_additions_and_removals` +- Fixed issue where `--root_path` option was not honored by `chia configure` CLI command +- Fixed cases where node DB was not created initially using v2 format +- Improved error messages from `chia db upgrade` +- Capitalized display of `Rpc` -> `RPC` in `chia show -s` (Thanks @hugepants) +- Improved handling of chain reorgs with atomic rollback for the wallet +- Handled cases where one node doesn't have the coin we are looking for +- Fixed timelord installation for Debian +- Checked for requested items when creating an offer +- Minor output formatting/enhancements for `chia wallet show` +- Fixed typo and index issues in wallet database +- Used the Rust CLVM version instead of Python in more places +- Fixed trailing bytes shown in CAT asset ID row when using `chia wallet show` +- Maintain all chain state during reorg until the new fork has been fully validated +- Improved performance of `get_coin_records_by_names` by using proper index (Thanks @roseiliend) +- Improved handling of unknown pending balances +- Improved plot load times + +### Known Issues + +- You cannot install and run chia blockchain using the macOS packaged DMG on macOS Mojave (10.14). +- Pending transactions are not retried correctly and so can be stuck in the pending state unless manually removed and re-submitted + ## 1.3.3 Chia blockchain 2022-4-02 ### Fixed @@ -49,7 +119,6 @@ for setuptools_scm/PEP 440 reasons. - Update the database queries for the `block_count_metrics` RPC endpoint to utilize indexes effectively for V2 DBs. - Several improvements to tests. - ## 1.3.0 Chia blockchain 2022-3-07 ### Added: @@ -139,7 +208,6 @@ for setuptools_scm/PEP 440 reasons. - Workaround: Restart the GUI, or clear unconfirmed TX. - Claiming rewards when self-pooling using CLI will show an error message, but it will actually create the transaction. - ## 1.2.11 Chia blockchain 2021-11-4 Farmers rejoice: today's release integrates two plotters in broad use in the Chia community: Bladebit, created by @harold-b, and Madmax, created by @madMAx43v3r. Both of these plotters bring significant improvements in plotting time. More plotting info [here](https://github.com/Chia-Network/chia-blockchain/wiki/Alternative--Plotters). @@ -174,7 +242,6 @@ This release also includes several important performance improvements as a resul - PlotNFT transactions via CLI (e.g. `chia plotnft join`) now accept a fee parameter, but it is not yet operable. - ## 1.2.10 Chia blockchain 2021-10-25 We have some great improvements in this release: We launched our migration of keys to a common encrypted keyring.yaml file, and we secure this with an optional passphrase in both GUI and CLI. We've added a passphrase hint in case you forget your passphrase. More info on our [wiki](https://github.com/Chia-Network/chia-blockchain/wiki/Passphrase-Protected-Chia-Keys-and-Key-Storage-Migration).
We also launched a new Chialisp compiler in clvm_tools_rs which substantially improves compile time for Chialisp developers. We also addressed a widely reported issue in which a system failure, such as a power outage, would require some farmers to sync their full node from zero. This release also includes several other improvements and fixes. diff --git a/build_scripts/assets/deb/__init__.py b/build_scripts/assets/deb/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/build_scripts/assets/deb/control.j2 b/build_scripts/assets/deb/control.j2 new file mode 100644 index 000000000000..29e9d2678059 --- /dev/null +++ b/build_scripts/assets/deb/control.j2 @@ -0,0 +1,6 @@ +Package: chia-blockchain-cli +Version: {{ CHIA_INSTALLER_VERSION }} +Architecture: {{ PLATFORM }} +Maintainer: Chia Network Inc +Description: Chia Blockchain + Chia is a modern cryptocurrency built from scratch, designed to be efficient, decentralized, and secure. diff --git a/build_scripts/build_linux_deb.sh b/build_scripts/build_linux_deb.sh index ea493bafe525..ba74d7b46313 100644 --- a/build_scripts/build_linux_deb.sh +++ b/build_scripts/build_linux_deb.sh @@ -12,6 +12,7 @@ else PLATFORM="$1" DIR_NAME="chia-blockchain-linux-arm64" fi +export PLATFORM # If the env variable NOTARIZE and the username and password variables are # set, this will attempt to Notarize the signed DMG @@ -21,6 +22,7 @@ if [ ! "$CHIA_INSTALLER_VERSION" ]; then CHIA_INSTALLER_VERSION="0.0.0" fi echo "Chia Installer Version is: $CHIA_INSTALLER_VERSION" +export CHIA_INSTALLER_VERSION echo "Installing npm and electron packagers" cd npm_linux_deb || exit @@ -33,7 +35,6 @@ rm -rf dist mkdir dist echo "Create executables with pyinstaller" -pip install pyinstaller==4.9 SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" LAST_EXIT_CODE=$? @@ -42,6 +43,19 @@ if [ "$LAST_EXIT_CODE" -ne 0 ]; then exit $LAST_EXIT_CODE fi +# Builds CLI only .deb +# need j2 for templating the control file +pip install j2cli +CLI_DEB_BASE="chia-blockchain-cli_$CHIA_INSTALLER_VERSION-1_$PLATFORM" +mkdir -p "dist/$CLI_DEB_BASE/opt/chia" +mkdir -p "dist/$CLI_DEB_BASE/usr/bin" +mkdir -p "dist/$CLI_DEB_BASE/DEBIAN" +j2 -o "dist/$CLI_DEB_BASE/DEBIAN/control" assets/deb/control.j2 +cp -r dist/daemon/* "dist/$CLI_DEB_BASE/opt/chia/" +ln -s ../../opt/chia/chia "dist/$CLI_DEB_BASE/usr/bin/chia" +dpkg-deb --build --root-owner-group "dist/$CLI_DEB_BASE" +# CLI only .deb done + cp -r dist/daemon ../chia-blockchain-gui/packages/gui cd .. || exit cd chia-blockchain-gui || exit @@ -92,4 +106,7 @@ if [ "$LAST_EXIT_CODE" -ne 0 ]; then exit $LAST_EXIT_CODE fi +# Move the cli only deb into final installers as well, so it gets uploaded as an artifact +mv "dist/$CLI_DEB_BASE.deb" final_installer/ + ls final_installer/ diff --git a/build_scripts/build_linux_rpm.sh b/build_scripts/build_linux_rpm.sh index 7ec656eeef8e..60829b193737 100644 --- a/build_scripts/build_linux_rpm.sh +++ b/build_scripts/build_linux_rpm.sh @@ -35,7 +35,6 @@ rm -rf dist mkdir dist echo "Create executables with pyinstaller" -pip install pyinstaller==4.9 SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" LAST_EXIT_CODE=$? 
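For reference, the CLI-only .deb that build_linux_deb.sh assembles above is just a directory tree plus a j2-templated DEBIAN/control; here is a minimal sketch of the equivalent steps run by hand (version and platform values are hypothetical, the script derives them from its argument and the CI environment):

    # Hypothetical values; the real script exports these before this point.
    export CHIA_INSTALLER_VERSION="0.0.0"
    export PLATFORM="amd64"
    CLI_DEB_BASE="chia-blockchain-cli_${CHIA_INSTALLER_VERSION}-1_${PLATFORM}"
    mkdir -p "dist/$CLI_DEB_BASE/DEBIAN"
    # j2 substitutes {{ CHIA_INSTALLER_VERSION }} and {{ PLATFORM }} from the environment
    j2 -o "dist/$CLI_DEB_BASE/DEBIAN/control" assets/deb/control.j2
    dpkg-deb --build --root-owner-group "dist/$CLI_DEB_BASE"
    # Sanity-check the result before it is moved into final_installer/
    dpkg-deb --info "dist/$CLI_DEB_BASE.deb"      # rendered control fields
    dpkg-deb --contents "dist/$CLI_DEB_BASE.deb"  # /opt/chia payload and the /usr/bin/chia symlink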
@@ -44,6 +43,31 @@ if [ "$LAST_EXIT_CODE" -ne 0 ]; then exit $LAST_EXIT_CODE fi +# Builds CLI only rpm +CLI_RPM_BASE="chia-blockchain-cli-$CHIA_INSTALLER_VERSION-1.$REDHAT_PLATFORM" +mkdir -p "dist/$CLI_RPM_BASE/opt/chia" +mkdir -p "dist/$CLI_RPM_BASE/usr/bin" +cp -r dist/daemon/* "dist/$CLI_RPM_BASE/opt/chia/" +ln -s ../../opt/chia/chia "dist/$CLI_RPM_BASE/usr/bin/chia" +# This is built into the base build image +# shellcheck disable=SC1091 +. /etc/profile.d/rvm.sh +rvm use ruby-3 +# /usr/lib64/libcrypt.so.1 is marked as a dependency specifically because newer versions of fedora bundle +# libcrypt.so.2 by default, and the libxcrypt-compat package needs to be installed for the other version +# Marking as a dependency allows yum/dnf to automatically install the libxcrypt-compat package as well +fpm -s dir -t rpm \ + -C "dist/$CLI_RPM_BASE" \ + -p "dist/$CLI_RPM_BASE.rpm" \ + --name chia-blockchain-cli \ + --license Apache-2.0 \ + --version "$CHIA_INSTALLER_VERSION" \ + --architecture "$REDHAT_PLATFORM" \ + --description "Chia is a modern cryptocurrency built from scratch, designed to be efficient, decentralized, and secure." \ + --depends /usr/lib64/libcrypt.so.1 \ + . +# CLI only rpm done + cp -r dist/daemon ../chia-blockchain-gui/packages/gui cd .. || exit cd chia-blockchain-gui || exit @@ -110,4 +134,7 @@ if [ "$REDHAT_PLATFORM" = "x86_64" ]; then fi fi +# Move the cli only rpm into final installers as well, so it gets uploaded as an artifact +mv "dist/$CLI_RPM_BASE.rpm" final_installer/ + ls final_installer/ diff --git a/build_scripts/build_macos.sh b/build_scripts/build_macos.sh index 2cacc4d9fb3c..35aedaef97eb 100644 --- a/build_scripts/build_macos.sh +++ b/build_scripts/build_macos.sh @@ -22,7 +22,6 @@ sudo rm -rf dist mkdir dist echo "Create executables with pyinstaller" -pip install pyinstaller==4.9 SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" LAST_EXIT_CODE=$? 
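Similarly, one quick way to verify the CLI-only rpm produced by the fpm invocation above is to query the metadata fpm bakes in, in particular the /usr/lib64/libcrypt.so.1 dependency that lets dnf pull in libxcrypt-compat automatically (the filename below is illustrative; REDHAT_PLATFORM would be x86_64 on Intel builds):

    # List the dependencies recorded in the freshly built package
    rpm -qpR dist/chia-blockchain-cli-0.0.0-1.x86_64.rpm   # should include /usr/lib64/libcrypt.so.1
    # List the payload: /opt/chia/... plus the /usr/bin/chia symlink
    rpm -qpl dist/chia-blockchain-cli-0.0.0-1.x86_64.rpm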
diff --git a/build_scripts/build_macos_m1.sh b/build_scripts/build_macos_m1.sh index 45f063373ba9..95582cb9fc24 100644 --- a/build_scripts/build_macos_m1.sh +++ b/build_scripts/build_macos_m1.sh @@ -21,9 +21,6 @@ echo "Create dist/" sudo rm -rf dist mkdir dist -echo "Install pyinstaller and build bootloaders for M1" -pip install pyinstaller==4.9 - echo "Create executables with pyinstaller" SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" diff --git a/build_scripts/installer-version.py b/build_scripts/installer-version.py index d9d71e02e7cf..12e13b0d934e 100644 --- a/build_scripts/installer-version.py +++ b/build_scripts/installer-version.py @@ -1,4 +1,3 @@ -import os import sys from setuptools_scm import get_version @@ -10,7 +9,6 @@ def main(): scm_full_version = get_version(root="..", relative_to=__file__) # scm_full_version = "1.0.5.dev22" - os.environ["SCM_VERSION"] = scm_full_version left_full_version = scm_full_version.split("+") version = left_full_version[0].split(".") diff --git a/chia-blockchain-gui b/chia-blockchain-gui index fccbd3e10d27..81303fb962f4 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit fccbd3e10d27673e39c01f0f89e47b5455b8331a +Subproject commit 81303fb962f4a627a2e1c55098e187a9057745da diff --git a/chia/cmds/cmds_util.py b/chia/cmds/cmds_util.py new file mode 100644 index 000000000000..f2c764bfc9a1 --- /dev/null +++ b/chia/cmds/cmds_util.py @@ -0,0 +1,12 @@ +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.types.mempool_submission_status import MempoolSubmissionStatus +from chia.wallet.transaction_record import TransactionRecord + + +def transaction_submitted_msg(tx: TransactionRecord) -> str: + sent_to = [MempoolSubmissionStatus(s[0], s[1], s[2]).to_json_dict_convenience() for s in tx.sent_to] + return f"Transaction submitted to nodes: {sent_to}" + + +def transaction_status_msg(fingerprint: int, tx_id: bytes32) -> str: + return f"Run 'chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id}' to get status" diff --git a/chia/cmds/keys_funcs.py b/chia/cmds/keys_funcs.py index b153ea95daff..1d9af0580756 100644 --- a/chia/cmds/keys_funcs.py +++ b/chia/cmds/keys_funcs.py @@ -17,7 +17,6 @@ master_sk_to_pool_sk, master_sk_to_wallet_sk, master_sk_to_wallet_sk_unhardened, - master_pk_to_wallet_pk_unhardened, ) @@ -98,13 +97,7 @@ def show_all_keys(show_mnemonic: bool, non_observer_derivation: bool): if non_observer_derivation else master_sk_to_wallet_sk_unhardened(sk, uint32(0)) ) - # Test pk derivation - wallet_root_unhardened = master_pk_to_wallet_pk_unhardened(sk.get_g1(), uint32(0)) wallet_address: str = encode_puzzle_hash(create_puzzlehash_for_pk(first_wallet_sk.get_g1()), prefix) - wallet_address_from_unhard_root: str = encode_puzzle_hash( - create_puzzlehash_for_pk(wallet_root_unhardened), prefix - ) - assert wallet_address == wallet_address_from_unhard_root print(f"First wallet address{' (non-observer)' if non_observer_derivation else ''}: {wallet_address}") assert seed is not None if show_mnemonic: diff --git a/chia/cmds/plotnft_funcs.py b/chia/cmds/plotnft_funcs.py index b70f202a5dd8..e739c49f6bf0 100644 --- a/chia/cmds/plotnft_funcs.py +++ b/chia/cmds/plotnft_funcs.py @@ -26,6 +26,7 @@ from chia.util.config import load_config from chia.util.default_root import DEFAULT_ROOT_PATH from chia.util.ints import uint16, uint32, uint64 +from chia.cmds.cmds_util import transaction_submitted_msg, transaction_status_msg from 
chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.wallet_types import WalletType @@ -100,8 +101,8 @@ async def create(args: dict, wallet_client: WalletRpcClient, fingerprint: int) - await asyncio.sleep(0.1) tx = await wallet_client.get_transaction(str(1), tx_record.name) if len(tx.sent_to) > 0: - print(f"Transaction submitted to nodes: {tx.sent_to}") - print(f"Do chia wallet get_transaction -f {fingerprint} -tx 0x{tx_record.name} to get status") + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, tx_record.name)) return None except Exception as e: print(f"Error creating plot NFT: {e}\n Please start both farmer and wallet with: chia start -r farmer") @@ -286,8 +287,8 @@ async def submit_tx_with_confirmation( await asyncio.sleep(0.1) tx = await wallet_client.get_transaction(str(1), tx_record.name) if len(tx.sent_to) > 0: - print(f"Transaction submitted to nodes: {tx.sent_to}") - print(f"Do chia wallet get_transaction -f {fingerprint} -tx 0x{tx_record.name} to get status") + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, tx_record.name)) return None except Exception as e: print(f"Error performing operation on Plot NFT -f {fingerprint} wallet id: {wallet_id}: {e}") diff --git a/chia/cmds/start_funcs.py b/chia/cmds/start_funcs.py index 82bc4fccc86c..ed36fec90b53 100644 --- a/chia/cmds/start_funcs.py +++ b/chia/cmds/start_funcs.py @@ -62,9 +62,7 @@ async def async_start(root_path: Path, group: str, restart: bool) -> None: if await daemon.is_running(service_name=service): print(f"{service}: ", end="", flush=True) if restart: - if not await daemon.is_running(service_name=service): - print("not running") - elif await daemon.stop_service(service_name=service): + if await daemon.stop_service(service_name=service): print("stopped") else: print("stop failed") diff --git a/chia/cmds/wallet.py b/chia/cmds/wallet.py index b2e93954051c..9486e3947059 100644 --- a/chia/cmds/wallet.py +++ b/chia/cmds/wallet.py @@ -4,6 +4,7 @@ import click from chia.wallet.util.wallet_types import WalletType +from chia.wallet.transaction_sorting import SortKey @click.group("wallet", short_help="Manage your wallet") @@ -65,6 +66,25 @@ def get_transaction_cmd(wallet_rpc_port: Optional[int], fingerprint: int, id: in default=None, help="Prompt for each page of data. 
Defaults to true for interactive consoles, otherwise false.", ) +@click.option( + "--sort-by-height", + "sort_key", + flag_value=SortKey.CONFIRMED_AT_HEIGHT, + help="Sort transactions by height", +) +@click.option( + "--sort-by-relevance", + "sort_key", + flag_value=SortKey.RELEVANCE, + default=True, + help="Sort transactions by {confirmed, height, time}", +) +@click.option( + "--reverse", + is_flag=True, + default=False, + help="Reverse the transaction ordering", +) def get_transactions_cmd( wallet_rpc_port: Optional[int], fingerprint: int, @@ -73,8 +93,19 @@ def get_transactions_cmd( limit: int, verbose: bool, paginate: Optional[bool], + sort_key: SortKey, + reverse: bool, ) -> None: - extra_params = {"id": id, "verbose": verbose, "offset": offset, "paginate": paginate, "limit": limit} + extra_params = { + "id": id, + "verbose": verbose, + "offset": offset, + "paginate": paginate, + "limit": limit, + "sort_key": sort_key, + "reverse": reverse, + } + import asyncio from .wallet_funcs import execute_with_wallet, get_transactions diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index 5f5effbadf20..1522d21c87c7 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -18,15 +18,31 @@ from chia.util.config import load_config from chia.util.default_root import DEFAULT_ROOT_PATH from chia.util.ints import uint16, uint32, uint64 +from chia.cmds.cmds_util import transaction_submitted_msg, transaction_status_msg from chia.wallet.trade_record import TradeRecord from chia.wallet.trading.offer import Offer from chia.wallet.trading.trade_status import TradeStatus from chia.wallet.transaction_record import TransactionRecord +from chia.wallet.util.transaction_type import TransactionType from chia.wallet.util.wallet_types import WalletType CATNameResolver = Callable[[bytes32], Awaitable[Optional[Tuple[Optional[uint32], str]]]] +transaction_type_descriptions = { + TransactionType.INCOMING_TX: "received", + TransactionType.OUTGOING_TX: "sent", + TransactionType.COINBASE_REWARD: "rewarded", + TransactionType.FEE_REWARD: "rewarded", + TransactionType.INCOMING_TRADE: "received in trade", + TransactionType.OUTGOING_TRADE: "sent in trade", +} + + +def transaction_description_from_type(tx: TransactionRecord) -> str: + return transaction_type_descriptions.get(TransactionType(tx.type), "(unknown reason)") + + def print_transaction(tx: TransactionRecord, verbose: bool, name, address_prefix: str, mojo_per_unit: int) -> None: if verbose: print(tx) @@ -35,7 +51,8 @@ def print_transaction(tx: TransactionRecord, verbose: bool, name, address_prefix to_address = encode_puzzle_hash(tx.to_puzzle_hash, address_prefix) print(f"Transaction {tx.name}") print(f"Status: {'Confirmed' if tx.confirmed else ('In mempool' if tx.is_in_mempool() else 'Pending')}") - print(f"Amount {'sent' if tx.sent else 'received'}: {chia_amount} {name}") + description = transaction_description_from_type(tx) + print(f"Amount {description}: {chia_amount} {name}") print(f"To address: {to_address}") print("Created at:", datetime.fromtimestamp(tx.created_at_time).strftime("%Y-%m-%d %H:%M:%S")) print("") @@ -115,9 +132,13 @@ async def get_transactions(args: dict, wallet_client: WalletRpcClient, fingerpri paginate = sys.stdout.isatty() offset = args["offset"] limit = args["limit"] + sort_key = args["sort_key"] + reverse = args["reverse"] + txs: List[TransactionRecord] = await wallet_client.get_transactions( - wallet_id, start=offset, end=(offset + limit), reverse=True + wallet_id, start=offset, end=(offset + limit), 
sort_key=sort_key, reverse=reverse ) + config = load_config(DEFAULT_ROOT_PATH, "config.yaml", SERVICE_NAME) address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"] if len(txs) == 0: @@ -208,8 +229,8 @@ async def send(args: dict, wallet_client: WalletRpcClient, fingerprint: int) -> await asyncio.sleep(0.1) tx = await wallet_client.get_transaction(str(wallet_id), tx_id) if len(tx.sent_to) > 0: - print(f"Transaction submitted to nodes: {tx.sent_to}") - print(f"Do chia wallet get_transaction -f {fingerprint} -tx 0x{tx_id} to get status") + print(transaction_submitted_msg(tx)) + print(transaction_status_msg(fingerprint, tx_id)) return None print("Transaction not yet submitted to nodes") diff --git a/chia/daemon/client.py b/chia/daemon/client.py index 88677d689084..b4c83c07f398 100644 --- a/chia/daemon/client.py +++ b/chia/daemon/client.py @@ -67,7 +67,7 @@ async def _get(self, request: WsRpcMessage) -> WsRpcMessage: request_id = request["request_id"] self._request_dict[request_id] = asyncio.Event() string = dict_to_json_str(request) - if self.websocket is None: + if self.websocket is None or self.websocket.closed: raise Exception("Websocket is not connected") asyncio.create_task(self.websocket.send_str(string)) diff --git a/chia/daemon/server.py b/chia/daemon/server.py index 014bb2a78259..3079d8ce878f 100644 --- a/chia/daemon/server.py +++ b/chia/daemon/server.py @@ -220,7 +220,7 @@ async def incoming_connection(self, request): while True: msg = await ws.receive() - self.log.debug(f"Received message: {msg}") + self.log.debug("Received message: %s", msg) if msg.type == WSMsgType.TEXT: try: decoded = json.loads(msg.data) diff --git a/chia/farmer/farmer.py b/chia/farmer/farmer.py index f7e50e9ed89e..c2a6a084854e 100644 --- a/chia/farmer/farmer.py +++ b/chia/farmer/farmer.py @@ -3,14 +3,13 @@ import logging import time from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, Callable, Dict, List, Optional, Tuple, Set import traceback import aiohttp from blspy import AugSchemeMPL, G1Element, G2Element, PrivateKey import chia.server.ws_connection as ws # lgtm [py/import-and-import-from] -from chia.consensus.coinbase import create_puzzlehash_for_pk from chia.consensus.constants import ConsensusConstants from chia.daemon.keychain_proxy import ( KeychainProxy, @@ -18,20 +17,20 @@ connect_to_keychain_and_validate, wrap_local_keychain, ) -from chia.plot_sync.receiver import Receiver from chia.plot_sync.delta import Delta -from chia.pools.pool_config import PoolWalletConfig, load_pool_config, add_auth_key +from chia.plot_sync.receiver import Receiver +from chia.pools.pool_config import PoolWalletConfig, add_auth_key, load_pool_config from chia.protocols import farmer_protocol, harvester_protocol from chia.protocols.pool_protocol import ( + AuthenticationPayload, ErrorResponse, - get_current_authentication_token, GetFarmerResponse, PoolErrorCode, PostFarmerPayload, PostFarmerRequest, PutFarmerPayload, PutFarmerRequest, - AuthenticationPayload, + get_current_authentication_token, ) from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.server.outbound_message import NodeType, make_msg @@ -42,16 +41,16 @@ from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.bech32m import decode_puzzle_hash from chia.util.byte_types import hexstr_to_bytes -from chia.util.config import load_config, lock_and_load_config, save_config, config_path_for_filename +from chia.util.config 
import config_path_for_filename, load_config, lock_and_load_config, save_config from chia.util.hash import std_hash from chia.util.ints import uint8, uint16, uint32, uint64 from chia.util.keychain import Keychain from chia.wallet.derive_keys import ( - master_sk_to_farmer_sk, - master_sk_to_pool_sk, - master_sk_to_wallet_sk, find_authentication_sk, find_owner_sk, + master_sk_to_farmer_sk, + master_sk_to_pool_sk, + match_address_to_sk, ) from chia.wallet.puzzles.singleton_top_layer import SINGLETON_MOD @@ -60,6 +59,7 @@ log = logging.getLogger(__name__) UPDATE_POOL_INFO_INTERVAL: int = 3600 +UPDATE_POOL_INFO_FAILURE_RETRY_INTERVAL: int = 120 UPDATE_POOL_FARMER_INFO_INTERVAL: int = 300 """ @@ -468,6 +468,8 @@ async def update_pool_state(self): # Only update the first time from GET /pool_info, gets updated from GET /farmer later if pool_state["current_difficulty"] is None: pool_state["current_difficulty"] = pool_info["minimum_difficulty"] + else: + pool_state["next_pool_info_update"] = time.time() + UPDATE_POOL_INFO_FAILURE_RETRY_INTERVAL if time.time() >= pool_state["next_farmer_update"]: pool_state["next_farmer_update"] = time.time() + UPDATE_POOL_FARMER_INFO_INTERVAL @@ -500,7 +502,7 @@ async def update_pool_farmer_info() -> Tuple[Optional[GetFarmerResponse], Option farmer_info, error_code = await update_pool_farmer_info() if error_code == PoolErrorCode.FARMER_NOT_KNOWN: # Make the farmer known on the pool with a POST /farmer - owner_sk_and_index: Optional[PrivateKey, uint32] = find_owner_sk( + owner_sk_and_index: Optional[Tuple[G1Element, uint32]] = find_owner_sk( self.all_root_sks, pool_config.owner_public_key ) assert owner_sk_and_index is not None @@ -524,7 +526,7 @@ async def update_pool_farmer_info() -> Tuple[Optional[GetFarmerResponse], Option and pool_config.payout_instructions.lower() != farmer_info.payout_instructions.lower() ) if payout_instructions_update_required or error_code == PoolErrorCode.INVALID_SIGNATURE: - owner_sk_and_index: Optional[PrivateKey, uint32] = find_owner_sk( + owner_sk_and_index: Optional[Tuple[G1Element, uint32]] = find_owner_sk( self.all_root_sks, pool_config.owner_public_key ) assert owner_sk_and_index is not None @@ -547,25 +549,30 @@ def get_public_keys(self): def get_private_keys(self): return self._private_keys - async def get_reward_targets(self, search_for_private_key: bool) -> Dict: + async def get_reward_targets(self, search_for_private_key: bool, max_ph_to_search: int = 500) -> Dict: if search_for_private_key: all_sks = await self.get_all_private_keys() - stop_searching_for_farmer, stop_searching_for_pool = False, False - for i in range(500): - if stop_searching_for_farmer and stop_searching_for_pool and i > 0: + have_farmer_sk, have_pool_sk = False, False + search_addresses: List[bytes32] = [self.farmer_target, self.pool_target] + for sk, _ in all_sks: + found_addresses: Set[bytes32] = match_address_to_sk(sk, search_addresses, max_ph_to_search) + + if not have_farmer_sk and self.farmer_target in found_addresses: + search_addresses.remove(self.farmer_target) + have_farmer_sk = True + + if not have_pool_sk and self.pool_target in found_addresses: + search_addresses.remove(self.pool_target) + have_pool_sk = True + + if have_farmer_sk and have_pool_sk: break - for sk, _ in all_sks: - ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(i)).get_g1()) - if ph == self.farmer_target: - stop_searching_for_farmer = True - if ph == self.pool_target: - stop_searching_for_pool = True return { "farmer_target": self.farmer_target_encoded, 
"pool_target": self.pool_target_encoded, - "have_farmer_sk": stop_searching_for_farmer, - "have_pool_sk": stop_searching_for_pool, + "have_farmer_sk": have_farmer_sk, + "have_pool_sk": have_pool_sk, } return { "farmer_target": self.farmer_target_encoded, diff --git a/chia/farmer/farmer_api.py b/chia/farmer/farmer_api.py index 9e94a5052334..87ce5525fffb 100644 --- a/chia/farmer/farmer_api.py +++ b/chia/farmer/farmer_api.py @@ -1,6 +1,6 @@ import json import time -from typing import Callable, Optional, List, Any, Dict, Tuple +from typing import Any, Callable, Dict, List, Optional, Tuple import aiohttp from blspy import AugSchemeMPL, G2Element, PrivateKey @@ -12,17 +12,17 @@ from chia.farmer.farmer import Farmer from chia.protocols import farmer_protocol, harvester_protocol from chia.protocols.harvester_protocol import ( - PoolDifficulty, - PlotSyncStart, - PlotSyncPlotList, - PlotSyncPathList, PlotSyncDone, + PlotSyncPathList, + PlotSyncPlotList, + PlotSyncStart, + PoolDifficulty, ) from chia.protocols.pool_protocol import ( - get_current_authentication_token, PoolErrorCode, - PostPartialRequest, PostPartialPayload, + PostPartialRequest, + get_current_authentication_token, ) from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.server.outbound_message import NodeType, make_msg diff --git a/chia/full_node/coin_store.py b/chia/full_node/coin_store.py index 9f24e68e45fe..0013e9df098d 100644 --- a/chia/full_node/coin_store.py +++ b/chia/full_node/coin_store.py @@ -428,6 +428,7 @@ async def rollback_to_block(self, block_index: int) -> List[CoinRecord]: self.coin_record_cache.remove(coin_name) coin_changes: Dict[bytes32, CoinRecord] = {} + # Add coins that are confirmed in the reverted blocks to the list of updated coins. async with self.db_wrapper.write_db() as conn: async with conn.execute( "SELECT confirmed_index, spent_index, coinbase, puzzle_hash, " @@ -439,12 +440,13 @@ async def rollback_to_block(self, block_index: int) -> List[CoinRecord]: record = CoinRecord(coin, uint32(0), row[1], row[2], uint64(0)) coin_changes[record.name] = record - # Delete from storage + # Delete reverted blocks from storage await conn.execute("DELETE FROM coin_record WHERE confirmed_index>?", (block_index,)) + # Add coins that are confirmed in the reverted blocks to the list of changed coins. async with conn.execute( "SELECT confirmed_index, spent_index, coinbase, puzzle_hash, " - "coin_parent, amount, timestamp FROM coin_record WHERE confirmed_index>?", + "coin_parent, amount, timestamp FROM coin_record WHERE spent_index>?", (block_index,), ) as cursor: for row in await cursor.fetchall(): diff --git a/chia/full_node/mempool_check_conditions.py b/chia/full_node/mempool_check_conditions.py index 1457a29f4560..04bae27dd05d 100644 --- a/chia/full_node/mempool_check_conditions.py +++ b/chia/full_node/mempool_check_conditions.py @@ -1,6 +1,6 @@ import logging from typing import Dict, Optional -from chia_rs import MEMPOOL_MODE, COND_CANON_INTS, NO_NEG_DIV, STRICT_ARGS_COUNT +from chia_rs import MEMPOOL_MODE, COND_CANON_INTS, NO_NEG_DIV from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.consensus.cost_calculator import NPCResult @@ -42,8 +42,7 @@ def get_name_puzzle_conditions( assert (MEMPOOL_MODE & NO_NEG_DIV) != 0 if mempool_mode: - # Don't apply the strict args count rule yet - flags = MEMPOOL_MODE & (~STRICT_ARGS_COUNT) + flags = MEMPOOL_MODE elif unwrap(height) >= DEFAULT_CONSTANTS.SOFT_FORK_HEIGHT: # conditions must use integers in canonical encoding (i.e. 
no redundant # leading zeros) diff --git a/chia/full_node/weight_proof.py b/chia/full_node/weight_proof.py index 7756b8e65a80..451499e9615f 100644 --- a/chia/full_node/weight_proof.py +++ b/chia/full_node/weight_proof.py @@ -99,7 +99,7 @@ def get_sub_epoch_data(self, tip_height: uint32, summary_heights: List[uint32]) if ses_height > tip_height: break ses = self.blockchain.get_ses(ses_height) - log.debug(f"handle sub epoch summary {sub_epoch_n} at height: {ses_height} ses {ses}") + log.debug("handle sub epoch summary %s at height: %s ses %s", sub_epoch_n, ses_height, ses) sub_epoch_data.append(_create_sub_epoch_data(ses)) return sub_epoch_data diff --git a/chia/harvester/harvester.py b/chia/harvester/harvester.py index fab6a77da9ca..ff5ba444f271 100644 --- a/chia/harvester/harvester.py +++ b/chia/harvester/harvester.py @@ -11,13 +11,13 @@ from chia.plot_sync.sender import Sender from chia.plotting.manager import PlotManager from chia.plotting.util import ( + PlotRefreshEvents, + PlotRefreshResult, + PlotsRefreshParameter, add_plot_directory, get_plot_directories, - remove_plot_directory, remove_plot, - PlotsRefreshParameter, - PlotRefreshResult, - PlotRefreshEvents, + remove_plot_directory, ) from chia.util.streamable import dataclass_from_dict diff --git a/chia/harvester/harvester_api.py b/chia/harvester/harvester_api.py index 88528678594d..26197d7d5296 100644 --- a/chia/harvester/harvester_api.py +++ b/chia/harvester/harvester_api.py @@ -3,7 +3,7 @@ from pathlib import Path from typing import Callable, List, Tuple -from blspy import AugSchemeMPL, G2Element, G1Element +from blspy import AugSchemeMPL, G1Element, G2Element from chia.consensus.pot_iterations import calculate_iterations_quality, calculate_sp_interval_iters from chia.harvester.harvester import Harvester diff --git a/chia/plotting/cache.py b/chia/plotting/cache.py new file mode 100644 index 000000000000..58d73a2b9831 --- /dev/null +++ b/chia/plotting/cache.py @@ -0,0 +1,176 @@ +import logging +import time +import traceback +from dataclasses import dataclass, field +from pathlib import Path +from typing import Dict, ItemsView, KeysView, List, Optional, Tuple, ValuesView + +from blspy import G1Element +from chiapos import DiskProver + +from chia.plotting.util import parse_plot_info +from chia.types.blockchain_format.proof_of_space import ProofOfSpace +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.ints import uint16, uint64 +from chia.util.path import mkdir +from chia.util.streamable import Streamable, streamable +from chia.wallet.derive_keys import master_sk_to_local_sk + +log = logging.getLogger(__name__) + +CURRENT_VERSION: int = 1 + + +@streamable +@dataclass(frozen=True) +class DiskCacheEntry(Streamable): + prover_data: bytes + farmer_public_key: G1Element + pool_public_key: Optional[G1Element] + pool_contract_puzzle_hash: Optional[bytes32] + plot_public_key: G1Element + last_use: uint64 + + +@streamable +@dataclass(frozen=True) +class DiskCache(Streamable): + version: uint16 + data: bytes + + +@streamable +@dataclass(frozen=True) +class CacheDataV1(Streamable): + entries: List[Tuple[str, DiskCacheEntry]] + + +@dataclass +class CacheEntry: + prover: DiskProver + farmer_public_key: G1Element + pool_public_key: Optional[G1Element] + pool_contract_puzzle_hash: Optional[bytes32] + plot_public_key: G1Element + last_use: float + + @classmethod + def from_disk_prover(cls, prover: DiskProver) -> "CacheEntry": + ( + pool_public_key_or_puzzle_hash, + farmer_public_key, + local_master_sk, + ) = 
parse_plot_info(prover.get_memo()) + + pool_public_key: Optional[G1Element] = None + pool_contract_puzzle_hash: Optional[bytes32] = None + if isinstance(pool_public_key_or_puzzle_hash, G1Element): + pool_public_key = pool_public_key_or_puzzle_hash + else: + assert isinstance(pool_public_key_or_puzzle_hash, bytes32) + pool_contract_puzzle_hash = pool_public_key_or_puzzle_hash + + local_sk = master_sk_to_local_sk(local_master_sk) + + plot_public_key: G1Element = ProofOfSpace.generate_plot_public_key( + local_sk.get_g1(), farmer_public_key, pool_contract_puzzle_hash is not None + ) + + return cls(prover, farmer_public_key, pool_public_key, pool_contract_puzzle_hash, plot_public_key, time.time()) + + def bump_last_use(self) -> None: + self.last_use = time.time() + + def expired(self, expiry_seconds: int) -> bool: + return time.time() - self.last_use > expiry_seconds + + +@dataclass +class Cache: + _path: Path + _changed: bool = False + _data: Dict[Path, CacheEntry] = field(default_factory=dict) + expiry_seconds: int = 7 * 24 * 60 * 60 # Keep the cache entries alive for 7 days after its last access + + def __post_init__(self) -> None: + mkdir(self._path.parent) + + def __len__(self) -> int: + return len(self._data) + + def update(self, path: Path, entry: CacheEntry) -> None: + self._data[path] = entry + self._changed = True + + def remove(self, cache_keys: List[Path]) -> None: + for key in cache_keys: + if key in self._data: + del self._data[key] + self._changed = True + + def save(self) -> None: + try: + disk_cache_entries: Dict[str, DiskCacheEntry] = { + str(path): DiskCacheEntry( + bytes(cache_entry.prover), + cache_entry.farmer_public_key, + cache_entry.pool_public_key, + cache_entry.pool_contract_puzzle_hash, + cache_entry.plot_public_key, + uint64(int(cache_entry.last_use)), + ) + for path, cache_entry in self.items() + } + cache_data: CacheDataV1 = CacheDataV1( + [(plot_id, cache_entry) for plot_id, cache_entry in disk_cache_entries.items()] + ) + disk_cache: DiskCache = DiskCache(uint16(CURRENT_VERSION), bytes(cache_data)) + serialized: bytes = bytes(disk_cache) + self._path.write_bytes(serialized) + self._changed = False + log.info(f"Saved {len(serialized)} bytes of cached data") + except Exception as e: + log.error(f"Failed to save cache: {e}, {traceback.format_exc()}") + + def load(self) -> None: + try: + serialized = self._path.read_bytes() + log.info(f"Loaded {len(serialized)} bytes of cached data") + stored_cache: DiskCache = DiskCache.from_bytes(serialized) + if stored_cache.version == CURRENT_VERSION: + cache_data: CacheDataV1 = CacheDataV1.from_bytes(stored_cache.data) + self._data = { + Path(path): CacheEntry( + DiskProver.from_bytes(cache_entry.prover_data), + cache_entry.farmer_public_key, + cache_entry.pool_public_key, + cache_entry.pool_contract_puzzle_hash, + cache_entry.plot_public_key, + float(cache_entry.last_use), + ) + for path, cache_entry in cache_data.entries + } + else: + raise ValueError(f"Invalid cache version {stored_cache.version}. 
Expected version {CURRENT_VERSION}.") + except FileNotFoundError: + log.debug(f"Cache {self._path} not found") + except Exception as e: + log.error(f"Failed to load cache: {e}, {traceback.format_exc()}") + + def keys(self) -> KeysView[Path]: + return self._data.keys() + + def values(self) -> ValuesView[CacheEntry]: + return self._data.values() + + def items(self) -> ItemsView[Path, CacheEntry]: + return self._data.items() + + def get(self, path: Path) -> Optional[CacheEntry]: + return self._data.get(path) + + def changed(self) -> bool: + return self._changed + + def path(self) -> Path: + return self._path diff --git a/chia/plotting/manager.py b/chia/plotting/manager.py index aa309795e95a..72a66f629d8f 100644 --- a/chia/plotting/manager.py +++ b/chia/plotting/manager.py @@ -1,4 +1,3 @@ -from dataclasses import dataclass import logging import threading import time @@ -11,107 +10,18 @@ from chiapos import DiskProver from chia.consensus.pos_quality import UI_ACTUAL_SPACE_CONSTANT_FACTOR, _expected_plot_size +from chia.plotting.cache import Cache, CacheEntry from chia.plotting.util import ( PlotInfo, PlotRefreshResult, PlotsRefreshParameter, PlotRefreshEvents, get_plot_filenames, - parse_plot_info, ) from chia.util.generator_tools import list_to_batches -from chia.util.ints import uint16 -from chia.util.path import mkdir -from chia.util.streamable import Streamable, streamable -from chia.types.blockchain_format.proof_of_space import ProofOfSpace -from chia.types.blockchain_format.sized_bytes import bytes32 -from chia.wallet.derive_keys import master_sk_to_local_sk log = logging.getLogger(__name__) -CURRENT_VERSION: uint16 = uint16(0) - - -@streamable -@dataclass(frozen=True) -class CacheEntry(Streamable): - pool_public_key: Optional[G1Element] - pool_contract_puzzle_hash: Optional[bytes32] - plot_public_key: G1Element - - -@streamable -@dataclass(frozen=True) -class DiskCache(Streamable): - version: uint16 - data: List[Tuple[bytes32, CacheEntry]] - - -class Cache: - _changed: bool - _data: Dict[bytes32, CacheEntry] - - def __init__(self, path: Path): - self._changed = False - self._data = {} - self._path = path - if not path.parent.exists(): - mkdir(path.parent) - - def __len__(self): - return len(self._data) - - def update(self, plot_id: bytes32, entry: CacheEntry): - self._data[plot_id] = entry - self._changed = True - - def remove(self, cache_keys: List[bytes32]): - for key in cache_keys: - if key in self._data: - del self._data[key] - self._changed = True - - def save(self): - try: - disk_cache: DiskCache = DiskCache( - CURRENT_VERSION, [(plot_id, cache_entry) for plot_id, cache_entry in self.items()] - ) - serialized: bytes = bytes(disk_cache) - self._path.write_bytes(serialized) - self._changed = False - log.info(f"Saved {len(serialized)} bytes of cached data") - except Exception as e: - log.error(f"Failed to save cache: {e}, {traceback.format_exc()}") - - def load(self): - try: - serialized = self._path.read_bytes() - log.info(f"Loaded {len(serialized)} bytes of cached data") - stored_cache: DiskCache = DiskCache.from_bytes(serialized) - if stored_cache.version != CURRENT_VERSION: - # TODO, Migrate or drop current cache if the version changes. - raise ValueError(f"Invalid cache version {stored_cache.version}. 
Expected version {CURRENT_VERSION}.") - self._data = {plot_id: cache_entry for plot_id, cache_entry in stored_cache.data} - except FileNotFoundError: - log.debug(f"Cache {self._path} not found") - except Exception as e: - log.error(f"Failed to load cache: {e}, {traceback.format_exc()}") - - def keys(self): - return self._data.keys() - - def items(self): - return self._data.items() - - def get(self, plot_id): - return self._data.get(plot_id) - - def changed(self): - return self._changed - - def path(self): - return self._path - class PlotManager: plots: Dict[Path, PlotInfo] @@ -232,9 +142,9 @@ def _refresh_task(self, sleep_interval_ms: int): plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(self.root_path) plot_directories: Set[Path] = set(plot_filenames.keys()) - plot_paths: List[Path] = [] + plot_paths: Set[Path] = set() for paths in plot_filenames.values(): - plot_paths += paths + plot_paths.update(paths) total_result: PlotRefreshResult = PlotRefreshResult() total_size = len(plot_paths) @@ -274,7 +184,7 @@ def _refresh_task(self, sleep_interval_ms: int): for filename in filenames_to_remove: del self.plot_filename_paths[filename] - for remaining, batch in list_to_batches(plot_paths, self.refresh_parameter.batch_size): + for remaining, batch in list_to_batches(list(plot_paths), self.refresh_parameter.batch_size): batch_result: PlotRefreshResult = self.refresh_batch(batch, plot_directories) if not self._refreshing_enabled: self.log.debug("refresh_plots: Aborted") @@ -299,10 +209,15 @@ def _refresh_task(self, sleep_interval_ms: int): self._initial = False # Cleanup unused cache - available_ids = set([plot_info.prover.get_id() for plot_info in self.plots.values()]) - invalid_cache_keys = [plot_id for plot_id in self.cache.keys() if plot_id not in available_ids] - self.cache.remove(invalid_cache_keys) - self.log.debug(f"_refresh_task: cached entries removed: {len(invalid_cache_keys)}") + self.log.debug(f"_refresh_task: cached entries before cleanup: {len(self.cache)}") + remove_paths: List[Path] = [] + for path, cache_entry in self.cache.items(): + if cache_entry.expired(Cache.expiry_seconds) and path not in self.plots: + remove_paths.append(path) + elif path in self.plots: + cache_entry.bump_last_use() + self.cache.remove(remove_paths) + self.log.debug(f"_refresh_task: cached entries removed: {len(remove_paths)}") if self.cache.changed(): self.cache.save() @@ -355,78 +270,61 @@ def process_file(file_path: Path) -> Optional[PlotInfo]: if not file_path.exists(): return None - prover = DiskProver(str(file_path)) - - log.debug(f"process_file {str(file_path)}") - - expected_size = _expected_plot_size(prover.get_size()) * UI_ACTUAL_SPACE_CONSTANT_FACTOR stat_info = file_path.stat() - # TODO: consider checking if the file was just written to (which would mean that the file is still - # being copied). A segfault might happen in this edge case. + cache_entry = self.cache.get(file_path) + cache_hit = cache_entry is not None + if not cache_hit: + prover = DiskProver(str(file_path)) - if prover.get_size() >= 30 and stat_info.st_size < 0.98 * expected_size: - log.warning( - f"Not farming plot {file_path}. Size is {stat_info.st_size / (1024**3)} GiB, but expected" - f" at least: {expected_size / (1024 ** 3)} GiB. We assume the file is being copied." 
-                )
-                return None
-            cache_entry = self.cache.get(prover.get_id())
-            if cache_entry is None:
-                (
-                    pool_public_key_or_puzzle_hash,
-                    farmer_public_key,
-                    local_master_sk,
-                ) = parse_plot_info(prover.get_memo())
-
-                # Only use plots that correct keys associated with them
-                if farmer_public_key not in self.farmer_public_keys:
-                    log.warning(f"Plot {file_path} has a farmer public key that is not in the farmer's pk list.")
-                    self.no_key_filenames.add(file_path)
-                    if not self.open_no_key_filenames:
-                        return None
-
-                pool_public_key: Optional[G1Element] = None
-                pool_contract_puzzle_hash: Optional[bytes32] = None
-                if isinstance(pool_public_key_or_puzzle_hash, G1Element):
-                    pool_public_key = pool_public_key_or_puzzle_hash
-                else:
-                    assert isinstance(pool_public_key_or_puzzle_hash, bytes32)
-                    pool_contract_puzzle_hash = pool_public_key_or_puzzle_hash
+                expected_size = _expected_plot_size(prover.get_size()) * UI_ACTUAL_SPACE_CONSTANT_FACTOR
-                if pool_public_key is not None and pool_public_key not in self.pool_public_keys:
-                    log.warning(f"Plot {file_path} has a pool public key that is not in the farmer's pool pk list.")
-                    self.no_key_filenames.add(file_path)
-                    if not self.open_no_key_filenames:
-                        return None
+                # TODO: consider checking if the file was just written to (which would mean that the file is still
+                # being copied). A segfault might happen in this edge case.
-                # If a plot is in `no_key_filenames` the keys were missing in earlier refresh cycles. We can remove
-                # the current plot from that list if its in there since we passed the key checks above.
-                if file_path in self.no_key_filenames:
-                    self.no_key_filenames.remove(file_path)
+                if prover.get_size() >= 30 and stat_info.st_size < 0.98 * expected_size:
+                    log.warning(
+                        f"Not farming plot {file_path}. Size is {stat_info.st_size / (1024 ** 3)} GiB, but expected"
+                        f" at least: {expected_size / (1024 ** 3)} GiB. We assume the file is being copied."
+                    )
+                    return None
-                local_sk = master_sk_to_local_sk(local_master_sk)
+                cache_entry = CacheEntry.from_disk_prover(prover)
+                self.cache.update(file_path, cache_entry)
-                plot_public_key: G1Element = ProofOfSpace.generate_plot_public_key(
-                    local_sk.get_g1(), farmer_public_key, pool_contract_puzzle_hash is not None
-                )
+            assert cache_entry is not None
+            # Only use plots that have the correct keys associated with them
+            if cache_entry.farmer_public_key not in self.farmer_public_keys:
+                log.warning(f"Plot {file_path} has a farmer public key that is not in the farmer's pk list.")
+                self.no_key_filenames.add(file_path)
+                if not self.open_no_key_filenames:
+                    return None
+
+            if cache_entry.pool_public_key is not None and cache_entry.pool_public_key not in self.pool_public_keys:
+                log.warning(f"Plot {file_path} has a pool public key that is not in the farmer's pool pk list.")
+                self.no_key_filenames.add(file_path)
+                if not self.open_no_key_filenames:
+                    return None
-                cache_entry = CacheEntry(pool_public_key, pool_contract_puzzle_hash, plot_public_key)
-                self.cache.update(prover.get_id(), cache_entry)
+            # If a plot is in `no_key_filenames` the keys were missing in earlier refresh cycles. We can remove
+            # the current plot from that list if it's in there since we passed the key checks above.
+ if file_path in self.no_key_filenames: + self.no_key_filenames.remove(file_path) with self.plot_filename_paths_lock: paths: Optional[Tuple[str, Set[str]]] = self.plot_filename_paths.get(file_path.name) if paths is None: - paths = (str(Path(prover.get_filename()).parent), set()) + paths = (str(Path(cache_entry.prover.get_filename()).parent), set()) self.plot_filename_paths[file_path.name] = paths else: - paths[1].add(str(Path(prover.get_filename()).parent)) + paths[1].add(str(Path(cache_entry.prover.get_filename()).parent)) log.warning(f"Have multiple copies of the plot {file_path.name} in {[paths[0], *paths[1]]}.") return None new_plot_info: PlotInfo = PlotInfo( - prover, + cache_entry.prover, cache_entry.pool_public_key, cache_entry.pool_contract_puzzle_hash, cache_entry.plot_public_key, @@ -434,6 +332,8 @@ def process_file(file_path: Path) -> Optional[PlotInfo]: stat_info.st_mtime, ) + cache_entry.bump_last_use() + with counter_lock: result.loaded.append(new_plot_info) @@ -445,7 +345,8 @@ def process_file(file_path: Path) -> Optional[PlotInfo]: log.error(f"Failed to open file {file_path}. {e} {tb}") self.failed_to_open_filenames[file_path] = int(time.time()) return None - log.info(f"Found plot {file_path} of size {new_plot_info.prover.get_size()}") + log.info(f"Found plot {file_path} of size {new_plot_info.prover.get_size()}, cache_hit: {cache_hit}") + return new_plot_info with self, ThreadPoolExecutor() as executor: diff --git a/chia/protocols/harvester_protocol.py b/chia/protocols/harvester_protocol.py index d3323a48a0a9..bc4f5da8a4e3 100644 --- a/chia/protocols/harvester_protocol.py +++ b/chia/protocols/harvester_protocol.py @@ -1,5 +1,5 @@ from dataclasses import dataclass -from typing import List, Tuple, Optional +from typing import List, Optional, Tuple from blspy import G1Element, G2Element diff --git a/chia/pyinstaller.spec b/chia/pyinstaller.spec index 08769306086e..6d81c943a30d 100644 --- a/chia/pyinstaller.spec +++ b/chia/pyinstaller.spec @@ -189,6 +189,9 @@ add_binary("daemon", f"{ROOT}/chia/daemon/server.py", COLLECT_ARGS) for server in SERVERS: add_binary(f"start_{server}", f"{ROOT}/chia/server/start_{server}.py", COLLECT_ARGS) +add_binary("start_crawler", f"{ROOT}/chia/seeder/start_crawler.py", COLLECT_ARGS) +add_binary("start_seeder", f"{ROOT}/chia/seeder/dns_server.py", COLLECT_ARGS) + COLLECT_KWARGS = dict( strip=False, upx_exclude=[], diff --git a/chia/rpc/farmer_rpc_api.py b/chia/rpc/farmer_rpc_api.py index 396a3c47629c..0c0a7b6e11b2 100644 --- a/chia/rpc/farmer_rpc_api.py +++ b/chia/rpc/farmer_rpc_api.py @@ -97,7 +97,8 @@ async def get_signage_points(self, _: Dict) -> Dict[str, Any]: async def get_reward_targets(self, request: Dict) -> Dict: search_for_private_key = request["search_for_private_key"] - return await self.service.get_reward_targets(search_for_private_key) + max_ph_to_search = request.get("max_ph_to_search", 500) + return await self.service.get_reward_targets(search_for_private_key, max_ph_to_search) async def set_reward_targets(self, request: Dict) -> Dict: farmer_target, pool_target = None, None diff --git a/chia/rpc/farmer_rpc_client.py b/chia/rpc/farmer_rpc_client.py index 612f42e9d093..e746614eef62 100644 --- a/chia/rpc/farmer_rpc_client.py +++ b/chia/rpc/farmer_rpc_client.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional, Any +from typing import Any, Dict, List, Optional from chia.rpc.rpc_client import RpcClient from chia.types.blockchain_format.sized_bytes import bytes32 @@ -22,8 +22,11 @@ async def get_signage_point(self, sp_hash: 
bytes32) -> Optional[Dict]: async def get_signage_points(self) -> List[Dict]: return (await self.fetch("get_signage_points", {}))["signage_points"] - async def get_reward_targets(self, search_for_private_key: bool) -> Dict: - response = await self.fetch("get_reward_targets", {"search_for_private_key": search_for_private_key}) + async def get_reward_targets(self, search_for_private_key: bool, max_ph_to_search: int = 500) -> Dict: + response = await self.fetch( + "get_reward_targets", + {"search_for_private_key": search_for_private_key, "max_ph_to_search": max_ph_to_search}, + ) return_dict = { "farmer_target": response["farmer_target"], "pool_target": response["pool_target"], diff --git a/chia/rpc/full_node_rpc_api.py b/chia/rpc/full_node_rpc_api.py index 04e84a9bf5d8..807bba18755e 100644 --- a/chia/rpc/full_node_rpc_api.py +++ b/chia/rpc/full_node_rpc_api.py @@ -470,10 +470,10 @@ async def get_network_space(self, request: Dict) -> Optional[Dict]: newer_block = await self.service.block_store.get_block_record(newer_block_bytes) if newer_block is None: - raise ValueError("Newer block not found") + raise ValueError(f"Newer block {newer_block_hex} not found") older_block = await self.service.block_store.get_block_record(older_block_bytes) if older_block is None: - raise ValueError("Newer block not found") + raise ValueError(f"Older block {older_block_hex} not found") delta_weight = newer_block.weight - older_block.weight delta_iters = newer_block.total_iters - older_block.total_iters diff --git a/chia/rpc/rpc_server.py b/chia/rpc/rpc_server.py index 4abf16969275..9fecdf57526b 100644 --- a/chia/rpc/rpc_server.py +++ b/chia/rpc/rpc_server.py @@ -9,7 +9,7 @@ from chia.rpc.util import wrap_http_handler from chia.server.outbound_message import NodeType -from chia.server.server import ssl_context_for_server +from chia.server.server import ssl_context_for_client, ssl_context_for_server from chia.types.peer_info import PeerInfo from chia.util.byte_types import hexstr_to_bytes from chia.util.ints import uint16 @@ -42,6 +42,9 @@ def __init__(self, rpc_api: Any, service_name: str, stop_cb: Callable, root_path self.ssl_context = ssl_context_for_server( self.ca_cert_path, self.ca_key_path, self.crt_path, self.key_path, log=self.log ) + self.ssl_client_context = ssl_context_for_client( + self.ca_cert_path, self.ca_key_path, self.crt_path, self.key_path, log=self.log + ) async def stop(self): self.shut_down = True @@ -51,7 +54,7 @@ async def stop(self): await self.client_session.close() async def _state_changed(self, *args): - if self.websocket is None: + if self.websocket is None or self.websocket.closed: return None payloads: List[Dict] = await self.rpc_api._state_changed(*args) @@ -70,7 +73,7 @@ async def _state_changed(self, *args): for payload in payloads: if "success" not in payload["data"]: payload["data"]["success"] = True - if self.websocket is None: + if self.websocket is None or self.websocket.closed: return None try: await self.websocket.send_str(dict_to_json_str(payload)) @@ -79,7 +82,7 @@ async def _state_changed(self, *args): self.log.warning(f"Sending data failed. 
Exception {tb}.") def state_changed(self, *args): - if self.websocket is None: + if self.websocket is None or self.websocket.closed: return None asyncio.create_task(self._state_changed(*args)) @@ -278,7 +281,7 @@ async def connect_to_daemon(self, self_hostname: str, daemon_port: uint16): autoclose=True, autoping=True, heartbeat=60, - ssl_context=self.ssl_context, + ssl_context=self.ssl_client_context, max_msg_size=max_message_size, ) await self.connection(self.websocket) diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index d4e7820411c3..487b2078a89f 100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -2,13 +2,13 @@ import dataclasses import logging from pathlib import Path -from typing import Callable, Dict, List, Optional, Tuple, Set, Any +from typing import Any, Callable, Dict, List, Optional, Set, Tuple -from blspy import PrivateKey, G1Element +from blspy import G1Element, PrivateKey from chia.consensus.block_rewards import calculate_base_farmer_reward from chia.pools.pool_wallet import PoolWallet -from chia.pools.pool_wallet_info import create_pool_state, FARMING_TO_POOL, PoolWalletInfo, PoolState +from chia.pools.pool_wallet_info import FARMING_TO_POOL, PoolState, PoolWalletInfo, create_pool_state from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.server.outbound_message import NodeType, make_msg from chia.simulator.simulator_protocol import FarmNewBlockProtocol @@ -18,16 +18,22 @@ from chia.types.spend_bundle import SpendBundle from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash from chia.util.byte_types import hexstr_to_bytes -from chia.util.ints import uint32, uint64, uint8 +from chia.util.config import load_config +from chia.util.ints import uint8, uint32, uint64 from chia.util.keychain import KeyringIsLocked, bytes_to_mnemonic, generate_mnemonic from chia.util.path import path_from_root from chia.util.ws_message import WsRpcMessage, create_payload_dict from chia.wallet.cat_wallet.cat_constants import DEFAULT_CATS from chia.wallet.cat_wallet.cat_wallet import CATWallet -from chia.wallet.derive_keys import master_sk_to_singleton_owner_sk, master_sk_to_wallet_sk_unhardened, MAX_POOL_WALLETS -from chia.wallet.rl_wallet.rl_wallet import RLWallet -from chia.wallet.derive_keys import master_sk_to_farmer_sk, master_sk_to_pool_sk, master_sk_to_wallet_sk +from chia.wallet.derive_keys import ( + MAX_POOL_WALLETS, + master_sk_to_farmer_sk, + master_sk_to_pool_sk, + master_sk_to_singleton_owner_sk, + match_address_to_sk, +) from chia.wallet.did_wallet.did_wallet import DIDWallet +from chia.wallet.rl_wallet.rl_wallet import RLWallet from chia.wallet.trade_record import TradeRecord from chia.wallet.trading.offer import Offer from chia.wallet.transaction_record import TransactionRecord @@ -35,8 +41,6 @@ from chia.wallet.util.wallet_types import AmountWithPuzzlehash, WalletType from chia.wallet.wallet_info import WalletInfo from chia.wallet.wallet_node import WalletNode -from chia.util.config import load_config -from chia.consensus.coinbase import create_puzzlehash_for_pk # Timeout for response from wallet/full node for sending a transaction TIMEOUT = 30 @@ -302,25 +306,12 @@ async def _check_key_used_for_rewards( config: Dict = load_config(new_root, "config.yaml") farmer_target = config["farmer"].get("xch_target_address") pool_target = config["pool"].get("xch_target_address") - found_farmer = False - found_pool = False - selected = config["selected_network"] - prefix = 
config["network_overrides"]["config"][selected]["address_prefix"] - for i in range(max_ph_to_search): - if found_farmer and found_pool: - break - - phs = [ - encode_puzzle_hash(create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(i)).get_g1()), prefix), - encode_puzzle_hash( - create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(i)).get_g1()), prefix - ), - ] - for ph in phs: - if ph == farmer_target: - found_farmer = True - if ph == pool_target: - found_pool = True + address_to_check: List[bytes32] = [decode_puzzle_hash(farmer_target), decode_puzzle_hash(pool_target)] + + found_addresses: Set[bytes32] = match_address_to_sk(sk, address_to_check, max_ph_to_search) + + found_farmer = address_to_check[0] in found_addresses + found_pool = address_to_check[1] in found_addresses return found_farmer, found_pool @@ -334,9 +325,12 @@ async def check_delete_key(self, request): walletBalance: bool = False fingerprint = request["fingerprint"] + max_ph_to_search = request.get("max_ph_to_search", 100) sk, _ = await self._get_private_key(fingerprint) if sk is not None: - used_for_farmer, used_for_pool = await self._check_key_used_for_rewards(self.service.root_path, sk, 100) + used_for_farmer, used_for_pool = await self._check_key_used_for_rewards( + self.service.root_path, sk, max_ph_to_search + ) if self.service.logged_in_fingerprint != fingerprint: await self._stop_wallet() diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py index 135fb9f37ef0..8ece41978c45 100644 --- a/chia/rpc/wallet_rpc_client.py +++ b/chia/rpc/wallet_rpc_client.py @@ -58,8 +58,8 @@ async def add_key(self, mnemonic: List[str], request_type: str = "new_wallet") - async def delete_key(self, fingerprint: int) -> None: return await self.fetch("delete_key", {"fingerprint": fingerprint}) - async def check_delete_key(self, fingerprint: int) -> None: - return await self.fetch("check_delete_key", {"fingerprint": fingerprint}) + async def check_delete_key(self, fingerprint: int, max_ph_to_search: int = 100) -> None: + return await self.fetch("check_delete_key", {"fingerprint": fingerprint, "max_ph_to_search": max_ph_to_search}) async def delete_all_keys(self) -> None: return await self.fetch("delete_all_keys", {}) diff --git a/chia/server/server.py b/chia/server/server.py index 9edac98fc118..896906ce1b7a 100644 --- a/chia/server/server.py +++ b/chia/server/server.py @@ -48,7 +48,7 @@ def ssl_context_for_server( if check_permissions: verify_ssl_certs_and_keys([ca_cert, private_cert_path], [ca_key, private_key_path], log) - ssl_context = ssl._create_unverified_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=str(ca_cert)) + ssl_context = ssl._create_unverified_context(purpose=ssl.Purpose.CLIENT_AUTH, cafile=str(ca_cert)) ssl_context.check_hostname = False ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2 ssl_context.set_ciphers( diff --git a/chia/timelord/timelord.py b/chia/timelord/timelord.py index 456b6641d15a..732d90f85c57 100644 --- a/chia/timelord/timelord.py +++ b/chia/timelord/timelord.py @@ -183,25 +183,22 @@ async def _handle_client(self, reader: asyncio.StreamReader, writer: asyncio.Str async def _stop_chain(self, chain: Chain): try: - while chain not in self.allows_iters: - self.lock.release() - await asyncio.sleep(0.05) - log.error(f"Trying to stop {chain} before its initialization.") - await self.lock.acquire() - if chain not in self.chain_type_to_stream: - log.warning(f"Trying to stop a crashed chain: {chain}.") - return None - stop_ip, _, stop_writer = 
self.chain_type_to_stream[chain]
-            stop_writer.write(b"010")
-            await stop_writer.drain()
+            _, _, stop_writer = self.chain_type_to_stream[chain]
             if chain in self.allows_iters:
+                stop_writer.write(b"010")
+                await stop_writer.drain()
                 self.allows_iters.remove(chain)
+            else:
+                log.error(f"Trying to stop {chain} before its initialization.")
+                stop_writer.close()
+                await stop_writer.wait_closed()
             if chain not in self.unspawned_chains:
                 self.unspawned_chains.append(chain)
-            if chain in self.chain_type_to_stream:
-                del self.chain_type_to_stream[chain]
+            del self.chain_type_to_stream[chain]
         except ConnectionResetError as e:
             log.error(f"{e}")
+        except Exception as e:
+            log.error(f"Exception in stop chain: {type(e)} {e}")

     def _can_infuse_unfinished_block(self, block: timelord_protocol.NewUnfinishedBlockTimelord) -> Optional[uint64]:
         assert self.last_state is not None
diff --git a/chia/types/blockchain_format/program.py b/chia/types/blockchain_format/program.py
index 096f72d25bdf..784697a5c2e3 100644
--- a/chia/types/blockchain_format/program.py
+++ b/chia/types/blockchain_format/program.py
@@ -1,12 +1,12 @@
 import io
-from typing import List, Set, Tuple, Optional
+from typing import List, Set, Tuple, Optional, Any

 from clvm import SExp
 from clvm.casts import int_from_bytes
 from clvm.EvalError import EvalError
 from clvm.serialize import sexp_from_stream, sexp_to_stream
 from chia_rs import MEMPOOL_MODE, run_chia_program, serialized_length, run_generator
-from clvm_tools.curry import curry, uncurry
+from clvm_tools.curry import uncurry

 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.util.hash import std_hash
@@ -88,9 +88,27 @@ def run(self, args) -> "Program":
         cost, r = self.run_with_cost(INFINITE_COST, args)
         return r

+    # Replicates the curry function from clvm_tools, taking advantage of *args
+    # being a list. We iterate through args in reverse building the code to
+    # create a clvm list.
+    #
+    # Given arguments to a function addressable by the '1' reference in clvm
+    #
+    # fixed_args = 1
+    #
+    # Each arg is prepended as fixed_args = (c (q . arg) fixed_args)
+    #
+    # The resulting argument list is interpreted with apply (2)
+    #
+    # (2 (1 . self) rest)
+    #
+    # Resulting in a function which places its own arguments after those
+    # curried in, in the form of a proper list.
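+    #
+    # For example, currying two arguments (a, b) into a program P builds
+    #
+    # (2 (1 . P) (4 (1 . a) (4 (1 . b) 1)))
+    #
+    # so running the curried program with runtime arguments R evaluates P
+    # with the argument list (a b . R).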
     def curry(self, *args) -> "Program":
-        cost, r = curry(self, list(args))
-        return Program.to(r)
+        fixed_args: Any = 1
+        for arg in reversed(args):
+            fixed_args = [4, (1, arg), fixed_args]
+        return Program.to([2, (1, self), fixed_args])

     def uncurry(self) -> Tuple["Program", "Program"]:
         r = uncurry(self)
diff --git a/chia/types/mempool_submission_status.py b/chia/types/mempool_submission_status.py
new file mode 100644
index 000000000000..9ba1d5c70cca
--- /dev/null
+++ b/chia/types/mempool_submission_status.py
@@ -0,0 +1,28 @@
+from dataclasses import dataclass
+from typing import Dict, Optional, Union
+
+from chia.types.mempool_inclusion_status import MempoolInclusionStatus
+from chia.util.ints import uint8
+from chia.util.streamable import Streamable, streamable
+
+
+@streamable
+@dataclass(frozen=True)
+class MempoolSubmissionStatus(Streamable):
+    """
+    The `sent_to` field in `TradeRecord` and `TransactionRecord` is a list of tuples
+    (peer_id: str, status: MempoolInclusionStatus, error: Optional[str]).
+    MempoolInclusionStatus is represented as a uint8 in those structs so they can stay `Streamable`.
+    """
+
+    peer_id: str
+    inclusion_status: uint8  # MempoolInclusionStatus
+    error_msg: Optional[str]
+
+    def to_json_dict_convenience(self) -> Dict[str, Union[str, MempoolInclusionStatus, Optional[str]]]:
+        formatted = self.to_json_dict()
+        formatted["inclusion_status"] = MempoolInclusionStatus(self.inclusion_status).name
+        return formatted
+
+    def __str__(self) -> str:
+        return f"{self.to_json_dict_convenience()}"
diff --git a/chia/util/chia_logging.py b/chia/util/chia_logging.py
index 2bb62de970c8..4f033557f5bc 100644
--- a/chia/util/chia_logging.py
+++ b/chia/util/chia_logging.py
@@ -61,7 +61,6 @@ def initialize_logging(service_name: str, logging_config: Dict, root_path: Path)
         elif logging_config["log_level"] == "DEBUG":
             logger.setLevel(logging.DEBUG)
             logging.getLogger("aiosqlite").setLevel(logging.INFO)  # Too much logging on debug level
-            logging.getLogger("websockets").setLevel(logging.INFO)  # Too much logging on debug level
         else:
             logger.setLevel(logging.INFO)
     else:
diff --git a/chia/util/streamable.py b/chia/util/streamable.py
index cf545fd4833d..fd5fd468e337 100644
--- a/chia/util/streamable.py
+++ b/chia/util/streamable.py
@@ -5,7 +5,21 @@
 import pprint
 import sys
 from enum import Enum
-from typing import Any, BinaryIO, Dict, get_type_hints, List, Tuple, Type, TypeVar, Union, Callable, Optional, Iterator
+from typing import (
+    Any,
+    BinaryIO,
+    Callable,
+    Dict,
+    Iterator,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    get_type_hints,
+    overload,
+)

 from blspy import G1Element, G2Element, PrivateKey
 from typing_extensions import Literal
@@ -58,29 +72,32 @@ class DefinitionError(StreamableError):

 _T_Streamable = TypeVar("_T_Streamable", bound="Streamable")

+ParseFunctionType = Callable[[BinaryIO], object]
+StreamFunctionType = Callable[[object, BinaryIO], None]
+

 # Caches to store the fields and (de)serialization methods for all available streamable classes.
-FIELDS_FOR_STREAMABLE_CLASS = {}
-STREAM_FUNCTIONS_FOR_STREAMABLE_CLASS = {}
-PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS = {}
+FIELDS_FOR_STREAMABLE_CLASS: Dict[Type[object], Dict[str, Type[object]]] = {}
+STREAM_FUNCTIONS_FOR_STREAMABLE_CLASS: Dict[Type[object], List[StreamFunctionType]] = {}
+PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS: Dict[Type[object], List[ParseFunctionType]] = {}


-def is_type_List(f_type: Type) -> bool:
+def is_type_List(f_type: object) -> bool:
     return get_origin(f_type) == list or f_type == list


-def is_type_SpecificOptional(f_type) -> bool:
+def is_type_SpecificOptional(f_type: object) -> bool:
     """
     Returns true for types such as Optional[T], but not Optional, or T.
     """
     return get_origin(f_type) == Union and get_args(f_type)[1]() is None


-def is_type_Tuple(f_type: Type) -> bool:
+def is_type_Tuple(f_type: object) -> bool:
     return get_origin(f_type) == tuple or f_type == tuple


-def dataclass_from_dict(klass, d):
+def dataclass_from_dict(klass: Type[Any], d: Any) -> Any:
     """
     Converts a dictionary based on a dataclass, into an instance of that dataclass.
     Recursively goes through lists, optionals, and dictionaries.
@@ -100,7 +117,8 @@ def dataclass_from_dict(klass, d):
         return tuple(klass_properties)
     elif dataclasses.is_dataclass(klass):
         # Type is a dataclass, data is a dictionary
-        fieldtypes = {f.name: f.type for f in dataclasses.fields(klass)}
+        hints = get_type_hints(klass)
+        fieldtypes = {f.name: hints.get(f.name, f.type) for f in dataclasses.fields(klass)}
         return klass(**{f: dataclass_from_dict(fieldtypes[f], d[f]) for f in d})
     elif is_type_List(klass):
         # Type is a list, data is a list
@@ -116,7 +134,17 @@ def dataclass_from_dict(klass, d):
     return klass(d)


-def recurse_jsonify(d):
+@overload
+def recurse_jsonify(d: Union[List[Any], Tuple[Any, ...]]) -> List[Any]:
+    ...
+
+
+@overload
+def recurse_jsonify(d: Dict[str, Any]) -> Dict[str, Any]:
+    ...
+
+
+def recurse_jsonify(d: Union[List[Any], Tuple[Any, ...], Dict[str, Any]]) -> Union[List[Any], Dict[str, Any]]:
     """
     Makes bytes objects and unhashable types into strings with 0x, and makes large ints into strings.
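The `@overload` pair added above is what lets call sites keep precise static types from a single shared implementation: a dict argument is seen as coming back as a dict, while lists and tuples come back as lists. A minimal sketch of the same pattern, using a hypothetical `normalize` helper (not part of this patch):

    from typing import Any, Dict, List, Tuple, Union, overload


    @overload
    def normalize(d: Union[List[Any], Tuple[Any, ...]]) -> List[Any]:
        ...


    @overload
    def normalize(d: Dict[str, Any]) -> Dict[str, Any]:
        ...


    def normalize(d: Union[List[Any], Tuple[Any, ...], Dict[str, Any]]) -> Union[List[Any], Dict[str, Any]]:
        # One runtime implementation; the overloads above only narrow the static
        # types, so mypy infers normalize({"a": 1}) as Dict[str, Any] and
        # normalize((1, 2)) as List[Any].
        if isinstance(d, dict):
            return dict(d)
        return list(d)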
@@ -173,11 +201,11 @@ def parse_uint32(f: BinaryIO, byteorder: Literal["little", "big"] = "big") -> ui return uint32(int.from_bytes(size_bytes, byteorder)) -def write_uint32(f: BinaryIO, value: uint32, byteorder: Literal["little", "big"] = "big"): +def write_uint32(f: BinaryIO, value: uint32, byteorder: Literal["little", "big"] = "big") -> None: f.write(value.to_bytes(4, byteorder)) -def parse_optional(f: BinaryIO, parse_inner_type_f: Callable[[BinaryIO], Any]) -> Optional[Any]: +def parse_optional(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> Optional[object]: is_present_bytes = f.read(1) assert is_present_bytes is not None and len(is_present_bytes) == 1 # Checks for EOF if is_present_bytes == bytes([0]): @@ -195,8 +223,8 @@ def parse_bytes(f: BinaryIO) -> bytes: return bytes_read -def parse_list(f: BinaryIO, parse_inner_type_f: Callable[[BinaryIO], Any]) -> List[Any]: - full_list: List = [] +def parse_list(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> List[object]: + full_list: List[object] = [] # wjb assert inner_type != get_args(List)[0] list_size = parse_uint32(f) for list_index in range(list_size): @@ -204,14 +232,14 @@ def parse_list(f: BinaryIO, parse_inner_type_f: Callable[[BinaryIO], Any]) -> Li return full_list -def parse_tuple(f: BinaryIO, list_parse_inner_type_f: List[Callable[[BinaryIO], Any]]) -> Tuple[Any, ...]: - full_list = [] +def parse_tuple(f: BinaryIO, list_parse_inner_type_f: List[ParseFunctionType]) -> Tuple[object, ...]: + full_list: List[object] = [] for parse_f in list_parse_inner_type_f: full_list.append(parse_f(f)) return tuple(full_list) -def parse_size_hints(f: BinaryIO, f_type: Type, bytes_to_read: int) -> Any: +def parse_size_hints(f: BinaryIO, f_type: Type[Any], bytes_to_read: int) -> Any: bytes_read = f.read(bytes_to_read) assert bytes_read is not None and len(bytes_read) == bytes_to_read return f_type.from_bytes(bytes_read) @@ -224,7 +252,7 @@ def parse_str(f: BinaryIO) -> str: return bytes.decode(str_read_bytes, "utf-8") -def stream_optional(stream_inner_type_func: Callable[[Any, BinaryIO], None], item: Any, f: BinaryIO) -> None: +def stream_optional(stream_inner_type_func: StreamFunctionType, item: Any, f: BinaryIO) -> None: if item is None: f.write(bytes([0])) else: @@ -237,13 +265,13 @@ def stream_bytes(item: Any, f: BinaryIO) -> None: f.write(item) -def stream_list(stream_inner_type_func: Callable[[Any, BinaryIO], None], item: Any, f: BinaryIO) -> None: +def stream_list(stream_inner_type_func: StreamFunctionType, item: Any, f: BinaryIO) -> None: write_uint32(f, uint32(len(item))) for element in item: stream_inner_type_func(element, f) -def stream_tuple(stream_inner_type_funcs: List[Callable[[Any, BinaryIO], None]], item: Any, f: BinaryIO) -> None: +def stream_tuple(stream_inner_type_funcs: List[StreamFunctionType], item: Any, f: BinaryIO) -> None: assert len(stream_inner_type_funcs) == len(item) for i in range(len(item)): stream_inner_type_funcs[i](item[i], f) @@ -255,7 +283,19 @@ def stream_str(item: Any, f: BinaryIO) -> None: f.write(str_bytes) -def streamable(cls: Any): +def stream_bool(item: Any, f: BinaryIO) -> None: + f.write(int(item).to_bytes(1, "big")) + + +def stream_streamable(item: object, f: BinaryIO) -> None: + getattr(item, "stream")(f) + + +def stream_byte_convertible(item: object, f: BinaryIO) -> None: + f.write(getattr(item, "__bytes__")()) + + +def streamable(cls: Type[_T_Streamable]) -> Type[_T_Streamable]: """ This decorator forces correct streamable protocol syntax/usage and populates the caches for types 
hints and (de)serialization methods for all members of the class. The correct usage is: @@ -279,7 +319,9 @@ class Example(Streamable): raise DefinitionError(f"@dataclass(frozen=True) required first. {correct_usage_string}") try: - object.__new__(cls)._streamable_test_if_dataclass_frozen_ = None + # Ignore mypy here because we especially want to access a not available member to test if + # the dataclass is frozen. + object.__new__(cls)._streamable_test_if_dataclass_frozen_ = None # type: ignore[attr-defined] except dataclasses.FrozenInstanceError: pass else: @@ -352,10 +394,10 @@ class Streamable: Make sure to use the streamable decorator when inheriting from the Streamable class to prepare the streaming caches. """ - def post_init_parse(self, item: Any, f_name: str, f_type: Type) -> Any: + def post_init_parse(self, item: Any, f_name: str, f_type: Type[Any]) -> Any: if is_type_List(f_type): - collected_list: List = [] - inner_type: Type = get_args(f_type)[0] + collected_list: List[Any] = [] + inner_type: Type[Any] = get_args(f_type)[0] # wjb assert inner_type != get_args(List)[0] # type: ignore if not is_type_List(type(item)): raise ValueError(f"Wrong type for {f_name}, need a list.") @@ -391,7 +433,7 @@ def post_init_parse(self, item: Any, f_name: str, f_type: Type) -> Any: raise ValueError(f"Wrong type for {f_name}") return item - def __post_init__(self): + def __post_init__(self) -> None: try: fields = FIELDS_FOR_STREAMABLE_CLASS[type(self)] except Exception: @@ -408,12 +450,12 @@ def __post_init__(self): object.__setattr__(self, f_name, self.post_init_parse(data[f_name], f_name, f_type)) @classmethod - def function_to_parse_one_item(cls, f_type: Type) -> Callable[[BinaryIO], Any]: + def function_to_parse_one_item(cls, f_type: Type[Any]) -> ParseFunctionType: """ This function returns a function taking one argument `f: BinaryIO` that parses and returns a value of the given type. 
""" - inner_type: Type + inner_type: Type[Any] if f_type is bool: return parse_bool if is_type_SpecificOptional(f_type): @@ -421,7 +463,8 @@ def function_to_parse_one_item(cls, f_type: Type) -> Callable[[BinaryIO], Any]: parse_inner_type_f = cls.function_to_parse_one_item(inner_type) return lambda f: parse_optional(f, parse_inner_type_f) if hasattr(f_type, "parse"): - return f_type.parse + # Ignoring for now as the proper solution isn't obvious + return f_type.parse # type: ignore[no-any-return] if f_type == bytes: return parse_bytes if is_type_List(f_type): @@ -444,7 +487,7 @@ def parse(cls: Type[_T_Streamable], f: BinaryIO) -> _T_Streamable: # Create the object without calling __init__() to avoid unnecessary post-init checks in strictdataclass obj: _T_Streamable = object.__new__(cls) fields: Iterator[str] = iter(FIELDS_FOR_STREAMABLE_CLASS.get(cls, {})) - values: Iterator = (parse_f(f) for parse_f in PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS[cls]) + values: Iterator[object] = (parse_f(f) for parse_f in PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS[cls]) for field, value in zip(fields, values): object.__setattr__(obj, field, value) @@ -456,8 +499,8 @@ def parse(cls: Type[_T_Streamable], f: BinaryIO) -> _T_Streamable: return obj @classmethod - def function_to_stream_one_item(cls, f_type: Type) -> Callable[[Any, BinaryIO], Any]: - inner_type: Type + def function_to_stream_one_item(cls, f_type: Type[Any]) -> StreamFunctionType: + inner_type: Type[Any] if is_type_SpecificOptional(f_type): inner_type = get_args(f_type)[0] stream_inner_type_func = cls.function_to_stream_one_item(inner_type) @@ -465,9 +508,9 @@ def function_to_stream_one_item(cls, f_type: Type) -> Callable[[Any, BinaryIO], elif f_type == bytes: return stream_bytes elif hasattr(f_type, "stream"): - return lambda item, f: item.stream(f) + return stream_streamable elif hasattr(f_type, "__bytes__"): - return lambda item, f: f.write(bytes(item)) + return stream_byte_convertible elif is_type_List(f_type): inner_type = get_args(f_type)[0] stream_inner_type_func = cls.function_to_stream_one_item(inner_type) @@ -481,7 +524,7 @@ def function_to_stream_one_item(cls, f_type: Type) -> Callable[[Any, BinaryIO], elif f_type is str: return stream_str elif f_type is bool: - return lambda item, f: f.write(int(item).to_bytes(1, "big")) + return stream_bool else: raise NotImplementedError(f"can't stream {f_type}") @@ -518,9 +561,9 @@ def __str__(self: Any) -> str: def __repr__(self: Any) -> str: return pp.pformat(recurse_jsonify(dataclasses.asdict(self))) - def to_json_dict(self) -> Dict: + def to_json_dict(self) -> Dict[str, Any]: return recurse_jsonify(dataclasses.asdict(self)) @classmethod - def from_json_dict(cls: Any, json_dict: Dict) -> Any: + def from_json_dict(cls: Any, json_dict: Dict[str, Any]) -> Any: return dataclass_from_dict(cls, json_dict) diff --git a/chia/wallet/cat_wallet/cat_wallet.py b/chia/wallet/cat_wallet/cat_wallet.py index b58c9b9ce92b..b8bacf0e2a11 100644 --- a/chia/wallet/cat_wallet/cat_wallet.py +++ b/chia/wallet/cat_wallet/cat_wallet.py @@ -97,8 +97,6 @@ async def create_new_cat_wallet( self.wallet_info = new_wallet_info - self.lineage_store = await CATLineageStore.create(self.wallet_state_manager.db_wrapper, self.get_asset_id()) - try: chia_tx, spend_bundle = await ALL_LIMITATIONS_PROGRAMS[ cat_tail_info["identifier"] diff --git a/chia/wallet/derive_keys.py b/chia/wallet/derive_keys.py index 573870a3baf3..39594cddd215 100644 --- a/chia/wallet/derive_keys.py +++ b/chia/wallet/derive_keys.py @@ -1,7 +1,9 @@ -from typing import List, 
Optional, Tuple +from typing import Set, List, Optional, Tuple -from blspy import AugSchemeMPL, PrivateKey, G1Element +from blspy import AugSchemeMPL, G1Element, PrivateKey +from chia.consensus.coinbase import create_puzzlehash_for_pk +from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint32 # EIP 2334 bls key derivation @@ -20,12 +22,6 @@ def _derive_path(sk: PrivateKey, path: List[int]) -> PrivateKey: return sk -def _derive_path_pub(pk: G1Element, path: List[int]) -> G1Element: - for index in path: - pk = AugSchemeMPL.derive_child_pk_unhardened(pk, index) - return pk - - def _derive_path_unhardened(sk: PrivateKey, path: List[int]) -> PrivateKey: for index in path: sk = AugSchemeMPL.derive_child_sk_unhardened(sk, index) @@ -44,20 +40,11 @@ def master_sk_to_wallet_sk_intermediate(master: PrivateKey) -> PrivateKey: return _derive_path(master, [12381, 8444, 2]) -def master_pk_to_wallet_pk_intermediate(master: G1Element) -> G1Element: - return _derive_path_pub(master, [12381, 8444, 2]) - - def master_sk_to_wallet_sk(master: PrivateKey, index: uint32) -> PrivateKey: intermediate = master_sk_to_wallet_sk_intermediate(master) return _derive_path(intermediate, [index]) -def master_pk_to_wallet_pk_unhardened(master: G1Element, index: uint32) -> G1Element: - intermediate = master_pk_to_wallet_pk_intermediate(master) - return _derive_path_pub(intermediate, [index]) - - def master_sk_to_wallet_sk_unhardened_intermediate(master: PrivateKey) -> PrivateKey: return _derive_path_unhardened(master, [12381, 8444, 2]) @@ -91,7 +78,7 @@ def master_sk_to_pooling_authentication_sk(master: PrivateKey, pool_wallet_index return _derive_path(master, [12381, 8444, 6, pool_wallet_index * 10000 + index]) -def find_owner_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Optional[Tuple[G1Element, uint32]]: +def find_owner_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Optional[Tuple[PrivateKey, uint32]]: for pool_wallet_index in range(MAX_POOL_WALLETS): for sk in all_sks: try_owner_sk = master_sk_to_singleton_owner_sk(sk, uint32(pool_wallet_index)) @@ -110,3 +97,34 @@ def find_authentication_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Op # NOTE: ONLY use 0 for authentication key index to ensure compatibility return master_sk_to_pooling_authentication_sk(sk, uint32(pool_wallet_index), uint32(0)) return None + + +def match_address_to_sk( + sk: PrivateKey, addresses_to_search: List[bytes32], max_ph_to_search: int = 500 +) -> Set[bytes32]: + """ + Checks the list of given address is a derivation of the given sk within the given number of derivations + Returns a Set of the addresses that are derivations of the given sk + """ + if sk is None or not addresses_to_search: + return set() + + found_addresses: Set[bytes32] = set() + search_list: Set[bytes32] = set(addresses_to_search) + + for i in range(max_ph_to_search): + + phs = [ + create_puzzlehash_for_pk(master_sk_to_wallet_sk(sk, uint32(i)).get_g1()), + create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(i)).get_g1()), + ] + + for address in search_list: + if address in phs: + found_addresses.add(address) + + search_list = search_list - found_addresses + if not len(search_list): + return found_addresses + + return found_addresses diff --git a/chia/wallet/puzzles/tails.py b/chia/wallet/puzzles/tails.py index 4af3590cb395..6aac17354689 100644 --- a/chia/wallet/puzzles/tails.py +++ b/chia/wallet/puzzles/tails.py @@ -5,6 +5,7 @@ from chia.types.spend_bundle import SpendBundle from chia.util.ints import 
uint64 from chia.util.byte_types import hexstr_to_bytes +from chia.wallet.cat_wallet.lineage_store import CATLineageStore from chia.wallet.lineage_proof import LineageProof from chia.wallet.puzzles.load_clvm import load_clvm from chia.wallet.cat_wallet.cat_utils import ( @@ -72,9 +73,13 @@ async def generate_issuance_bundle(cls, wallet, _: Dict, amount: uint64) -> Tupl origin_id = origin.name() cat_inner: Program = await wallet.get_new_inner_puzzle() - await wallet.add_lineage(origin_id, LineageProof(), False) tail: Program = cls.construct([Program.to(origin_id)]) + wallet.lineage_store = await CATLineageStore.create( + wallet.wallet_state_manager.db_wrapper, tail.get_tree_hash().hex() + ) + await wallet.add_lineage(origin_id, LineageProof(), False) + minted_cat_puzzle_hash: bytes32 = construct_cat_puzzle(CAT_MOD, tail.get_tree_hash(), cat_inner).get_tree_hash() tx_record: TransactionRecord = await wallet.standard_wallet.generate_signed_transaction( diff --git a/chia/wallet/trade_record.py b/chia/wallet/trade_record.py index 08c56bb4bb60..1cf9a452df2b 100644 --- a/chia/wallet/trade_record.py +++ b/chia/wallet/trade_record.py @@ -26,7 +26,7 @@ class TradeRecord(Streamable): coins_of_interest: List[Coin] trade_id: bytes32 status: uint32 # TradeStatus, enum not streamable - sent_to: List[Tuple[str, uint8, Optional[str]]] + sent_to: List[Tuple[str, uint8, Optional[str]]] # MempoolSubmissionStatus.status enum not streamable def to_json_dict_convenience(self) -> Dict[str, Any]: formatted = self.to_json_dict() diff --git a/chia/wallet/wallet_blockchain.py b/chia/wallet/wallet_blockchain.py index f4ea59d1b9b6..d0ebc650e8be 100644 --- a/chia/wallet/wallet_blockchain.py +++ b/chia/wallet/wallet_blockchain.py @@ -187,7 +187,8 @@ async def get_peak_block(self) -> Optional[HeaderBlock]: async def set_finished_sync_up_to(self, height: int, in_transaction=False): if height > await self.get_finished_sync_up_to(): await self._basic_store.set_object("FINISHED_SYNC_UP_TO", uint32(height), in_transaction) - await self.clean_block_records() + if not in_transaction: + await self.clean_block_records() async def get_finished_sync_up_to(self): h: Optional[uint32] = await self._basic_store.get_object("FINISHED_SYNC_UP_TO", uint32) diff --git a/chia/wallet/wallet_node.py b/chia/wallet/wallet_node.py index b64f38cf1a7e..f417e58d7a4a 100644 --- a/chia/wallet/wallet_node.py +++ b/chia/wallet/wallet_node.py @@ -358,7 +358,7 @@ async def _process_new_subscriptions(self): try: peer, item = None, None item = await self.new_peak_queue.get() - self.log.debug(f"Pulled from queue: {item}") + self.log.debug("Pulled from queue: %s", item) assert item is not None if item.item_type == NewPeakQueueTypes.COIN_ID_SUBSCRIPTION: # Subscriptions are the highest priority, because we don't want to process any more peaks or @@ -478,8 +478,8 @@ async def perform_atomic_rollback(self, fork_height: int, cache: Optional[PeerRe async with self.wallet_state_manager.db_wrapper.lock: try: await self.wallet_state_manager.db_wrapper.begin_transaction() - await self.wallet_state_manager.reorg_rollback(fork_height) - await self.wallet_state_manager.blockchain.set_finished_sync_up_to(fork_height) + removed_wallet_ids = await self.wallet_state_manager.reorg_rollback(fork_height) + await self.wallet_state_manager.blockchain.set_finished_sync_up_to(fork_height, True) if cache is None: self.rollback_request_caches(fork_height) else: @@ -493,6 +493,11 @@ async def perform_atomic_rollback(self, fork_height: int, cache: Optional[PeerRe await 
self.wallet_state_manager.tx_store.rebuild_tx_cache() await self.wallet_state_manager.pool_store.rebuild_cache() raise + else: + await self.wallet_state_manager.blockchain.clean_block_records() + + for wallet_id in removed_wallet_ids: + self.wallet_state_manager.wallets.pop(wallet_id) async def long_sync( self, @@ -686,6 +691,9 @@ async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: i await self.wallet_state_manager.coin_store.rebuild_wallet_cache() await self.wallet_state_manager.tx_store.rebuild_tx_cache() await self.wallet_state_manager.pool_store.rebuild_cache() + else: + await self.wallet_state_manager.blockchain.clean_block_records() + except Exception as e: tb = traceback.format_exc() self.log.error(f"Exception while adding state: {e} {tb}") @@ -721,6 +729,9 @@ async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: i tb = traceback.format_exc() self.log.error(f"Error adding states.. {e} {tb}") return False + else: + await self.wallet_state_manager.blockchain.clean_block_records() + else: while len(concurrent_tasks_cs_heights) >= target_concurrent_tasks: await asyncio.sleep(0.1) diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py index 01282e32932c..84d31c2af203 100644 --- a/chia/wallet/wallet_state_manager.py +++ b/chia/wallet/wallet_state_manager.py @@ -207,16 +207,6 @@ async def create( return self - def get_derivation_index(self, pubkey: G1Element, max_depth: int = 1000) -> int: - for i in range(0, max_depth): - derived = self.get_public_key(uint32(i)) - if derived == pubkey: - return i - derived = self.get_public_key_unhardened(uint32(i)) - if derived == pubkey: - return i - return -1 - def get_public_key(self, index: uint32) -> G1Element: return master_sk_to_wallet_sk(self.private_key, index).get_g1() @@ -1088,7 +1078,7 @@ async def get_wallet_for_coin(self, coin_id: bytes32) -> Any: wallet = self.wallets[wallet_id] return wallet - async def reorg_rollback(self, height: int): + async def reorg_rollback(self, height: int) -> List[uint32]: """ Rolls back and updates the coin_store and transaction store. It's possible this height is the tip, or even beyond the tip. @@ -1114,7 +1104,8 @@ async def reorg_rollback(self, height: int): remove_ids.append(wallet_id) for wallet_id in remove_ids: await self.user_store.delete_wallet(wallet_id, in_transaction=True) - self.wallets.pop(wallet_id) + + return remove_ids async def _await_closed(self) -> None: await self.db_connection.close() diff --git a/mypy.ini b/mypy.ini index 4c3f96cbfd0f..795d940d34fd 100644 --- a/mypy.ini +++ b/mypy.ini @@ -17,7 +17,7 @@ no_implicit_reexport = True strict_equality = True # list created by: venv/bin/mypy | sed -n 's/.py:.*//p' | sort | uniq | tr '/' '.' 
| tr '\n' ',' -[mypy-benchmarks.block_ref,benchmarks.block_store,benchmarks.coin_store,benchmarks.utils,build_scripts.installer-version,chia.clvm.spend_sim,chia.cmds.configure,chia.cmds.db,chia.cmds.db_upgrade_func,chia.cmds.farm_funcs,chia.cmds.init,chia.cmds.init_funcs,chia.cmds.keys,chia.cmds.keys_funcs,chia.cmds.passphrase,chia.cmds.passphrase_funcs,chia.cmds.plotnft,chia.cmds.plotnft_funcs,chia.cmds.plots,chia.cmds.plotters,chia.cmds.show,chia.cmds.start_funcs,chia.cmds.wallet,chia.cmds.wallet_funcs,chia.consensus.block_body_validation,chia.consensus.blockchain,chia.consensus.blockchain_interface,chia.consensus.block_creation,chia.consensus.block_header_validation,chia.consensus.block_record,chia.consensus.block_root_validation,chia.consensus.coinbase,chia.consensus.constants,chia.consensus.difficulty_adjustment,chia.consensus.get_block_challenge,chia.consensus.multiprocess_validation,chia.consensus.pos_quality,chia.consensus.vdf_info_computation,chia.daemon.client,chia.daemon.keychain_proxy,chia.daemon.keychain_server,chia.daemon.server,chia.farmer.farmer,chia.farmer.farmer_api,chia.full_node.block_height_map,chia.full_node.block_store,chia.full_node.bundle_tools,chia.full_node.coin_store,chia.full_node.full_node,chia.full_node.full_node_api,chia.full_node.full_node_store,chia.full_node.generator,chia.full_node.hint_store,chia.full_node.lock_queue,chia.full_node.mempool,chia.full_node.mempool_check_conditions,chia.full_node.mempool_manager,chia.full_node.pending_tx_cache,chia.full_node.sync_store,chia.full_node.weight_proof,chia.harvester.harvester,chia.harvester.harvester_api,chia.introducer.introducer,chia.introducer.introducer_api,chia.plotters.bladebit,chia.plotters.chiapos,chia.plotters.install_plotter,chia.plotters.madmax,chia.plotters.plotters,chia.plotters.plotters_util,chia.plotting.check_plots,chia.plotting.create_plots,chia.plotting.manager,chia.plotting.util,chia.pools.pool_config,chia.pools.pool_puzzles,chia.pools.pool_wallet,chia.pools.pool_wallet_info,chia.protocols.pool_protocol,chia.rpc.crawler_rpc_api,chia.rpc.farmer_rpc_api,chia.rpc.farmer_rpc_client,chia.rpc.full_node_rpc_api,chia.rpc.full_node_rpc_client,chia.rpc.harvester_rpc_api,chia.rpc.harvester_rpc_client,chia.rpc.rpc_client,chia.rpc.rpc_server,chia.rpc.timelord_rpc_api,chia.rpc.util,chia.rpc.wallet_rpc_api,chia.rpc.wallet_rpc_client,chia.seeder.crawler,chia.seeder.crawler_api,chia.seeder.crawl_store,chia.seeder.dns_server,chia.seeder.peer_record,chia.seeder.start_crawler,chia.server.address_manager,chia.server.address_manager_store,chia.server.connection_utils,chia.server.introducer_peers,chia.server.node_discovery,chia.server.peer_store_resolver,chia.server.rate_limits,chia.server.reconnect_task,chia.server.server,chia.server.ssl_context,chia.server.start_farmer,chia.server.start_full_node,chia.server.start_harvester,chia.server.start_introducer,chia.server.start_service,chia.server.start_timelord,chia.server.start_wallet,chia.server.upnp,chia.server.ws_connection,chia.simulator.full_node_simulator,chia.simulator.start_simulator,chia.ssl.create_ssl,chia.timelord.iters_from_block,chia.timelord.timelord,chia.timelord.timelord_api,chia.timelord.timelord_launcher,chia.timelord.timelord_state,chia.types.announcement,chia.types.blockchain_format.classgroup,chia.types.blockchain_format.coin,chia.types.blockchain_format.program,chia.types.blockchain_format.proof_of_space,chia.types.blockchain_format.tree_hash,chia.types.blockchain_format.vdf,chia.types.full_block,chia.types.header_block,chia.types.mempool_item,chia
.types.name_puzzle_condition,chia.types.peer_info,chia.types.spend_bundle,chia.types.transaction_queue_entry,chia.types.unfinished_block,chia.types.unfinished_header_block,chia.util.api_decorators,chia.util.block_cache,chia.util.byte_types,chia.util.cached_bls,chia.util.check_fork_next_block,chia.util.chia_logging,chia.util.config,chia.util.db_wrapper,chia.util.dump_keyring,chia.util.file_keyring,chia.util.files,chia.util.hash,chia.util.ints,chia.util.json_util,chia.util.keychain,chia.util.keyring_wrapper,chia.util.log_exceptions,chia.util.lru_cache,chia.util.make_test_constants,chia.util.merkle_set,chia.util.network,chia.util.partial_func,chia.util.pip_import,chia.util.profiler,chia.util.safe_cancel_task,chia.util.service_groups,chia.util.ssl_check,chia.util.streamable,chia.util.struct_stream,chia.util.validate_alert,chia.wallet.block_record,chia.wallet.cat_wallet.cat_utils,chia.wallet.cat_wallet.cat_wallet,chia.wallet.cat_wallet.lineage_store,chia.wallet.chialisp,chia.wallet.did_wallet.did_wallet,chia.wallet.did_wallet.did_wallet_puzzles,chia.wallet.key_val_store,chia.wallet.lineage_proof,chia.wallet.payment,chia.wallet.puzzles.load_clvm,chia.wallet.puzzles.p2_conditions,chia.wallet.puzzles.p2_delegated_conditions,chia.wallet.puzzles.p2_delegated_puzzle,chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle,chia.wallet.puzzles.p2_m_of_n_delegate_direct,chia.wallet.puzzles.p2_puzzle_hash,chia.wallet.puzzles.prefarm.spend_prefarm,chia.wallet.puzzles.puzzle_utils,chia.wallet.puzzles.rom_bootstrap_generator,chia.wallet.puzzles.singleton_top_layer,chia.wallet.puzzles.tails,chia.wallet.rl_wallet.rl_wallet,chia.wallet.rl_wallet.rl_wallet_puzzles,chia.wallet.secret_key_store,chia.wallet.settings.user_settings,chia.wallet.trade_manager,chia.wallet.trade_record,chia.wallet.trading.offer,chia.wallet.trading.trade_store,chia.wallet.transaction_record,chia.wallet.util.debug_spend_bundle,chia.wallet.util.new_peak_queue,chia.wallet.util.peer_request_cache,chia.wallet.util.wallet_sync_utils,chia.wallet.wallet,chia.wallet.wallet_action_store,chia.wallet.wallet_blockchain,chia.wallet.wallet_coin_store,chia.wallet.wallet_interested_store,chia.wallet.wallet_node,chia.wallet.wallet_node_api,chia.wallet.wallet_pool_store,chia.wallet.wallet_puzzle_store,chia.wallet.wallet_state_manager,chia.wallet.wallet_sync_store,chia.wallet.wallet_transaction_store,chia.wallet.wallet_user_store,chia.wallet.wallet_weight_proof_handler,installhelper,tests.blockchain.blockchain_test_utils,tests.blockchain.test_blockchain,tests.blockchain.test_blockchain_transactions,tests.block_tools,tests.build-init-files,tests.build-workflows,tests.clvm.coin_store,tests.clvm.test_chialisp_deserialization,tests.clvm.test_clvm_compilation,tests.clvm.test_program,tests.clvm.test_puzzle_compression,tests.clvm.test_puzzles,tests.clvm.test_serialized_program,tests.clvm.test_singletons,tests.clvm.test_spend_sim,tests.conftest,tests.connection_utils,tests.core.cmds.test_keys,tests.core.consensus.test_pot_iterations,tests.core.custom_types.test_coin,tests.core.custom_types.test_proof_of_space,tests.core.custom_types.test_spend_bundle,tests.core.daemon.test_daemon,tests.core.full_node.full_sync.test_full_sync,tests.core.full_node.stores.test_block_store,tests.core.full_node.stores.test_coin_store,tests.core.full_node.stores.test_full_node_store,tests.core.full_node.stores.test_hint_store,tests.core.full_node.stores.test_sync_store,tests.core.full_node.test_address_manager,tests.core.full_node.test_block_height_map,tests.core.full_node.test_conditions
,tests.core.full_node.test_full_node,tests.core.full_node.test_mempool,tests.core.full_node.test_mempool_performance,tests.core.full_node.test_node_load,tests.core.full_node.test_peer_store_resolver,tests.core.full_node.test_performance,tests.core.full_node.test_transactions,tests.core.make_block_generator,tests.core.node_height,tests.core.server.test_dos,tests.core.server.test_rate_limits,tests.core.ssl.test_ssl,tests.core.test_cost_calculation,tests.core.test_crawler_rpc,tests.core.test_daemon_rpc,tests.core.test_db_conversion,tests.core.test_farmer_harvester_rpc,tests.core.test_filter,tests.core.test_full_node_rpc,tests.core.test_merkle_set,tests.core.test_setproctitle,tests.core.util.test_cached_bls,tests.core.util.test_config,tests.core.util.test_file_keyring_synchronization,tests.core.util.test_files,tests.core.util.test_keychain,tests.core.util.test_keyring_wrapper,tests.core.util.test_lru_cache,tests.core.util.test_significant_bits,tests.core.util.test_streamable,tests.farmer_harvester.test_farmer_harvester,tests.generator.test_compression,tests.generator.test_generator_types,tests.generator.test_list_to_batches,tests.generator.test_rom,tests.generator.test_scan,tests.plotting.test_plot_manager,tests.pools.test_pool_cmdline,tests.pools.test_pool_config,tests.pools.test_pool_puzzles_lifecycle,tests.pools.test_pool_rpc,tests.pools.test_wallet_pool_store,tests.setup_nodes,tests.setup_services,tests.simulation.test_simulation,tests.time_out_assert,tests.tools.test_full_sync,tests.tools.test_run_block,tests.util.alert_server,tests.util.benchmark_cost,tests.util.blockchain,tests.util.build_network_protocol_files,tests.util.db_connection,tests.util.generator_tools_testing,tests.util.keyring,tests.util.key_tool,tests.util.misc,tests.util.network,tests.util.rpc,tests.util.test_full_block_utils,tests.util.test_lock_queue,tests.util.test_network_protocol_files,tests.util.test_struct_stream,tests.wallet.cat_wallet.test_cat_lifecycle,tests.wallet.cat_wallet.test_cat_wallet,tests.wallet.cat_wallet.test_offer_lifecycle,tests.wallet.cat_wallet.test_trades,tests.wallet.did_wallet.test_did,tests.wallet.did_wallet.test_did_rpc,tests.wallet.rl_wallet.test_rl_rpc,tests.wallet.rl_wallet.test_rl_wallet,tests.wallet.rpc.test_wallet_rpc,tests.wallet.simple_sync.test_simple_sync_protocol,tests.wallet.sync.test_wallet_sync,tests.wallet.test_bech32m,tests.wallet.test_chialisp,tests.wallet.test_puzzle_store,tests.wallet.test_singleton,tests.wallet.test_singleton_lifecycle,tests.wallet.test_singleton_lifecycle_fast,tests.wallet.test_taproot,tests.wallet.test_wallet,tests.wallet.test_wallet_blockchain,tests.wallet.test_wallet_interested_store,tests.wallet.test_wallet_key_val_store,tests.wallet.test_wallet_user_store,tests.wallet_tools,tests.weight_proof.test_weight_proof,tools.analyze-chain,tools.run_block,tools.test_full_sync] 
+[mypy-benchmarks.block_ref,benchmarks.block_store,benchmarks.coin_store,benchmarks.utils,build_scripts.installer-version,chia.clvm.spend_sim,chia.cmds.configure,chia.cmds.db,chia.cmds.db_upgrade_func,chia.cmds.farm_funcs,chia.cmds.init,chia.cmds.init_funcs,chia.cmds.keys,chia.cmds.keys_funcs,chia.cmds.passphrase,chia.cmds.passphrase_funcs,chia.cmds.plotnft,chia.cmds.plotnft_funcs,chia.cmds.plots,chia.cmds.plotters,chia.cmds.show,chia.cmds.start_funcs,chia.cmds.wallet,chia.cmds.wallet_funcs,chia.consensus.block_body_validation,chia.consensus.blockchain,chia.consensus.blockchain_interface,chia.consensus.block_creation,chia.consensus.block_header_validation,chia.consensus.block_record,chia.consensus.block_root_validation,chia.consensus.coinbase,chia.consensus.constants,chia.consensus.difficulty_adjustment,chia.consensus.get_block_challenge,chia.consensus.multiprocess_validation,chia.consensus.pos_quality,chia.consensus.vdf_info_computation,chia.daemon.client,chia.daemon.keychain_proxy,chia.daemon.keychain_server,chia.daemon.server,chia.farmer.farmer,chia.farmer.farmer_api,chia.full_node.block_height_map,chia.full_node.block_store,chia.full_node.bundle_tools,chia.full_node.coin_store,chia.full_node.full_node,chia.full_node.full_node_api,chia.full_node.full_node_store,chia.full_node.generator,chia.full_node.hint_store,chia.full_node.lock_queue,chia.full_node.mempool,chia.full_node.mempool_check_conditions,chia.full_node.mempool_manager,chia.full_node.pending_tx_cache,chia.full_node.sync_store,chia.full_node.weight_proof,chia.harvester.harvester,chia.harvester.harvester_api,chia.introducer.introducer,chia.introducer.introducer_api,chia.plotters.bladebit,chia.plotters.chiapos,chia.plotters.install_plotter,chia.plotters.madmax,chia.plotters.plotters,chia.plotters.plotters_util,chia.plotting.check_plots,chia.plotting.create_plots,chia.plotting.manager,chia.plotting.util,chia.pools.pool_config,chia.pools.pool_puzzles,chia.pools.pool_wallet,chia.pools.pool_wallet_info,chia.protocols.pool_protocol,chia.rpc.crawler_rpc_api,chia.rpc.farmer_rpc_api,chia.rpc.farmer_rpc_client,chia.rpc.full_node_rpc_api,chia.rpc.full_node_rpc_client,chia.rpc.harvester_rpc_api,chia.rpc.harvester_rpc_client,chia.rpc.rpc_client,chia.rpc.rpc_server,chia.rpc.timelord_rpc_api,chia.rpc.util,chia.rpc.wallet_rpc_api,chia.rpc.wallet_rpc_client,chia.seeder.crawler,chia.seeder.crawler_api,chia.seeder.crawl_store,chia.seeder.dns_server,chia.seeder.peer_record,chia.seeder.start_crawler,chia.server.address_manager,chia.server.address_manager_store,chia.server.connection_utils,chia.server.introducer_peers,chia.server.node_discovery,chia.server.peer_store_resolver,chia.server.rate_limits,chia.server.reconnect_task,chia.server.server,chia.server.ssl_context,chia.server.start_farmer,chia.server.start_full_node,chia.server.start_harvester,chia.server.start_introducer,chia.server.start_service,chia.server.start_timelord,chia.server.start_wallet,chia.server.upnp,chia.server.ws_connection,chia.simulator.full_node_simulator,chia.simulator.start_simulator,chia.ssl.create_ssl,chia.timelord.iters_from_block,chia.timelord.timelord,chia.timelord.timelord_api,chia.timelord.timelord_launcher,chia.timelord.timelord_state,chia.types.announcement,chia.types.blockchain_format.classgroup,chia.types.blockchain_format.coin,chia.types.blockchain_format.program,chia.types.blockchain_format.proof_of_space,chia.types.blockchain_format.tree_hash,chia.types.blockchain_format.vdf,chia.types.full_block,chia.types.header_block,chia.types.mempool_item,chia.types.name_pu
zzle_condition,chia.types.peer_info,chia.types.spend_bundle,chia.types.transaction_queue_entry,chia.types.unfinished_block,chia.types.unfinished_header_block,chia.util.api_decorators,chia.util.block_cache,chia.util.byte_types,chia.util.cached_bls,chia.util.check_fork_next_block,chia.util.chia_logging,chia.util.config,chia.util.db_wrapper,chia.util.dump_keyring,chia.util.file_keyring,chia.util.files,chia.util.hash,chia.util.ints,chia.util.json_util,chia.util.keychain,chia.util.keyring_wrapper,chia.util.log_exceptions,chia.util.lru_cache,chia.util.make_test_constants,chia.util.merkle_set,chia.util.network,chia.util.partial_func,chia.util.pip_import,chia.util.profiler,chia.util.safe_cancel_task,chia.util.service_groups,chia.util.ssl_check,chia.util.struct_stream,chia.util.validate_alert,chia.wallet.block_record,chia.wallet.cat_wallet.cat_utils,chia.wallet.cat_wallet.cat_wallet,chia.wallet.cat_wallet.lineage_store,chia.wallet.chialisp,chia.wallet.did_wallet.did_wallet,chia.wallet.did_wallet.did_wallet_puzzles,chia.wallet.key_val_store,chia.wallet.lineage_proof,chia.wallet.payment,chia.wallet.puzzles.load_clvm,chia.wallet.puzzles.p2_conditions,chia.wallet.puzzles.p2_delegated_conditions,chia.wallet.puzzles.p2_delegated_puzzle,chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle,chia.wallet.puzzles.p2_m_of_n_delegate_direct,chia.wallet.puzzles.p2_puzzle_hash,chia.wallet.puzzles.prefarm.spend_prefarm,chia.wallet.puzzles.puzzle_utils,chia.wallet.puzzles.rom_bootstrap_generator,chia.wallet.puzzles.singleton_top_layer,chia.wallet.puzzles.tails,chia.wallet.rl_wallet.rl_wallet,chia.wallet.rl_wallet.rl_wallet_puzzles,chia.wallet.secret_key_store,chia.wallet.settings.user_settings,chia.wallet.trade_manager,chia.wallet.trade_record,chia.wallet.trading.offer,chia.wallet.trading.trade_store,chia.wallet.transaction_record,chia.wallet.util.debug_spend_bundle,chia.wallet.util.new_peak_queue,chia.wallet.util.peer_request_cache,chia.wallet.util.wallet_sync_utils,chia.wallet.wallet,chia.wallet.wallet_action_store,chia.wallet.wallet_blockchain,chia.wallet.wallet_coin_store,chia.wallet.wallet_interested_store,chia.wallet.wallet_node,chia.wallet.wallet_node_api,chia.wallet.wallet_pool_store,chia.wallet.wallet_puzzle_store,chia.wallet.wallet_state_manager,chia.wallet.wallet_sync_store,chia.wallet.wallet_transaction_store,chia.wallet.wallet_user_store,chia.wallet.wallet_weight_proof_handler,installhelper,tests.blockchain.blockchain_test_utils,tests.blockchain.test_blockchain,tests.blockchain.test_blockchain_transactions,tests.block_tools,tests.build-init-files,tests.build-workflows,tests.clvm.coin_store,tests.clvm.test_chialisp_deserialization,tests.clvm.test_clvm_compilation,tests.clvm.test_program,tests.clvm.test_puzzle_compression,tests.clvm.test_puzzles,tests.clvm.test_serialized_program,tests.clvm.test_singletons,tests.clvm.test_spend_sim,tests.conftest,tests.connection_utils,tests.core.cmds.test_keys,tests.core.consensus.test_pot_iterations,tests.core.custom_types.test_coin,tests.core.custom_types.test_proof_of_space,tests.core.custom_types.test_spend_bundle,tests.core.daemon.test_daemon,tests.core.full_node.full_sync.test_full_sync,tests.core.full_node.stores.test_block_store,tests.core.full_node.stores.test_coin_store,tests.core.full_node.stores.test_full_node_store,tests.core.full_node.stores.test_hint_store,tests.core.full_node.stores.test_sync_store,tests.core.full_node.test_address_manager,tests.core.full_node.test_block_height_map,tests.core.full_node.test_conditions,tests.core.full_node.test_full_nod
e,tests.core.full_node.test_mempool,tests.core.full_node.test_mempool_performance,tests.core.full_node.test_node_load,tests.core.full_node.test_peer_store_resolver,tests.core.full_node.test_performance,tests.core.full_node.test_transactions,tests.core.make_block_generator,tests.core.node_height,tests.core.server.test_dos,tests.core.server.test_rate_limits,tests.core.ssl.test_ssl,tests.core.test_cost_calculation,tests.core.test_crawler_rpc,tests.core.test_daemon_rpc,tests.core.test_db_conversion,tests.core.test_farmer_harvester_rpc,tests.core.test_filter,tests.core.test_full_node_rpc,tests.core.test_merkle_set,tests.core.test_setproctitle,tests.core.util.test_cached_bls,tests.core.util.test_config,tests.core.util.test_file_keyring_synchronization,tests.core.util.test_files,tests.core.util.test_keychain,tests.core.util.test_keyring_wrapper,tests.core.util.test_lru_cache,tests.core.util.test_significant_bits,tests.farmer_harvester.test_farmer_harvester,tests.generator.test_compression,tests.generator.test_generator_types,tests.generator.test_list_to_batches,tests.generator.test_rom,tests.generator.test_scan,tests.plotting.test_plot_manager,tests.pools.test_pool_cmdline,tests.pools.test_pool_config,tests.pools.test_pool_puzzles_lifecycle,tests.pools.test_pool_rpc,tests.pools.test_wallet_pool_store,tests.setup_nodes,tests.setup_services,tests.simulation.test_simulation,tests.time_out_assert,tests.tools.test_full_sync,tests.tools.test_run_block,tests.util.alert_server,tests.util.benchmark_cost,tests.util.blockchain,tests.util.build_network_protocol_files,tests.util.db_connection,tests.util.generator_tools_testing,tests.util.keyring,tests.util.key_tool,tests.util.misc,tests.util.network,tests.util.rpc,tests.util.test_full_block_utils,tests.util.test_lock_queue,tests.util.test_network_protocol_files,tests.util.test_struct_stream,tests.wallet.cat_wallet.test_cat_lifecycle,tests.wallet.cat_wallet.test_cat_wallet,tests.wallet.cat_wallet.test_offer_lifecycle,tests.wallet.cat_wallet.test_trades,tests.wallet.did_wallet.test_did,tests.wallet.did_wallet.test_did_rpc,tests.wallet.rl_wallet.test_rl_rpc,tests.wallet.rl_wallet.test_rl_wallet,tests.wallet.rpc.test_wallet_rpc,tests.wallet.simple_sync.test_simple_sync_protocol,tests.wallet.sync.test_wallet_sync,tests.wallet.test_bech32m,tests.wallet.test_chialisp,tests.wallet.test_puzzle_store,tests.wallet.test_singleton,tests.wallet.test_singleton_lifecycle,tests.wallet.test_singleton_lifecycle_fast,tests.wallet.test_taproot,tests.wallet.test_wallet,tests.wallet.test_wallet_blockchain,tests.wallet.test_wallet_interested_store,tests.wallet.test_wallet_key_val_store,tests.wallet.test_wallet_user_store,tests.wallet_tools,tests.weight_proof.test_weight_proof,tools.analyze-chain,tools.run_block,tools.test_full_sync] disallow_any_generics = False disallow_subclassing_any = False disallow_untyped_calls = False diff --git a/pylintrc b/pylintrc index 0e6ea29a872a..c109b2465e5a 100644 --- a/pylintrc +++ b/pylintrc @@ -250,7 +250,6 @@ ignored-modules=blspy, chiavdf, cryptography, aiohttp, - websockets, keyring, keyrings.cryptfile, bitstring, diff --git a/setup.py b/setup.py index 2ac24927f492..55dc64d8204c 100644 --- a/setup.py +++ b/setup.py @@ -3,15 +3,15 @@ dependencies = [ "multidict==5.1.0", # Avoid 5.2.0 due to Avast "aiofiles==0.7.0", # Async IO for files - "blspy==1.0.9", # Signature library - "chiavdf==1.0.5", # timelord and vdf verification + "blspy==1.0.10", # Signature library + "chiavdf==1.0.6", # timelord and vdf verification "chiabip158==1.1", # 
bip158-style wallet filters - "chiapos==1.0.9", # proof of space + "chiapos==1.0.10", # proof of space "clvm==0.9.7", "clvm_tools==0.4.4", # Currying, Program.to, other conveniences "chia_rs==0.1.1", "clvm-tools-rs==0.1.7", # Rust implementation of clvm_tools - "aiohttp==3.7.4", # HTTP server for full node rpc + "aiohttp==3.8.1", # HTTP server for full node rpc "aiosqlite==0.17.0", # asyncio wrapper for sqlite, to store blocks "bitstring==3.1.9", # Binary data management library "colorama==0.4.4", # Colorizes terminal output @@ -57,6 +57,7 @@ "black==21.12b0", "aiohttp_cors", # For blackd "ipython", # For asyncio debugging + "pyinstaller==4.9", "types-aiofiles", "types-click", "types-cryptography", diff --git a/tests/block_tools.py b/tests/block_tools.py index 079ddde28f9e..6c573dffe364 100644 --- a/tests/block_tools.py +++ b/tests/block_tools.py @@ -50,7 +50,7 @@ from chia.full_node.signage_point import SignagePoint from chia.plotting.util import PlotsRefreshParameter, PlotRefreshResult, PlotRefreshEvents, parse_plot_info from chia.plotting.manager import PlotManager -from chia.server.server import ssl_context_for_server +from chia.server.server import ssl_context_for_client from chia.types.blockchain_format.classgroup import ClassgroupElement from chia.types.blockchain_format.coin import Coin, hash_coin_list from chia.types.blockchain_format.foliage import Foliage, FoliageBlockData, FoliageTransactionBlock, TransactionsInfo @@ -364,7 +364,7 @@ def get_daemon_ssl_context(self) -> ssl.SSLContext: key_path = self.root_path / self.config["daemon_ssl"]["private_key"] ca_cert_path = self.root_path / self.config["private_ssl_ca"]["crt"] ca_key_path = self.root_path / self.config["private_ssl_ca"]["key"] - return ssl_context_for_server(ca_cert_path, ca_key_path, crt_path, key_path) + return ssl_context_for_client(ca_cert_path, ca_key_path, crt_path, key_path) def get_plot_signature(self, m: bytes32, plot_pk: G1Element) -> G2Element: """ @@ -419,6 +419,7 @@ def get_consecutive_blocks( self, num_blocks: int, block_list_input: List[FullBlock] = None, + *, farmer_reward_puzzle_hash: Optional[bytes32] = None, pool_reward_puzzle_hash: Optional[bytes32] = None, transaction_data: Optional[SpendBundle] = None, @@ -427,6 +428,7 @@ def get_consecutive_blocks( force_overflow: bool = False, skip_slots: int = 0, # Force at least this number of empty slots before the first SB guarantee_transaction_block: bool = False, # Force that this block must be a tx block + keep_going_until_tx_block: bool = False, # keep making new blocks until we find a tx block normalized_to_identity_cc_eos: bool = False, normalized_to_identity_icc_eos: bool = False, normalized_to_identity_cc_sp: bool = False, @@ -566,7 +568,8 @@ def get_consecutive_blocks( removals = None if transaction_data_included: transaction_data = None - if transaction_data is not None and not transaction_data_included: + previous_generator = None + if transaction_data is not None: additions = transaction_data.additions() removals = transaction_data.removals() assert start_timestamp is not None @@ -582,9 +585,10 @@ def get_consecutive_blocks( else: pool_target = PoolTarget(self.pool_ph, uint32(0)) + block_generator: Optional[BlockGenerator] if transaction_data is not None: if type(previous_generator) is CompressorArg: - block_generator: Optional[BlockGenerator] = best_solution_generator_from_template( + block_generator = best_solution_generator_from_template( previous_generator, transaction_data ) else: @@ -629,6 +633,8 @@ def get_consecutive_blocks( ) if 
block_record.is_transaction_block: transaction_data_included = True + previous_generator = None + keep_going_until_tx_block = False else: if guarantee_transaction_block: continue @@ -650,7 +656,7 @@ def get_consecutive_blocks( latest_block = blocks[full_block.header_hash] finished_sub_slots_at_ip = [] num_blocks -= 1 - if num_blocks == 0: + if num_blocks <= 0 and not keep_going_until_tx_block: return block_list # Finish the end of sub-slot and try again next sub-slot @@ -789,7 +795,7 @@ def get_consecutive_blocks( removals = None if transaction_data_included: transaction_data = None - if transaction_data is not None and not transaction_data_included: + if transaction_data is not None: additions = transaction_data.additions() removals = transaction_data.removals() sub_slots_finished += 1 @@ -856,6 +862,8 @@ def get_consecutive_blocks( ) else: block_generator = simple_solution_generator(transaction_data) + if type(previous_generator) is list: + block_generator = BlockGenerator(block_generator.program, [], previous_generator) aggregate_signature = transaction_data.aggregated_signature else: block_generator = None @@ -895,6 +903,8 @@ def get_consecutive_blocks( if block_record.is_transaction_block: transaction_data_included = True + previous_generator = None + keep_going_until_tx_block = False elif guarantee_transaction_block: continue if pending_ses: @@ -911,7 +921,7 @@ def get_consecutive_blocks( blocks_added_this_sub_slot += 1 log.info(f"Created block {block_record.height } ov=True, iters " f"{block_record.total_iters}") num_blocks -= 1 - if num_blocks == 0: + if num_blocks <= 0 and not keep_going_until_tx_block: return block_list blocks[full_block.header_hash] = block_record diff --git a/tests/blockchain/test_blockchain.py b/tests/blockchain/test_blockchain.py index f6d9852bc85f..c413b6440329 100644 --- a/tests/blockchain/test_blockchain.py +++ b/tests/blockchain/test_blockchain.py @@ -3280,7 +3280,13 @@ async def test_reorg_flip_flop(empty_blockchain, bt): ) spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, all_coins.pop()) - chain_a = bt.get_consecutive_blocks(5, chain_a, previous_generator=[uint32(10)], transaction_data=spend_bundle) + chain_a = bt.get_consecutive_blocks( + 5, + chain_a, + previous_generator=[uint32(10)], + transaction_data=spend_bundle, + guarantee_transaction_block=True, + ) spend_bundle = wallet_a.generate_signed_transaction(1000, receiver_puzzlehash, all_coins.pop()) chain_a = bt.get_consecutive_blocks( diff --git a/tests/clvm/test_program.py b/tests/clvm/test_program.py index 5960e7396aa0..76aa5f639f05 100644 --- a/tests/clvm/test_program.py +++ b/tests/clvm/test_program.py @@ -2,6 +2,9 @@ from chia.types.blockchain_format.program import Program from clvm.EvalError import EvalError +from clvm_tools.curry import uncurry +from clvm.operators import KEYWORD_TO_ATOM +from clvm_tools.binutils import assemble, disassemble class TestProgram(TestCase): @@ -19,3 +22,28 @@ def test_at(self): self.assertRaises(ValueError, lambda: p.at("q")) self.assertRaises(EvalError, lambda: p.at("ff")) + + +def check_idempotency(f, *args): + prg = Program.to(f) + curried = prg.curry(*args) + + r = disassemble(curried) + f_0, args_0 = uncurry(curried) + + assert disassemble(f_0) == disassemble(f) + assert disassemble(args_0) == disassemble(Program.to(list(args))) + return r + + +def test_curry_uncurry(): + PLUS = KEYWORD_TO_ATOM["+"][0] + f = assemble("(+ 2 5)") + actual_disassembly = check_idempotency(f, 200, 30) + assert actual_disassembly == f"(a (q {PLUS} 2 
5) (c (q . 200) (c (q . 30) 1)))" + + f = assemble("(+ 2 5)") + args = assemble("(+ (q . 50) (q . 60))") + # passing "args" here wraps the arguments in a list + actual_disassembly = check_idempotency(f, args) + assert actual_disassembly == f"(a (q {PLUS} 2 5) (c (q {PLUS} (q . 50) (q . 60)) 1))" diff --git a/tests/core/daemon/test_daemon.py b/tests/core/daemon/test_daemon.py index 28d4226d5cd7..6218c6933a51 100644 --- a/tests/core/daemon/test_daemon.py +++ b/tests/core/daemon/test_daemon.py @@ -85,9 +85,6 @@ async def reader(ws, queue): read_handler.cancel() assert blockchain_state_found - # Suppress warning: "The explicit passing of coroutine objects to asyncio.wait() is deprecated since Python 3.8..." - # Can be removed when we upgrade to a newer version of websockets (9.1 works) - @pytest.mark.filterwarnings("ignore::DeprecationWarning:websockets.*") @pytest.mark.asyncio async def test_validate_keyring_passphrase_rpc(self, get_daemon_with_temp_keyring): local_b_tools: BlockTools = get_daemon_with_temp_keyring[0] @@ -169,9 +166,6 @@ async def check_empty_passphrase_case(response: aiohttp.http_websocket.WSMessage # Expect: validation failure await check_empty_passphrase_case(await ws.receive()) - # Suppress warning: "The explicit passing of coroutine objects to asyncio.wait() is deprecated since Python 3.8..." - # Can be removed when we upgrade to a newer version of websockets (9.1 works) - @pytest.mark.filterwarnings("ignore::DeprecationWarning:websockets.*") @pytest.mark.asyncio async def test_add_private_key(self, get_daemon_with_temp_keyring): local_b_tools: BlockTools = get_daemon_with_temp_keyring[0] diff --git a/tests/core/full_node/stores/test_coin_store.py b/tests/core/full_node/stores/test_coin_store.py index 169ce66210f4..47c27bae6b29 100644 --- a/tests/core/full_node/stores/test_coin_store.py +++ b/tests/core/full_node/stores/test_coin_store.py @@ -23,7 +23,6 @@ from chia.types.blockchain_format.sized_bytes import bytes32 from tests.util.db_connection import DBConnection - constants = test_constants WALLET_A = WalletTool(constants) @@ -238,36 +237,69 @@ async def test_rollback(self, cache_size: uint32, db_version, bt): async with DBConnection(db_version) as db_wrapper: coin_store = await CoinStore.create(db_wrapper, cache_size=uint32(cache_size)) - records: List[CoinRecord] = [] + selected_coin: Optional[CoinRecord] = None + all_coins: List[Coin] = [] for block in blocks: + all_coins += list(block.get_included_reward_coins()) if block.is_transaction_block(): removals: List[bytes32] = [] additions: List[Coin] = [] + assert block.foliage_transaction_block is not None + await coin_store.new_block( + block.height, + block.foliage_transaction_block.timestamp, + block.get_included_reward_coins(), + additions, + removals, + ) + coins = list(block.get_included_reward_coins()) + records: List[CoinRecord] = [await coin_store.get_coin_record(coin.name()) for coin in coins] + + spend_selected_coin = selected_coin is not None + if block.height != 0 and selected_coin is None: + # Select the first CoinRecord which will be spent at the next transaction block. 
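# Editor's note: the rewritten test below exercises CoinStore.rollback_to_block(), which now
# returns the CoinRecords it touched. A minimal, self-contained sketch of the invariant being
# asserted (FakeRecord and rollback() are illustrative stand-ins, not part of this patch):
from dataclasses import dataclass, replace
from typing import List

@dataclass(frozen=True)
class FakeRecord:
    name: str
    confirmed_block_index: int
    spent_block_index: int  # 0 means unspent

def rollback(records: List[FakeRecord], h: int) -> List[FakeRecord]:
    """Drop coins created after height h; un-spend coins spent after h."""
    out: List[FakeRecord] = []
    for r in records:
        if r.confirmed_block_index > h:
            continue  # creation reverted: the coin disappears entirely
        if r.spent_block_index > h:
            r = replace(r, spent_block_index=0)  # only the spend is reverted
        out.append(r)
    return out

# A coin confirmed at height 5 and spent at height 9 survives a rollback to height 8
# but comes back unspent, while a coin confirmed at height 9 vanishes:
assert rollback([FakeRecord("a", 5, 9), FakeRecord("b", 9, 0)], 8) == [FakeRecord("a", 5, 0)]
# End of editor's note; the test resumes below.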
+ selected_coin = records[0] + await coin_store._set_spent([r.name for r in records[1:]], block.height) + else: + await coin_store._set_spent([r.name for r in records], block.height) - if block.is_transaction_block(): - assert block.foliage_transaction_block is not None - await coin_store.new_block( - block.height, - block.foliage_transaction_block.timestamp, - block.get_included_reward_coins(), - additions, - removals, - ) - - coins = block.get_included_reward_coins() - records = [await coin_store.get_coin_record(coin.name()) for coin in coins] - - await coin_store._set_spent([r.name for r in records], block.height) + if spend_selected_coin: + assert selected_coin is not None + await coin_store._set_spent([selected_coin.name], block.height) - records = [await coin_store.get_coin_record(coin.name()) for coin in coins] + records = [await coin_store.get_coin_record(coin.name()) for coin in coins] # update coin records for record in records: assert record is not None - assert record.spent - assert record.spent_block_index == block.height + if ( + selected_coin is not None + and selected_coin.name == record.name + and not selected_coin.confirmed_block_index < block.height + ): + assert not record.spent + else: + assert record.spent + assert record.spent_block_index == block.height + + if spend_selected_coin: + break - reorg_index = 8 - await coin_store.rollback_to_block(reorg_index) + assert selected_coin is not None + reorg_index = selected_coin.confirmed_block_index + + # Get all CoinRecords. + all_records: List[CoinRecord] = [await coin_store.get_coin_record(coin.name()) for coin in all_coins] + + # The reorg will revert the creation and spend of many coins. It will also revert the spend (but not the + # creation) of the selected coin. + changed_records = await coin_store.rollback_to_block(reorg_index) + changed_coin_records = [cr.coin for cr in changed_records] + assert selected_coin in changed_records + for coin_record in all_records: + if coin_record.confirmed_block_index > reorg_index: + assert coin_record.coin in changed_coin_records + if coin_record.spent_block_index > reorg_index: + assert coin_record.coin in changed_coin_records for block in blocks: if block.is_transaction_block(): @@ -277,7 +309,7 @@ async def test_rollback(self, cache_size: uint32, db_version, bt): if block.height <= reorg_index: for record in records: assert record is not None - assert record.spent + assert record.spent == (record.name != selected_coin.name) else: for record in records: assert record is None diff --git a/tests/core/full_node/test_mempool.py b/tests/core/full_node/test_mempool.py index a05f5b4e3392..0d9f4c7e96b7 100644 --- a/tests/core/full_node/test_mempool.py +++ b/tests/core/full_node/test_mempool.py @@ -642,14 +642,15 @@ async def test_correct_block_index(self, bt, one_node_one_block, wallet_a): async def test_block_index_garbage(self, bt, one_node_one_block, wallet_a): full_node_1, server_1 = one_node_one_block - # garbage at the end of the argument list is ignored + # garbage at the end of the argument list is ignored in consensus mode, + # but not in mempool-mode cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [int_to_bytes(1), b"garbage"]) dic = {ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE: [cvp]} blocks, spend_bundle1, peer, status, err = await self.condition_tester(bt, one_node_one_block, wallet_a, dic) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) - assert err is None - assert sb1 is spend_bundle1 - assert status == 
MempoolInclusionStatus.SUCCESS + assert err is Err.INVALID_CONDITION + assert sb1 is None + assert status == MempoolInclusionStatus.FAILED @pytest.mark.asyncio async def test_negative_block_index(self, bt, one_node_one_block, wallet_a): @@ -708,7 +709,8 @@ async def test_correct_block_age(self, bt, one_node_one_block, wallet_a): async def test_block_age_garbage(self, bt, one_node_one_block, wallet_a): full_node_1, server_1 = one_node_one_block - # garbage at the end of the argument list is ignored + # garbage at the end of the argument list is ignored in consensus mode, + # but not in mempool mode cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(1), b"garbage"]) dic = {cvp.opcode: [cvp]} blocks, spend_bundle1, peer, status, err = await self.condition_tester( @@ -716,9 +718,9 @@ async def test_block_age_garbage(self, bt, one_node_one_block, wallet_a): ) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) - assert err is None - assert sb1 is spend_bundle1 - assert status == MempoolInclusionStatus.SUCCESS + assert err is Err.INVALID_CONDITION + assert sb1 is None + assert status == MempoolInclusionStatus.FAILED @pytest.mark.asyncio async def test_negative_block_age(self, bt, one_node_one_block, wallet_a): @@ -762,7 +764,8 @@ async def test_my_id_garbage(self, bt, one_node_one_block, wallet_a): _ = await next_block(full_node_1, wallet_a, bt) _ = await next_block(full_node_1, wallet_a, bt) coin = await next_block(full_node_1, wallet_a, bt) - # garbage at the end of the argument list is ignored + # garbage at the end of the argument list is ignored in consensus mode, + # but not in mempool mode cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, [coin.name(), b"garbage"]) dic = {cvp.opcode: [cvp]} blocks, spend_bundle1, peer, status, err = await self.condition_tester( @@ -770,9 +773,9 @@ async def test_my_id_garbage(self, bt, one_node_one_block, wallet_a): ) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) - assert err is None - assert sb1 is spend_bundle1 - assert status == MempoolInclusionStatus.SUCCESS + assert err is Err.INVALID_CONDITION + assert sb1 is None + assert status == MempoolInclusionStatus.FAILED @pytest.mark.asyncio async def test_invalid_my_id(self, bt, one_node_one_block, wallet_a): @@ -885,14 +888,15 @@ async def test_assert_time_garbage(self, bt, one_node_one_block, wallet_a): full_node_1, server_1 = one_node_one_block time_now = full_node_1.full_node.blockchain.get_peak().timestamp + 5 - # garbage at the end of the argument list is ignored + # garbage at the end of the argument list is ignored in consensus mode, + # but not in mempool mode cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_ABSOLUTE, [int_to_bytes(time_now), b"garbage"]) dic = {cvp.opcode: [cvp]} blocks, spend_bundle1, peer, status, err = await self.condition_tester(bt, one_node_one_block, wallet_a, dic) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) - assert err is None - assert sb1 is spend_bundle1 - assert status == MempoolInclusionStatus.SUCCESS + assert err is Err.INVALID_CONDITION + assert sb1 is None + assert status == MempoolInclusionStatus.FAILED @pytest.mark.asyncio async def test_assert_time_relative_exceeds(self, bt, one_node_one_block, wallet_a): @@ -927,15 +931,16 @@ async def test_assert_time_relative_garbage(self, bt, one_node_one_block, wallet full_node_1, server_1 = one_node_one_block time_relative = 0 - # garbage at the end of the arguments is ignored + # 
garbage at the end of the arguments is ignored in consensus mode, but + # not in mempool mode cvp = ConditionWithArgs(ConditionOpcode.ASSERT_SECONDS_RELATIVE, [int_to_bytes(time_relative), b"garbage"]) dic = {cvp.opcode: [cvp]} blocks, spend_bundle1, peer, status, err = await self.condition_tester(bt, one_node_one_block, wallet_a, dic) sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) - assert err is None - assert sb1 is spend_bundle1 - assert status == MempoolInclusionStatus.SUCCESS + assert err is Err.INVALID_CONDITION + assert sb1 is None + assert status == MempoolInclusionStatus.FAILED @pytest.mark.asyncio async def test_assert_time_relative_missing_arg(self, bt, one_node_one_block, wallet_a): @@ -990,17 +995,34 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: assert status == MempoolInclusionStatus.SUCCESS # ensure one spend can assert a coin announcement from another spend, even - # though the conditions have garbage (ignored) at the end + # though the conditions have garbage at the end @pytest.mark.asyncio - async def test_coin_announcement_garbage(self, bt, one_node_one_block, wallet_a): + @pytest.mark.parametrize( + "assert_garbage,announce_garbage,expected,expected_included", + [ + (True, False, Err.INVALID_CONDITION, MempoolInclusionStatus.FAILED), + (False, True, Err.INVALID_CONDITION, MempoolInclusionStatus.FAILED), + (False, False, None, MempoolInclusionStatus.SUCCESS), + ], + ) + async def test_coin_announcement_garbage( + self, assert_garbage, announce_garbage, expected, expected_included, bt, one_node_one_block, wallet_a + ): def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: announce = Announcement(coin_2.name(), b"test") - # garbage at the end is ignored - cvp = ConditionWithArgs(ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, [announce.name(), b"garbage"]) + # garbage at the end is ignored in consensus mode, but not in + # mempool mode + cvp = ConditionWithArgs( + ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, + [bytes(announce.name())] + ([b"garbage"] if announce_garbage else []), + ) dic = {cvp.opcode: [cvp]} - # garbage at the end is ignored - cvp2 = ConditionWithArgs(ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, [b"test", b"garbage"]) + # garbage at the end is ignored in consensus mode, but not in + # mempool mode + cvp2 = ConditionWithArgs( + ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, [b"test"] + ([b"garbage"] if assert_garbage else []) + ) dic2 = {cvp.opcode: [cvp2]} spend_bundle1 = generate_test_spend_bundle(wallet_a, coin_1, dic) spend_bundle2 = generate_test_spend_bundle(wallet_a, coin_2, dic2) @@ -1009,11 +1031,9 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle: full_node_1, server_1 = one_node_one_block blocks, bundle, status, err = await self.condition_tester2(bt, one_node_one_block, wallet_a, test_fun) - mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) - assert err is None - assert mempool_bundle is bundle - assert status == MempoolInclusionStatus.SUCCESS + assert err is expected + assert status == expected_included @pytest.mark.asyncio async def test_coin_announcement_missing_arg(self, bt, one_node_one_block, wallet_a): @@ -1175,17 +1195,34 @@ def test_fun(coin_1: Coin, coin_2: Coin): assert status == MempoolInclusionStatus.SUCCESS @pytest.mark.asyncio - async def test_puzzle_announcement_garbage(self, bt, one_node_one_block, wallet_a): + @pytest.mark.parametrize( + "assert_garbage,announce_garbage,expected,expected_included", + [ + (True, False, Err.INVALID_CONDITION, 
MempoolInclusionStatus.FAILED), + (False, True, Err.INVALID_CONDITION, MempoolInclusionStatus.FAILED), + (False, False, None, MempoolInclusionStatus.SUCCESS), + ], + ) + async def test_puzzle_announcement_garbage( + self, assert_garbage, announce_garbage, expected, expected_included, bt, one_node_one_block, wallet_a + ): full_node_1, server_1 = one_node_one_block def test_fun(coin_1: Coin, coin_2: Coin): announce = Announcement(coin_2.puzzle_hash, bytes(0x80)) - # garbage at the end is ignored - cvp = ConditionWithArgs(ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, [announce.name(), b"garbage"]) + # garbage at the end is ignored in consensus mode, but not in + # mempool mode + cvp = ConditionWithArgs( + ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, + [bytes(announce.name())] + ([b"garbage"] if assert_garbage else []), + ) dic = {cvp.opcode: [cvp]} - # garbage at the end is ignored - cvp2 = ConditionWithArgs(ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, [bytes(0x80), b"garbage"]) + # garbage at the end is ignored in consensus mode, but not in + # mempool mode + cvp2 = ConditionWithArgs( + ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, [bytes(0x80)] + ([b"garbage"] if announce_garbage else []) + ) dic2 = {cvp.opcode: [cvp2]} spend_bundle1 = generate_test_spend_bundle(wallet_a, coin_1, dic) spend_bundle2 = generate_test_spend_bundle(wallet_a, coin_2, dic2) @@ -1193,11 +1230,9 @@ def test_fun(coin_1: Coin, coin_2: Coin): return SpendBundle.aggregate([spend_bundle1, spend_bundle2]) blocks, bundle, status, err = await self.condition_tester2(bt, one_node_one_block, wallet_a, test_fun) - mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(bundle.name()) - assert err is None - assert mempool_bundle is bundle - assert status == MempoolInclusionStatus.SUCCESS + assert err is expected + assert status == expected_included @pytest.mark.asyncio async def test_puzzle_announcement_missing_arg(self, bt, one_node_one_block, wallet_a): @@ -1330,7 +1365,8 @@ async def test_assert_fee_condition(self, bt, one_node_one_block, wallet_a): async def test_assert_fee_condition_garbage(self, bt, one_node_one_block, wallet_a): full_node_1, server_1 = one_node_one_block - # garbage at the end of the arguments is ignored + # garbage at the end of the arguments is ignored in consensus mode, but + # not in mempool mode cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(10), b"garbage"]) dic = {cvp.opcode: [cvp]} blocks, spend_bundle1, peer, status, err = await self.condition_tester( @@ -1338,9 +1374,9 @@ async def test_assert_fee_condition_garbage(self, bt, one_node_one_block, wallet ) mempool_bundle = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) - assert err is None - assert mempool_bundle is not None - assert status == MempoolInclusionStatus.SUCCESS + assert err is Err.INVALID_CONDITION + assert mempool_bundle is None + assert status == MempoolInclusionStatus.FAILED @pytest.mark.asyncio async def test_assert_fee_condition_missing_arg(self, bt, one_node_one_block, wallet_a): @@ -1583,7 +1619,8 @@ async def test_my_parent_garbage(self, bt, one_node_one_block, wallet_a): _ = await next_block(full_node_1, wallet_a, bt) _ = await next_block(full_node_1, wallet_a, bt) coin = await next_block(full_node_1, wallet_a, bt) - # garbage at the end of the arguments list is allowed but stripped + # garbage at the end of the arguments list is allowed in consensus mode, + # but not in mempool mode cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_PARENT_ID, [coin.parent_coin_info, 
b"garbage"]) dic = {cvp.opcode: [cvp]} blocks, spend_bundle1, peer, status, err = await self.condition_tester( @@ -1592,9 +1629,9 @@ async def test_my_parent_garbage(self, bt, one_node_one_block, wallet_a): sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) - assert err is None - assert sb1 is spend_bundle1 - assert status == MempoolInclusionStatus.SUCCESS + assert err is Err.INVALID_CONDITION + assert sb1 is None + assert status == MempoolInclusionStatus.FAILED @pytest.mark.asyncio async def test_my_parent_missing_arg(self, bt, one_node_one_block, wallet_a): @@ -1669,9 +1706,9 @@ async def test_my_puzhash_garbage(self, bt, one_node_one_block, wallet_a): sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) - assert err is None - assert sb1 is spend_bundle1 - assert status == MempoolInclusionStatus.SUCCESS + assert err is Err.INVALID_CONDITION + assert sb1 is None + assert status == MempoolInclusionStatus.FAILED @pytest.mark.asyncio async def test_my_puzhash_missing_arg(self, bt, one_node_one_block, wallet_a): @@ -1736,7 +1773,8 @@ async def test_my_amount_garbage(self, bt, one_node_one_block, wallet_a): _ = await next_block(full_node_1, wallet_a, bt) _ = await next_block(full_node_1, wallet_a, bt) coin = await next_block(full_node_1, wallet_a, bt) - # garbage at the end of the arguments list is allowed but stripped + # garbage at the end of the arguments list is allowed in consensus mode, + # but not in mempool mode cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_AMOUNT, [int_to_bytes(coin.amount), b"garbage"]) dic = {cvp.opcode: [cvp]} blocks, spend_bundle1, peer, status, err = await self.condition_tester( @@ -1745,9 +1783,9 @@ async def test_my_amount_garbage(self, bt, one_node_one_block, wallet_a): sb1 = full_node_1.full_node.mempool_manager.get_spendbundle(spend_bundle1.name()) - assert err is None - assert sb1 is spend_bundle1 - assert status == MempoolInclusionStatus.SUCCESS + assert err is Err.INVALID_CONDITION + assert sb1 is None + assert status == MempoolInclusionStatus.FAILED @pytest.mark.asyncio async def test_my_amount_missing_arg(self, bt, one_node_one_block, wallet_a): diff --git a/tests/core/test_farmer_harvester_rpc.py b/tests/core/test_farmer_harvester_rpc.py index e312698fe007..df22f0237f39 100644 --- a/tests/core/test_farmer_harvester_rpc.py +++ b/tests/core/test_farmer_harvester_rpc.py @@ -17,7 +17,7 @@ from chia.util.config import load_config, lock_and_load_config, save_config from chia.util.hash import std_hash from chia.util.ints import uint8, uint16, uint32, uint64 -from chia.wallet.derive_keys import master_sk_to_wallet_sk +from chia.wallet.derive_keys import master_sk_to_wallet_sk, master_sk_to_wallet_sk_unhardened from tests.setup_nodes import setup_harvester_farmer, test_constants from tests.time_out_assert import time_out_assert, time_out_assert_custom_interval from tests.util.rpc import validate_get_routes @@ -181,36 +181,48 @@ async def test_farmer_reward_target_endpoints(bt, harvester_farmer_environment): targets_1 = await farmer_rpc_client.get_reward_targets(False) assert "have_pool_sk" not in targets_1 assert "have_farmer_sk" not in targets_1 - targets_2 = await farmer_rpc_client.get_reward_targets(True) + targets_2 = await farmer_rpc_client.get_reward_targets(True, 2) assert targets_2["have_pool_sk"] and targets_2["have_farmer_sk"] - new_ph: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(10)).get_g1()) - new_ph_2: bytes32 = 
create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(472)).get_g1()) + new_ph: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.farmer_master_sk, uint32(2)).get_g1()) + new_ph_2: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(7)).get_g1()) await farmer_rpc_client.set_reward_targets(encode_puzzle_hash(new_ph, "xch"), encode_puzzle_hash(new_ph_2, "xch")) - targets_3 = await farmer_rpc_client.get_reward_targets(True) + targets_3 = await farmer_rpc_client.get_reward_targets(True, 10) assert decode_puzzle_hash(targets_3["farmer_target"]) == new_ph assert decode_puzzle_hash(targets_3["pool_target"]) == new_ph_2 assert targets_3["have_pool_sk"] and targets_3["have_farmer_sk"] - new_ph_3: bytes32 = create_puzzlehash_for_pk(master_sk_to_wallet_sk(bt.pool_master_sk, uint32(1888)).get_g1()) - await farmer_rpc_client.set_reward_targets(None, encode_puzzle_hash(new_ph_3, "xch")) - targets_4 = await farmer_rpc_client.get_reward_targets(True) - assert decode_puzzle_hash(targets_4["farmer_target"]) == new_ph - assert decode_puzzle_hash(targets_4["pool_target"]) == new_ph_3 - assert not targets_4["have_pool_sk"] and targets_3["have_farmer_sk"] + # limiting the derivation search to 3 should fail to find the pool sk + targets_4 = await farmer_rpc_client.get_reward_targets(True, 3) + assert not targets_4["have_pool_sk"] and targets_4["have_farmer_sk"] + + # check observer addresses + observer_farmer: bytes32 = create_puzzlehash_for_pk( + master_sk_to_wallet_sk_unhardened(bt.farmer_master_sk, uint32(2)).get_g1() + ) + observer_pool: bytes32 = create_puzzlehash_for_pk( + master_sk_to_wallet_sk_unhardened(bt.pool_master_sk, uint32(7)).get_g1() + ) + await farmer_rpc_client.set_reward_targets( + encode_puzzle_hash(observer_farmer, "xch"), encode_puzzle_hash(observer_pool, "xch") + ) + targets = await farmer_rpc_client.get_reward_targets(True, 10) + assert decode_puzzle_hash(targets["farmer_target"]) == observer_farmer + assert decode_puzzle_hash(targets["pool_target"]) == observer_pool + assert targets["have_pool_sk"] and targets["have_farmer_sk"] root_path = farmer_api.farmer._root_path config = load_config(root_path, "config.yaml") - assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(new_ph, "xch") - assert config["pool"]["xch_target_address"] == encode_puzzle_hash(new_ph_3, "xch") + assert config["farmer"]["xch_target_address"] == encode_puzzle_hash(observer_farmer, "xch") + assert config["pool"]["xch_target_address"] == encode_puzzle_hash(observer_pool, "xch") - new_ph_3_encoded = encode_puzzle_hash(new_ph_3, "xch") - added_char = new_ph_3_encoded + "a" + new_ph_2_encoded = encode_puzzle_hash(new_ph_2, "xch") + added_char = new_ph_2_encoded + "a" with pytest.raises(ValueError): await farmer_rpc_client.set_reward_targets(None, added_char) - replaced_char = new_ph_3_encoded[0:-1] + "a" + replaced_char = new_ph_2_encoded[0:-1] + "a" with pytest.raises(ValueError): await farmer_rpc_client.set_reward_targets(None, replaced_char) diff --git a/tests/core/util/test_config.py b/tests/core/util/test_config.py index e2493f8c861a..c1c80ce51dc6 100644 --- a/tests/core/util/test_config.py +++ b/tests/core/util/test_config.py @@ -138,7 +138,7 @@ def run_reader_and_writer_tasks(root_path: Path, default_config: Dict): Subprocess entry point. This function spins off threads to perform read/write tasks concurrently, possibly leading to synchronization issues accessing config data. 
""" - asyncio.get_event_loop().run_until_complete(create_reader_and_writer_tasks(root_path, default_config)) + asyncio.run(create_reader_and_writer_tasks(root_path, default_config)) @pytest.fixture(scope="function") diff --git a/tests/core/util/test_streamable.py b/tests/core/util/test_streamable.py index 65b3255212ee..28b0a3ba6644 100644 --- a/tests/core/util/test_streamable.py +++ b/tests/core/util/test_streamable.py @@ -1,10 +1,13 @@ +from __future__ import annotations + +import io from dataclasses import dataclass from typing import Dict, List, Optional, Tuple -import io -import pytest +import pytest from clvm_tools import binutils from pytest import raises +from typing_extensions import Literal from chia.protocols.wallet_protocol import RespondRemovals from chia.types.blockchain_format.coin import Coin @@ -16,19 +19,20 @@ from chia.util.streamable import ( DefinitionError, Streamable, - streamable, + is_type_List, + is_type_SpecificOptional, parse_bool, - parse_uint32, - write_uint32, - parse_optional, parse_bytes, parse_list, - parse_tuple, + parse_optional, parse_size_hints, parse_str, - is_type_List, - is_type_SpecificOptional, + parse_tuple, + parse_uint32, + streamable, + write_uint32, ) +from tests.block_tools import BlockTools from tests.setup_nodes import test_constants @@ -59,22 +63,26 @@ class TestClassDict(Streamable): a: Dict[str, str] +@dataclass(frozen=True) +class DataclassOnly: + a: uint8 + + def test_pure_dataclass_not_supported() -> None: - @dataclass(frozen=True) - class DataClassOnly: - a: uint8 with raises(NotImplementedError): @streamable @dataclass(frozen=True) class TestClassDataclass(Streamable): - a: DataClassOnly + a: DataclassOnly + + +class PlainClass: + a: uint8 def test_plain_class_not_supported() -> None: - class PlainClass: - a: uint8 with raises(NotImplementedError): @@ -84,74 +92,81 @@ class TestClassPlain(Streamable): a: PlainClass -def test_basic_list(): +def test_basic_list() -> None: a = [1, 2, 3] assert is_type_List(type(a)) assert is_type_List(List) assert is_type_List(List[int]) assert is_type_List(List[uint8]) assert is_type_List(list) - assert not is_type_List(Tuple) + assert not is_type_List(type(Tuple)) assert not is_type_List(tuple) assert not is_type_List(dict) -def test_not_lists(): +def test_not_lists() -> None: assert not is_type_List(Dict) -def test_basic_optional(): +def test_basic_optional() -> None: assert is_type_SpecificOptional(Optional[int]) assert is_type_SpecificOptional(Optional[Optional[int]]) assert not is_type_SpecificOptional(List[int]) -def test_StrictDataClass(): +def test_StrictDataClass() -> None: @streamable @dataclass(frozen=True) class TestClass1(Streamable): a: uint8 b: str - good: TestClass1 = TestClass1(24, "!@12") + # we want to test invalid here, hence the ignore. + good: TestClass1 = TestClass1(24, "!@12") # type: ignore[arg-type] assert TestClass1.__name__ == "TestClass1" assert good assert good.a == 24 assert good.b == "!@12" - good2 = TestClass1(52, bytes([1, 2, 3])) + # we want to test invalid here, hence the ignore. + good2 = TestClass1(52, bytes([1, 2, 3])) # type: ignore[arg-type] assert good2.b == str(bytes([1, 2, 3])) -def test_StrictDataClassBad(): +def test_StrictDataClassBad() -> None: @streamable @dataclass(frozen=True) class TestClass2(Streamable): a: uint8 b = 0 - assert TestClass2(25) + # we want to test invalid here, hence the ignore. + assert TestClass2(25) # type: ignore[arg-type] + # we want to test invalid here, hence the ignore. 
with raises(TypeError): - TestClass2(1, 2) # pylint: disable=too-many-function-args + TestClass2(1, 2) # type: ignore[call-arg,arg-type] # pylint: disable=too-many-function-args -def test_StrictDataClassLists(): +def test_StrictDataClassLists() -> None: @streamable @dataclass(frozen=True) class TestClass(Streamable): a: List[uint8] b: List[List[uint8]] - assert TestClass([1, 2, 3], [[uint8(200), uint8(25)], [uint8(25)]]) + # we want to test invalid here, hence the ignore. + assert TestClass([1, 2, 3], [[uint8(200), uint8(25)], [uint8(25)]]) # type: ignore[list-item] + # we want to test invalid here, hence the ignore. with raises(ValueError): - TestClass({"1": 1}, [[uint8(200), uint8(25)], [uint8(25)]]) + TestClass({"1": 1}, [[uint8(200), uint8(25)], [uint8(25)]]) # type: ignore[arg-type] + # we want to test invalid here, hence the ignore. with raises(ValueError): - TestClass([1, 2, 3], [uint8(200), uint8(25)]) + TestClass([1, 2, 3], [uint8(200), uint8(25)]) # type: ignore[list-item] -def test_StrictDataClassOptional(): +def test_StrictDataClassOptional() -> None: @streamable @dataclass(frozen=True) class TestClass(Streamable): @@ -160,11 +175,12 @@ class TestClass(Streamable): c: Optional[Optional[uint8]] d: Optional[Optional[uint8]] - good = TestClass(12, None, 13, None) + # we want to test invalid here, hence the ignore. + good = TestClass(12, None, 13, None) # type: ignore[arg-type] assert good -def test_basic(): +def test_basic() -> None: @streamable @dataclass(frozen=True) class TestClass(Streamable): @@ -176,13 +192,14 @@ class TestClass(Streamable): f: Optional[uint32] g: Tuple[uint32, str, bytes] - a = TestClass(24, 352, [1, 2, 4], [[1, 2, 3], [3, 4]], 728, None, (383, "hello", b"goodbye")) + # we want to test invalid here, hence the ignore. 
+ a = TestClass(24, 352, [1, 2, 4], [[1, 2, 3], [3, 4]], 728, None, (383, "hello", b"goodbye")) # type: ignore[arg-type,list-item] # noqa: E501 b: bytes = bytes(a) assert a == TestClass.from_bytes(b) -def test_variable_size(): +def test_variable_size() -> None: @streamable @dataclass(frozen=True) class TestClass2(Streamable): @@ -201,7 +218,7 @@ class TestClass3(Streamable): a: int -def test_json(bt): +def test_json(bt: BlockTools) -> None: block = bt.create_genesis_block(test_constants, bytes32([0] * 32), uint64(0)) dict_block = block.to_json_dict() assert FullBlock.from_json_dict(dict_block) == block @@ -226,42 +243,44 @@ class OptionalTestClass(Streamable): (None, None, None), ], ) -def test_optional_json(a: Optional[str], b: Optional[bool], c: Optional[List[Optional[str]]]): +def test_optional_json(a: Optional[str], b: Optional[bool], c: Optional[List[Optional[str]]]) -> None: obj: OptionalTestClass = OptionalTestClass.from_json_dict({"a": a, "b": b, "c": c}) assert obj.a == a assert obj.b == b assert obj.c == c -def test_recursive_json(): - @streamable - @dataclass(frozen=True) - class TestClass1(Streamable): - a: List[uint32] +@streamable +@dataclass(frozen=True) +class TestClassRecursive1(Streamable): + a: List[uint32] + + +@streamable +@dataclass(frozen=True) +class TestClassRecursive2(Streamable): + a: uint32 + b: List[Optional[List[TestClassRecursive1]]] + c: bytes32 - @streamable - @dataclass(frozen=True) - class TestClass2(Streamable): - a: uint32 - b: List[Optional[List[TestClass1]]] - c: bytes32 - tc1_a = TestClass1([uint32(1), uint32(2)]) - tc1_b = TestClass1([uint32(4), uint32(5)]) - tc1_c = TestClass1([uint32(7), uint32(8)]) +def test_recursive_json() -> None: + tc1_a = TestClassRecursive1([uint32(1), uint32(2)]) + tc1_b = TestClassRecursive1([uint32(4), uint32(5)]) + tc1_c = TestClassRecursive1([uint32(7), uint32(8)]) - tc2 = TestClass2(uint32(5), [[tc1_a], [tc1_b, tc1_c], None], bytes32(bytes([1] * 32))) - assert TestClass2.from_json_dict(tc2.to_json_dict()) == tc2 + tc2 = TestClassRecursive2(uint32(5), [[tc1_a], [tc1_b, tc1_c], None], bytes32(bytes([1] * 32))) + assert TestClassRecursive2.from_json_dict(tc2.to_json_dict()) == tc2 -def test_recursive_types(): +def test_recursive_types() -> None: coin: Optional[Coin] = None l1 = [(bytes32([2] * 32), coin)] rr = RespondRemovals(uint32(1), bytes32([1] * 32), l1, None) RespondRemovals(rr.height, rr.header_hash, rr.coins, rr.proofs) -def test_ambiguous_deserialization_optionals(): +def test_ambiguous_deserialization_optionals() -> None: with raises(AssertionError): SubEpochChallengeSegment.from_bytes(b"\x00\x00\x00\x03\xff\xff\xff\xff") @@ -278,7 +297,7 @@ class TestClassOptional(Streamable): TestClassOptional.from_bytes(bytes([1, 2])) -def test_ambiguous_deserialization_int(): +def test_ambiguous_deserialization_int() -> None: @streamable @dataclass(frozen=True) class TestClassUint(Streamable): @@ -289,7 +308,7 @@ class TestClassUint(Streamable): TestClassUint.from_bytes(b"\x00\x00") -def test_ambiguous_deserialization_list(): +def test_ambiguous_deserialization_list() -> None: @streamable @dataclass(frozen=True) class TestClassList(Streamable): @@ -300,7 +319,7 @@ class TestClassList(Streamable): TestClassList.from_bytes(bytes([0, 0, 100, 24])) -def test_ambiguous_deserialization_tuple(): +def test_ambiguous_deserialization_tuple() -> None: @streamable @dataclass(frozen=True) class TestClassTuple(Streamable): @@ -311,7 +330,7 @@ class TestClassTuple(Streamable): TestClassTuple.from_bytes(bytes([0, 0, 100, 24])) -def 
test_ambiguous_deserialization_str(): +def test_ambiguous_deserialization_str() -> None: @streamable @dataclass(frozen=True) class TestClassStr(Streamable): @@ -322,7 +341,7 @@ class TestClassStr(Streamable): TestClassStr.from_bytes(bytes([0, 0, 100, 24, 52])) -def test_ambiguous_deserialization_bytes(): +def test_ambiguous_deserialization_bytes() -> None: @streamable @dataclass(frozen=True) class TestClassBytes(Streamable): @@ -339,7 +358,7 @@ class TestClassBytes(Streamable): TestClassBytes.from_bytes(bytes([0, 0, 0, 2, 52, 21])) -def test_ambiguous_deserialization_bool(): +def test_ambiguous_deserialization_bool() -> None: @streamable @dataclass(frozen=True) class TestClassBool(Streamable): @@ -353,13 +372,13 @@ class TestClassBool(Streamable): TestClassBool.from_bytes(bytes([1])) -def test_ambiguous_deserialization_program(): +def test_ambiguous_deserialization_program() -> None: @streamable @dataclass(frozen=True) class TestClassProgram(Streamable): a: Program - program = Program.to(binutils.assemble("()")) + program = Program.to(binutils.assemble("()")) # type: ignore[no-untyped-call] # TODO, add typing in clvm_tools TestClassProgram.from_bytes(bytes(program)) @@ -367,7 +386,7 @@ class TestClassProgram(Streamable): TestClassProgram.from_bytes(bytes(program) + b"9") -def test_streamable_empty(): +def test_streamable_empty() -> None: @streamable @dataclass(frozen=True) class A(Streamable): @@ -376,7 +395,7 @@ class A(Streamable): assert A.from_bytes(bytes(A())) == A() -def test_parse_bool(): +def test_parse_bool() -> None: assert not parse_bool(io.BytesIO(b"\x00")) assert parse_bool(io.BytesIO(b"\x01")) @@ -391,7 +410,7 @@ def test_parse_bool(): parse_bool(io.BytesIO(b"\x02")) -def test_uint32(): +def test_uint32() -> None: assert parse_uint32(io.BytesIO(b"\x00\x00\x00\x00")) == 0 assert parse_uint32(io.BytesIO(b"\x00\x00\x00\x01")) == 1 assert parse_uint32(io.BytesIO(b"\x00\x00\x00\x01"), "little") == 16777216 @@ -399,7 +418,7 @@ def test_uint32(): assert parse_uint32(io.BytesIO(b"\x01\x00\x00\x00"), "little") == 1 assert parse_uint32(io.BytesIO(b"\xff\xff\xff\xff"), "little") == 4294967295 - def test_write(value, byteorder): + def test_write(value: int, byteorder: Literal["little", "big"]) -> None: f = io.BytesIO() write_uint32(f, uint32(value), byteorder) f.seek(0) @@ -420,7 +439,7 @@ def test_write(value, byteorder): parse_uint32(io.BytesIO(b"\x00\x00\x00")) -def test_parse_optional(): +def test_parse_optional() -> None: assert parse_optional(io.BytesIO(b"\x00"), parse_bool) is None assert parse_optional(io.BytesIO(b"\x01\x01"), parse_bool) assert not parse_optional(io.BytesIO(b"\x01\x00"), parse_bool) @@ -437,7 +456,7 @@ def test_parse_optional(): parse_optional(io.BytesIO(b"\xff\x00"), parse_bool) -def test_parse_bytes(): +def test_parse_bytes() -> None: assert parse_bytes(io.BytesIO(b"\x00\x00\x00\x00")) == b"" assert parse_bytes(io.BytesIO(b"\x00\x00\x00\x01\xff")) == b"\xff" @@ -463,7 +482,7 @@ def test_parse_bytes(): parse_bytes(io.BytesIO(b"\x00\x00\x02\x01" + b"a" * 512)) -def test_parse_list(): +def test_parse_list() -> None: assert parse_list(io.BytesIO(b"\x00\x00\x00\x00"), parse_bool) == [] assert parse_list(io.BytesIO(b"\x00\x00\x00\x01\x01"), parse_bool) == [True] @@ -484,7 +503,7 @@ def test_parse_list(): parse_list(io.BytesIO(b"\x00\x00\x00\x01\x02"), parse_bool) -def test_parse_tuple(): +def test_parse_tuple() -> None: assert parse_tuple(io.BytesIO(b""), []) == () assert parse_tuple(io.BytesIO(b"\x00\x00"), [parse_bool, parse_bool]) == (False, False) @@ -499,33 
+518,35 @@ def test_parse_tuple(): parse_tuple(io.BytesIO(b"\x00"), [parse_bool, parse_bool]) -def test_parse_size_hints(): - class TestFromBytes: - b: bytes +class TestFromBytes: + b: bytes - @classmethod - def from_bytes(cls, b): - ret = TestFromBytes() - ret.b = b - return ret + @classmethod + def from_bytes(cls, b: bytes) -> TestFromBytes: + ret = TestFromBytes() + ret.b = b + return ret + +class FailFromBytes: + @classmethod + def from_bytes(cls, b: bytes) -> FailFromBytes: + raise ValueError() + + +def test_parse_size_hints() -> None: assert parse_size_hints(io.BytesIO(b"1337"), TestFromBytes, 4).b == b"1337" # EOF with raises(AssertionError): parse_size_hints(io.BytesIO(b"133"), TestFromBytes, 4) - class FailFromBytes: - @classmethod - def from_bytes(cls, b): - raise ValueError() - # error in underlying type with raises(ValueError): parse_size_hints(io.BytesIO(b"1337"), FailFromBytes, 4) -def test_parse_str(): +def test_parse_str() -> None: assert parse_str(io.BytesIO(b"\x00\x00\x00\x00")) == "" assert parse_str(io.BytesIO(b"\x00\x00\x00\x01a")) == "a" @@ -551,7 +572,7 @@ def test_parse_str(): parse_str(io.BytesIO(b"\x00\x00\x02\x01" + b"a" * 512)) -def test_wrong_decorator_order(): +def test_wrong_decorator_order() -> None: with raises(DefinitionError): @@ -561,7 +582,7 @@ class WrongDecoratorOrder(Streamable): pass -def test_dataclass_not_frozen(): +def test_dataclass_not_frozen() -> None: with raises(DefinitionError): @@ -571,7 +592,7 @@ class DataclassNotFrozen(Streamable): pass -def test_dataclass_missing(): +def test_dataclass_missing() -> None: with raises(DefinitionError): @@ -580,11 +601,11 @@ class DataclassMissing(Streamable): pass -def test_streamable_inheritance_missing(): +def test_streamable_inheritance_missing() -> None: with raises(DefinitionError): - + # we want to test invalid here, hence the ignore. 
@streamable @dataclass(frozen=True) - class StreamableInheritanceMissing: + class StreamableInheritanceMissing: # type: ignore[type-var] pass diff --git a/tests/plotting/test_plot_manager.py b/tests/plotting/test_plot_manager.py index fea50cc4e171..41a9a363c6c1 100644 --- a/tests/plotting/test_plot_manager.py +++ b/tests/plotting/test_plot_manager.py @@ -19,7 +19,7 @@ ) from chia.util.config import create_default_chia_config from chia.util.path import mkdir -from chia.plotting.manager import PlotManager +from chia.plotting.manager import Cache, PlotManager from tests.block_tools import get_plot_dir from tests.plotting.util import get_test_plots from tests.time_out_assert import time_out_assert @@ -185,7 +185,6 @@ async def run_test_case( assert len(get_plot_directories(env.root_path)) == expected_directories await env.refresh_tester.run(expected_result) assert len(env.refresh_tester.plot_manager.plots) == expect_total_plots - assert len(env.refresh_tester.plot_manager.cache) == expect_total_plots assert len(env.refresh_tester.plot_manager.get_duplicates()) == expect_duplicates assert len(env.refresh_tester.plot_manager.failed_to_open_filenames) == 0 @@ -478,10 +477,13 @@ async def test_plot_info_caching(test_plot_environment, bt): assert env.refresh_tester.plot_manager.cache.path().exists() refresh_tester: PlotRefreshTester = PlotRefreshTester(env.root_path) plot_manager = refresh_tester.plot_manager + plot_manager.set_public_keys(bt.plot_manager.farmer_public_keys, bt.plot_manager.pool_public_keys) plot_manager.cache.load() assert len(plot_manager.cache) == len(env.refresh_tester.plot_manager.cache) - for plot_id, cache_entry in env.refresh_tester.plot_manager.cache.items(): - cache_entry_new = plot_manager.cache.get(plot_id) + for path, cache_entry in env.refresh_tester.plot_manager.cache.items(): + cache_entry_new = plot_manager.cache.get(path) + assert bytes(cache_entry_new.prover) == bytes(cache_entry.prover) + assert cache_entry_new.farmer_public_key == cache_entry.farmer_public_key assert cache_entry_new.pool_public_key == cache_entry.pool_public_key assert cache_entry_new.pool_contract_puzzle_hash == cache_entry.pool_contract_puzzle_hash assert cache_entry_new.plot_public_key == cache_entry.plot_public_key @@ -515,6 +517,40 @@ async def test_plot_info_caching(test_plot_environment, bt): plot_manager.stop_refreshing() +@pytest.mark.asyncio +async def test_cache_lifetime(test_plot_environment: TestEnvironment) -> None: + # Load a directory to produce a cache file + env: TestEnvironment = test_plot_environment + expected_result = PlotRefreshResult() + add_plot_directory(env.root_path, str(env.dir_1.path)) + expected_result.loaded = env.dir_1.plot_info_list() # type: ignore[assignment] + expected_result.removed = [] + expected_result.processed = len(env.dir_1) + expected_result.remaining = 0 + await env.refresh_tester.run(expected_result) + expected_result.loaded = [] + cache_v1: Cache = env.refresh_tester.plot_manager.cache + assert len(cache_v1) > 0 + count_before = len(cache_v1) + # Remove half of the plots in dir1 + for path in env.dir_1.path_list()[0 : int(len(env.dir_1) / 2)]: + expected_result.processed -= 1 + expected_result.removed.append(path) + unlink(path) + # Modify the `last_use` timestamp of all cache entries to let them expire + last_use_before = time.time() - Cache.expiry_seconds - 1 + for cache_entry in cache_v1.values(): + cache_entry.last_use = last_use_before + assert cache_entry.expired(Cache.expiry_seconds) + # The next refresh cycle will now lead to half of the 
cache entries being removed because they are expired and + the related plots no longer exist. + await env.refresh_tester.run(expected_result) + assert len(cache_v1) == count_before - len(expected_result.removed) + # The other half of the cache entries should have a different `last_use` value now. + for cache_entry in cache_v1.values(): + assert cache_entry.last_use != last_use_before + + @pytest.mark.parametrize( ["event_to_raise"], [ diff --git a/tests/runner_templates/build-test-macos b/tests/runner_templates/build-test-macos index 536ab47fcfa5..94ea0d294c3e 100644 --- a/tests/runner_templates/build-test-macos +++ b/tests/runner_templates/build-test-macos @@ -79,7 +79,7 @@ INSTALL_TIMELORD - name: Test TEST_NAME code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test TEST_DIR --durations=10 PYTEST_PARALLEL_ARGS -m "not benchmark" + venv/bin/coverage run --rcfile=.coveragerc --module pytest TEST_DIR --durations=10 PYTEST_PARALLEL_ARGS -m "not benchmark" - name: Process coverage data run: | @@ -91,7 +91,7 @@ INSTALL_TIMELORD venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/tests/runner_templates/build-test-ubuntu b/tests/runner_templates/build-test-ubuntu index bc587a89aef1..1fc0c78a13f2 100644 --- a/tests/runner_templates/build-test-ubuntu +++ b/tests/runner_templates/build-test-ubuntu @@ -78,7 +78,7 @@ INSTALL_TIMELORD - name: Test TEST_NAME code with pytest run: | . ./activate - venv/bin/coverage run --rcfile=.coveragerc ./venv/bin/py.test TEST_DIR --durations=10 PYTEST_PARALLEL_ARGS -m "not benchmark" DISABLE_PYTEST_MONITOR + venv/bin/coverage run --rcfile=.coveragerc --module pytest TEST_DIR --durations=10 PYTEST_PARALLEL_ARGS -m "not benchmark" DISABLE_PYTEST_MONITOR - name: Process coverage data run: | @@ -90,7 +90,7 @@ INSTALL_TIMELORD venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/tests/tools/test_full_sync.py b/tests/tools/test_full_sync.py index 5b6dcbfa8e44..ad64138ecdae 100644 --- a/tests/tools/test_full_sync.py +++ b/tests/tools/test_full_sync.py @@ -10,4 +10,4 @@ def test_full_sync_test(): file_path = os.path.realpath(__file__) db_file = Path(file_path).parent / "test-blockchain-db.sqlite" - asyncio.run(run_sync_test(db_file, db_version=2, profile=False, single_thread=False)) + asyncio.run(run_sync_test(db_file, db_version=2, profile=False, single_thread=False, test_constants=False)) diff --git a/tests/util/alert_server.py b/tests/util/alert_server.py index fbf25bf50d2c..4f0f2d7ad786 100644 --- a/tests/util/alert_server.py +++ b/tests/util/alert_server.py @@ -77,7 +77,7 @@ def main(): ) quit() - return asyncio.get_event_loop().run_until_complete(run_and_wait(file_path, port)) + return asyncio.run(run_and_wait(file_path, port)) if __name__ == "__main__": diff --git a/tests/util/socket.py b/tests/util/socket.py index 84630c6cad91..74526a1dde20 100644 --- a/tests/util/socket.py +++ b/tests/util/socket.py @@ -9,7 +9,7 @@ def find_available_listen_port(name: str = "free") -> int: global recent_ports while True: - port = secrets.randbits(15) + 2000 + port = secrets.randbelow(0xFFFF - 1024) + 1024 if port in recent_ports: continue diff --git a/tests/wallet/cat_wallet/test_cat_wallet.py 
diff --git a/tests/wallet/cat_wallet/test_cat_wallet.py b/tests/wallet/cat_wallet/test_cat_wallet.py
index a8c57100a84c..bfde40d42b32 100644
--- a/tests/wallet/cat_wallet/test_cat_wallet.py
+++ b/tests/wallet/cat_wallet/test_cat_wallet.py
@@ -98,6 +98,46 @@ async def test_cat_creation(self, self_hostname, two_wallet_nodes, trusted):
         assert new_cat_wallet.cat_info.my_tail == cat_wallet.cat_info.my_tail
         assert await cat_wallet.lineage_store.get_all_lineage_proofs() == all_lineage
 
+    @pytest.mark.asyncio
+    async def test_cat_creation_unique_lineage_store(self, self_hostname, two_wallet_nodes):
+        num_blocks = 3
+        full_nodes, wallets = two_wallet_nodes
+        full_node_api = full_nodes[0]
+        full_node_server = full_node_api.server
+        wallet_node, wallet_server = wallets[0]
+        wallet = wallet_node.wallet_state_manager.main_wallet
+        ph = await wallet.get_new_puzzlehash()
+        wallet_node.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+
+        await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+        for i in range(0, num_blocks):
+            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+        await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(32 * b"0"))
+
+        funds = sum(
+            [
+                calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i))
+                for i in range(1, num_blocks + 1)
+            ]
+        )
+
+        await time_out_assert(15, wallet.get_confirmed_balance, funds)
+        await time_out_assert(10, wallet_is_synced, True, wallet_node, full_node_api)
+
+        async with wallet_node.wallet_state_manager.lock:
+            cat_wallet_1: CATWallet = await CATWallet.create_new_cat_wallet(
+                wallet_node.wallet_state_manager, wallet, {"identifier": "genesis_by_id"}, uint64(100)
+            )
+            cat_wallet_2: CATWallet = await CATWallet.create_new_cat_wallet(
+                wallet_node.wallet_state_manager, wallet, {"identifier": "genesis_by_id"}, uint64(200)
+            )
+
+        proofs_1 = await cat_wallet_1.lineage_store.get_all_lineage_proofs()
+        proofs_2 = await cat_wallet_2.lineage_store.get_all_lineage_proofs()
+        assert len(proofs_1) == len(proofs_2)
+        assert proofs_1 != proofs_2
+        assert cat_wallet_1.lineage_store.table_name != cat_wallet_2.lineage_store.table_name
+
     @pytest.mark.parametrize(
         "trusted",
         [True, False],
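Note: the new test pins down that every CAT wallet keeps its lineage proofs in its own table, so two wallets created from the same identifier cannot clobber each other's proofs. One plausible way such per-asset table names could be derived (a sketch; the actual CATLineageStore naming scheme may differ):

from hashlib import sha256

def lineage_table_name(asset_id: bytes) -> str:
    # One table per CAT asset keeps lineage proofs from colliding
    return f"lineage_proofs_{asset_id.hex()}"

asset_a = sha256(b"asset-a").digest()
asset_b = sha256(b"asset-b").digest()
assert lineage_table_name(asset_a) != lineage_table_name(asset_b)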
diff --git a/tests/wallet/rpc/test_wallet_rpc.py b/tests/wallet/rpc/test_wallet_rpc.py
index 87eb78283944..ac631d6c4cff 100644
--- a/tests/wallet/rpc/test_wallet_rpc.py
+++ b/tests/wallet/rpc/test_wallet_rpc.py
@@ -26,7 +26,7 @@
 from chia.util.ints import uint16, uint32, uint64
 from chia.wallet.cat_wallet.cat_constants import DEFAULT_CATS
 from chia.wallet.cat_wallet.cat_wallet import CATWallet
-from chia.wallet.derive_keys import master_sk_to_wallet_sk
+from chia.wallet.derive_keys import master_sk_to_wallet_sk, master_sk_to_wallet_sk_unhardened
 from chia.wallet.trading.trade_status import TradeStatus
 from chia.wallet.transaction_record import TransactionRecord
 from chia.wallet.transaction_sorting import SortKey
@@ -643,7 +643,37 @@ async def tx_in_mempool_2():
         assert sk_dict["used_for_pool_rewards"] is True
 
         # Check unknown key
-        sk_dict = await client.check_delete_key(123456)
+        sk_dict = await client.check_delete_key(123456, 10)
+        assert sk_dict["fingerprint"] == 123456
+        assert sk_dict["used_for_farmer_rewards"] is False
+        assert sk_dict["used_for_pool_rewards"] is False
+
+        # Add in observer reward addresses into farmer and pool for testing delete key checks
+        # set farmer to first private key
+        sk = await wallet_node.get_key_for_fingerprint(pks[0])
+        test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(0)).get_g1())
+        with lock_and_load_config(wallet_node.root_path, "config.yaml") as test_config:
+            test_config["farmer"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
+            # set pool to second private key
+            sk = await wallet_node.get_key_for_fingerprint(pks[1])
+            test_ph = create_puzzlehash_for_pk(master_sk_to_wallet_sk_unhardened(sk, uint32(0)).get_g1())
+            test_config["pool"]["xch_target_address"] = encode_puzzle_hash(test_ph, "txch")
+            save_config(wallet_node.root_path, "config.yaml", test_config)
+
+        # Check first key
+        sk_dict = await client.check_delete_key(pks[0])
+        assert sk_dict["fingerprint"] == pks[0]
+        assert sk_dict["used_for_farmer_rewards"] is True
+        assert sk_dict["used_for_pool_rewards"] is False
+
+        # Check second key
+        sk_dict = await client.check_delete_key(pks[1])
+        assert sk_dict["fingerprint"] == pks[1]
+        assert sk_dict["used_for_farmer_rewards"] is False
+        assert sk_dict["used_for_pool_rewards"] is True
+
+        # Check unknown key
+        sk_dict = await client.check_delete_key(123456, 10)
         assert sk_dict["fingerprint"] == 123456
         assert sk_dict["used_for_farmer_rewards"] is False
         assert sk_dict["used_for_pool_rewards"] is False
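Note: check_delete_key is now called with a second argument (the 10 above), which reads like a bound on how many derived puzzle hashes the wallet searches when deciding whether a fingerprint funds farmer or pool rewards. A hedged usage sketch, with that interpretation and the helper name as assumptions rather than facts from this diff:

async def remove_key_if_unused(client, fingerprint: int) -> None:
    # `10` mirrors the second argument used in the test above; reading it
    # as a derived-address search depth is an assumption, not confirmed here.
    sk_dict = await client.check_delete_key(fingerprint, 10)
    if not sk_dict["used_for_farmer_rewards"] and not sk_dict["used_for_pool_rewards"]:
        await client.delete_key(fingerprint)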
diff --git a/tests/wallet/sync/test_wallet_sync.py b/tests/wallet/sync/test_wallet_sync.py
index c98f78354cc7..43e7fcd4b7f4 100644
--- a/tests/wallet/sync/test_wallet_sync.py
+++ b/tests/wallet/sync/test_wallet_sync.py
@@ -27,27 +27,26 @@ def wallet_height_at_least(wallet_node, h):
 
 
 class TestWalletSync:
-    @pytest.mark.parametrize(
-        "trusted",
-        [True, False],
-    )
     @pytest.mark.asyncio
-    async def test_basic_sync_wallet(self, bt, wallet_node, default_400_blocks, trusted, self_hostname):
+    async def test_basic_sync_wallet(self, bt, two_wallet_nodes, default_400_blocks, self_hostname):
+        full_nodes, wallets = two_wallet_nodes
+        full_node_api = full_nodes[0]
+        full_node_server = full_node_api.full_node.server
+
+        # Trusted node sync
+        wallets[0][0].config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
 
-        full_node_api, wallet_node, full_node_server, wallet_server = wallet_node
+        # Untrusted node sync
+        wallets[1][0].config["trusted_peers"] = {}
 
         for block in default_400_blocks:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
-        if trusted:
-            wallet_node.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
-        else:
-            wallet_node.config["trusted_peers"] = {}
-        await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+        for wallet_node, wallet_server in wallets:
+            await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
 
-        # The second node should eventually catch up to the first one, and have the
-        # same tip at height num_blocks - 1.
-        await time_out_assert(100, wallet_height_at_least, True, wallet_node, len(default_400_blocks) - 1)
+        for wallet_node, wallet_server in wallets:
+            await time_out_assert(100, wallet_height_at_least, True, wallet_node, len(default_400_blocks) - 1)
 
         # Tests a reorg with the wallet
         num_blocks = 30
@@ -55,181 +54,185 @@ async def test_basic_sync_wallet(self, bt, wallet_node, default_400_blocks, trus
         for i in range(1, len(blocks_reorg)):
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(blocks_reorg[i]))
 
-        await disconnect_all_and_reconnect(wallet_server, full_node_server, self_hostname)
+        for wallet_node, wallet_server in wallets:
+            await disconnect_all_and_reconnect(wallet_server, full_node_server, self_hostname)
 
-        await time_out_assert(
-            100, wallet_height_at_least, True, wallet_node, len(default_400_blocks) + num_blocks - 5 - 1
-        )
+        for wallet_node, wallet_server in wallets:
+            await time_out_assert(
+                100, wallet_height_at_least, True, wallet_node, len(default_400_blocks) + num_blocks - 5 - 1
+            )
 
-    @pytest.mark.parametrize(
-        "trusted",
-        [True, False],
-    )
     @pytest.mark.asyncio
-    async def test_almost_recent(self, bt, wallet_node, default_1000_blocks, trusted, self_hostname):
+    async def test_almost_recent(self, bt, two_wallet_nodes, default_400_blocks, self_hostname):
         # Tests the edge case of receiving funds right before the recent blocks in weight proof
-        full_node_api, wallet_node, full_node_server, wallet_server = wallet_node
+        full_nodes, wallets = two_wallet_nodes
+        full_node_api = full_nodes[0]
+        full_node_server = full_node_api.full_node.server
 
-        for block in default_1000_blocks:
+        # Trusted node sync
+        wallets[0][0].config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+
+        # Untrusted node sync
+        wallets[1][0].config["trusted_peers"] = {}
+
+        base_num_blocks = 400
+        for block in default_400_blocks:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
-        wallet = wallet_node.wallet_state_manager.main_wallet
-        ph = await wallet.get_new_puzzlehash()
+        all_blocks = default_400_blocks
+        both_phs = []
+        for wallet_node, wallet_server in wallets:
+            wallet = wallet_node.wallet_state_manager.main_wallet
+            both_phs.append(await wallet.get_new_puzzlehash())
 
-        if trusted:
-            wallet_node.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
-        else:
-            wallet_node.config["trusted_peers"] = {}
+        for i in range(20):
+            # Tests a reorg with the wallet
+            ph = both_phs[i % 2]
+            all_blocks = bt.get_consecutive_blocks(1, block_list_input=all_blocks, pool_reward_puzzle_hash=ph)
+            await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(all_blocks[-1]))
 
-        # Tests a reorg with the wallet
-        num_blocks = 20
         new_blocks = bt.get_consecutive_blocks(
-            num_blocks, block_list_input=default_1000_blocks, pool_reward_puzzle_hash=ph
+            test_constants.WEIGHT_PROOF_RECENT_BLOCKS + 10, block_list_input=all_blocks
         )
-        for i in range(1000, len(new_blocks)):
+        for i in range(base_num_blocks + 20, len(new_blocks)):
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[i]))
 
-        new_blocks = bt.get_consecutive_blocks(
-            test_constants.WEIGHT_PROOF_RECENT_BLOCKS + 10, block_list_input=new_blocks
-        )
-        for i in range(1020, len(new_blocks)):
-            await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(new_blocks[i]))
+        for wallet_node, wallet_server in wallets:
+            wallet = wallet_node.wallet_state_manager.main_wallet
+            await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+            await time_out_assert(30, wallet.get_confirmed_balance, 10 * calculate_pool_reward(uint32(1000)))
 
-        await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+    @pytest.mark.asyncio
+    async def test_backtrack_sync_wallet(self, two_wallet_nodes, default_400_blocks, self_hostname):
+        full_nodes, wallets = two_wallet_nodes
+        full_node_api = full_nodes[0]
+        full_node_server = full_node_api.full_node.server
 
-        await time_out_assert(30, wallet.get_confirmed_balance, 20 * calculate_pool_reward(uint32(1000)))
+        # Trusted node sync
+        wallets[0][0].config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+
+        # Untrusted node sync
+        wallets[1][0].config["trusted_peers"] = {}
 
-    @pytest.mark.parametrize(
-        "trusted",
-        [True, False],
-    )
-    @pytest.mark.asyncio
-    async def test_backtrack_sync_wallet(self, wallet_node, default_400_blocks, trusted, self_hostname):
-        full_node_api, wallet_node, full_node_server, wallet_server = wallet_node
         for block in default_400_blocks[:20]:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
-        if trusted:
-            wallet_node.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
-        else:
-            wallet_node.config["trusted_peers"] = {}
-        await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+        for wallet_node, wallet_server in wallets:
+            await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
 
-        # The second node should eventually catch up to the first one, and have the
-        # same tip at height num_blocks - 1.
-        await time_out_assert(100, wallet_height_at_least, True, wallet_node, 19)
+        for wallet_node, wallet_server in wallets:
+            await time_out_assert(100, wallet_height_at_least, True, wallet_node, 19)
 
     # Tests a reorg with the wallet
-    @pytest.mark.parametrize(
-        "trusted",
-        [True, False],
-    )
     @pytest.mark.asyncio
-    async def test_short_batch_sync_wallet(self, wallet_node, default_400_blocks, trusted, self_hostname):
-        full_node_api, wallet_node, full_node_server, wallet_server = wallet_node
+    async def test_short_batch_sync_wallet(self, two_wallet_nodes, default_400_blocks, self_hostname):
+        full_nodes, wallets = two_wallet_nodes
+        full_node_api = full_nodes[0]
+        full_node_server = full_node_api.full_node.server
+
+        # Trusted node sync
+        wallets[0][0].config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+
+        # Untrusted node sync
+        wallets[1][0].config["trusted_peers"] = {}
 
         for block in default_400_blocks[:200]:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
-        if trusted:
-            wallet_node.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
-        else:
-            wallet_node.config["trusted_peers"] = {}
-        await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+        for wallet_node, wallet_server in wallets:
+            await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
 
-        # The second node should eventually catch up to the first one, and have the
-        # same tip at height num_blocks - 1.
-        await time_out_assert(100, wallet_height_at_least, True, wallet_node, 199)
-        # Tests a reorg with the wallet
+        for wallet_node, wallet_server in wallets:
+            await time_out_assert(100, wallet_height_at_least, True, wallet_node, 199)
 
-    @pytest.mark.parametrize(
-        "trusted",
-        [True, False],
-    )
     @pytest.mark.asyncio
-    async def test_long_sync_wallet(
-        self, bt, wallet_node, default_1000_blocks, default_400_blocks, trusted, self_hostname
-    ):
+    async def test_long_sync_wallet(self, bt, two_wallet_nodes, default_1000_blocks, default_400_blocks, self_hostname):
+        full_nodes, wallets = two_wallet_nodes
+        full_node_api = full_nodes[0]
+        full_node_server = full_node_api.full_node.server
 
-        full_node_api, wallet_node, full_node_server, wallet_server = wallet_node
+        # Trusted node sync
+        wallets[0][0].config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
+
+        # Untrusted node sync
+        wallets[1][0].config["trusted_peers"] = {}
 
         for block in default_400_blocks:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
-        if trusted:
-            wallet_node.config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
-        else:
-            wallet_node.config["trusted_peers"] = {}
-        await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
+        for wallet_node, wallet_server in wallets:
+            await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
 
-        # The second node should eventually catch up to the first one, and have the
-        # same tip at height num_blocks - 1.
-        await time_out_assert(600, wallet_height_at_least, True, wallet_node, len(default_400_blocks) - 1)
+        for wallet_node, wallet_server in wallets:
+            await time_out_assert(600, wallet_height_at_least, True, wallet_node, len(default_400_blocks) - 1)
 
         # Tests a long reorg
         for block in default_1000_blocks:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
-        await disconnect_all_and_reconnect(wallet_server, full_node_server, self_hostname)
+        for wallet_node, wallet_server in wallets:
+            await disconnect_all_and_reconnect(wallet_server, full_node_server, self_hostname)
 
-        log.info(f"wallet node height is {wallet_node.wallet_state_manager.blockchain.get_peak_height()}")
-        await time_out_assert(600, wallet_height_at_least, True, wallet_node, len(default_1000_blocks) - 1)
+            log.info(f"wallet node height is {wallet_node.wallet_state_manager.blockchain.get_peak_height()}")
+            await time_out_assert(600, wallet_height_at_least, True, wallet_node, len(default_1000_blocks) - 1)
 
-        await disconnect_all_and_reconnect(wallet_server, full_node_server, self_hostname)
+            await disconnect_all_and_reconnect(wallet_server, full_node_server, self_hostname)
 
         # Tests a short reorg
         num_blocks = 30
         blocks_reorg = bt.get_consecutive_blocks(num_blocks, block_list_input=default_1000_blocks[:-5])
-        for i in range(1, len(blocks_reorg)):
+        for i in range(len(blocks_reorg) - num_blocks - 10, len(blocks_reorg)):
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(blocks_reorg[i]))
 
-        await time_out_assert(
-            600, wallet_height_at_least, True, wallet_node, len(default_1000_blocks) + num_blocks - 5 - 1
-        )
+        for wallet_node, wallet_server in wallets:
+            await time_out_assert(
+                600, wallet_height_at_least, True, wallet_node, len(default_1000_blocks) + num_blocks - 5 - 1
+            )
 
-    @pytest.mark.parametrize(
-        "trusted",
-        [True, False],
-    )
     @pytest.mark.asyncio
-    async def test_wallet_reorg_sync(self, bt, wallet_node_simulator, default_400_blocks, trusted, self_hostname):
+    async def test_wallet_reorg_sync(self, bt, two_wallet_nodes, default_400_blocks, self_hostname):
         num_blocks = 5
-        full_nodes, wallets = wallet_node_simulator
+        full_nodes, wallets = two_wallet_nodes
         full_node_api = full_nodes[0]
-        wallet_node, server_2 = wallets[0]
-        fn_server = full_node_api.full_node.server
-        wsm: WalletStateManager = wallet_node.wallet_state_manager
-        wallet = wsm.main_wallet
-        ph = await wallet.get_new_puzzlehash()
+        full_node_server = full_node_api.full_node.server
+
+        # Trusted node sync
+        wallets[0][0].config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
 
-        if trusted:
-            wallet_node.config["trusted_peers"] = {fn_server.node_id.hex(): fn_server.node_id.hex()}
-        else:
-            wallet_node.config["trusted_peers"] = {}
+        # Untrusted node sync
+        wallets[1][0].config["trusted_peers"] = {}
 
-        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
+        phs = []
+        for wallet_node, wallet_server in wallets:
+            wallet = wallet_node.wallet_state_manager.main_wallet
+            phs.append(await wallet.get_new_puzzlehash())
+            await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
 
         # Insert 400 blocks
         for block in default_400_blocks:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
         # Farm few more with reward
+        for i in range(0, num_blocks - 1):
+            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(phs[0]))
+
         for i in range(0, num_blocks):
-            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph))
+            await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(phs[1]))
 
         # Confirm we have the funds
         funds = sum(
             [calculate_pool_reward(uint32(i)) + calculate_base_farmer_reward(uint32(i)) for i in range(1, num_blocks)]
         )
 
-        await time_out_assert(5, wallet.get_confirmed_balance, funds)
-
-        async def get_tx_count(wallet_id):
+        async def get_tx_count(wsm, wallet_id):
             txs = await wsm.get_all_transactions(wallet_id)
             return len(txs)
 
-        await time_out_assert(5, get_tx_count, 2 * (num_blocks - 1), 1)
+        for wallet_node, wallet_server in wallets:
+            wallet = wallet_node.wallet_state_manager.main_wallet
+            await time_out_assert(5, wallet.get_confirmed_balance, funds)
+            await time_out_assert(5, get_tx_count, 2 * (num_blocks - 1), wallet_node.wallet_state_manager, 1)
 
         # Reorg blocks that carry reward
         num_blocks = 30
@@ -238,31 +241,25 @@ async def get_tx_count(wallet_id):
         for block in blocks_reorg[-30:]:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
-        await time_out_assert(5, get_tx_count, 0, 1)
-        await time_out_assert(5, wallet.get_confirmed_balance, 0)
+        for wallet_node, wallet_server in wallets:
+            wallet = wallet_node.wallet_state_manager.main_wallet
+            await time_out_assert(5, get_tx_count, 0, wallet_node.wallet_state_manager, 1)
+            await time_out_assert(5, wallet.get_confirmed_balance, 0)
 
-    @pytest.mark.parametrize(
-        "trusted",
-        [False],
-    )
     @pytest.mark.asyncio
-    async def test_wallet_reorg_get_coinbase(
-        self, bt, wallet_node_simulator, default_400_blocks, trusted, self_hostname
-    ):
-        full_nodes, wallets = wallet_node_simulator
+    async def test_wallet_reorg_get_coinbase(self, bt, two_wallet_nodes, default_400_blocks, self_hostname):
+        full_nodes, wallets = two_wallet_nodes
         full_node_api = full_nodes[0]
-        wallet_node, server_2 = wallets[0]
-        fn_server = full_node_api.full_node.server
-        wsm = wallet_node.wallet_state_manager
-        wallet = wallet_node.wallet_state_manager.main_wallet
-        ph = await wallet.get_new_puzzlehash()
+        full_node_server = full_node_api.full_node.server
 
-        if trusted:
-            wallet_node.config["trusted_peers"] = {fn_server.node_id.hex(): fn_server.node_id.hex()}
-        else:
-            wallet_node.config["trusted_peers"] = {}
+        # Trusted node sync
+        wallets[0][0].config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()}
 
-        await server_2.start_client(PeerInfo(self_hostname, uint16(fn_server._port)), None)
+        # Untrusted node sync
+        wallets[1][0].config["trusted_peers"] = {}
+
+        for wallet_node, wallet_server in wallets:
+            await wallet_server.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None)
 
         # Insert 400 blocks
         for block in default_400_blocks:
@@ -275,30 +272,37 @@ async def test_wallet_reorg_get_coinbase(
         for block in blocks_reorg[:-5]:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
-        async def get_tx_count(wallet_id):
+        async def get_tx_count(wsm, wallet_id):
             txs = await wsm.get_all_transactions(wallet_id)
             return len(txs)
 
-        await time_out_assert(10, get_tx_count, 0, 1)
-        await time_out_assert(30, wallet_is_synced, True, wallet_node, full_node_api)
+        for wallet_node, wallet_server in wallets:
+            await time_out_assert(10, get_tx_count, 0, wallet_node.wallet_state_manager, 1)
+            await time_out_assert(30, wallet_is_synced, True, wallet_node, full_node_api)
 
         num_blocks_reorg_1 = 40
-        blocks_reorg_1 = bt.get_consecutive_blocks(
-            1, pool_reward_puzzle_hash=ph, farmer_reward_puzzle_hash=ph, block_list_input=blocks_reorg[:-30]
-        )
-        blocks_reorg_2 = bt.get_consecutive_blocks(num_blocks_reorg_1, block_list_input=blocks_reorg_1)
-
-        for block in blocks_reorg_2[-41:]:
-            await asyncio.sleep(0.4)
+        all_blocks_reorg_2 = blocks_reorg[:-30]
+        for wallet_node, wallet_server in wallets:
+            wallet = wallet_node.wallet_state_manager.main_wallet
+            ph = await wallet.get_new_puzzlehash()
+            all_blocks_reorg_2 = bt.get_consecutive_blocks(
+                1, pool_reward_puzzle_hash=ph, farmer_reward_puzzle_hash=ph, block_list_input=all_blocks_reorg_2
+            )
+        blocks_reorg_2 = bt.get_consecutive_blocks(num_blocks_reorg_1, block_list_input=all_blocks_reorg_2)
+
+        for block in blocks_reorg_2[-44:]:
             await full_node_api.full_node.respond_block(full_node_protocol.RespondBlock(block))
 
-        await disconnect_all_and_reconnect(server_2, fn_server, self_hostname)
+        for wallet_node, wallet_server in wallets:
+            await disconnect_all_and_reconnect(wallet_server, full_node_server, self_hostname)
 
         # Confirm we have the funds
-        funds = calculate_pool_reward(uint32(len(blocks_reorg_1))) + calculate_base_farmer_reward(
-            uint32(len(blocks_reorg_1))
+        funds = calculate_pool_reward(uint32(len(all_blocks_reorg_2))) + calculate_base_farmer_reward(
+            uint32(len(all_blocks_reorg_2))
         )
 
-        await time_out_assert(60, wallet_is_synced, True, wallet_node, full_node_api)
-        await time_out_assert(20, get_tx_count, 2, 1)
-        await time_out_assert(20, wallet.get_confirmed_balance, funds)
+        for wallet_node, wallet_server in wallets:
+            wallet = wallet_node.wallet_state_manager.main_wallet
+            await time_out_assert(60, wallet_is_synced, True, wallet_node, full_node_api)
+            await time_out_assert(20, get_tx_count, 2, wallet_node.wallet_state_manager, 1)
+            await time_out_assert(20, wallet.get_confirmed_balance, funds)
diff --git a/tests/wallet/test_singleton_lifecycle.py b/tests/wallet/test_singleton_lifecycle.py
index d332a20c1417..1cf8a8c18476 100644
--- a/tests/wallet/test_singleton_lifecycle.py
+++ b/tests/wallet/test_singleton_lifecycle.py
@@ -113,8 +113,7 @@ def test_only_odd_coins_0():
     conditions = Program.to(condition_list)
     coin_spend = CoinSpend(farmed_coin, ANYONE_CAN_SPEND_PUZZLE, conditions)
     spend_bundle = SpendBundle.aggregate([launcher_spend_bundle, SpendBundle([coin_spend], G2Element())])
-    run = asyncio.get_event_loop().run_until_complete
-    coins_added, coins_removed = run(check_spend_bundle_validity(bt.constants, blocks, spend_bundle))
+    coins_added, coins_removed = asyncio.run(check_spend_bundle_validity(bt.constants, blocks, spend_bundle))
 
     coin_set_added = set([_.coin for _ in coins_added])
     coin_set_removed = set([_.coin for _ in coins_removed])
diff --git a/tests/wallet_tools.py b/tests/wallet_tools.py
index 6600dee0e36d..aff5ab4cbeca 100644
--- a/tests/wallet_tools.py
+++ b/tests/wallet_tools.py
@@ -30,6 +30,7 @@ class WalletTool:
     next_address = 0
     pubkey_num_lookup: Dict[bytes, uint32] = {}
+    puzzle_pk_cache: Dict[bytes32, PrivateKey] = {}
 
     def __init__(self, constants: ConsensusConstants, sk: Optional[PrivateKey] = None):
         self.constants = constants
@@ -48,16 +49,13 @@ def get_next_address_index(self) -> uint32:
         return self.next_address
 
     def get_private_key_for_puzzle_hash(self, puzzle_hash: bytes32) -> PrivateKey:
-        if puzzle_hash in self.puzzle_pk_cache:
-            child = self.puzzle_pk_cache[puzzle_hash]
-            private = master_sk_to_wallet_sk(self.private_key, uint32(child))
-            # pubkey = private.get_g1()
-            return private
-        else:
-            for child in range(self.next_address):
-                pubkey = master_sk_to_wallet_sk(self.private_key, uint32(child)).get_g1()
-                if puzzle_hash == puzzle_for_pk(bytes(pubkey)).get_tree_hash():
-                    return master_sk_to_wallet_sk(self.private_key, uint32(child))
+        sk = self.puzzle_pk_cache.get(puzzle_hash)
+        if sk:
+            return sk
+        for child in range(self.next_address):
+            pubkey = master_sk_to_wallet_sk(self.private_key, uint32(child)).get_g1()
+            if puzzle_hash == puzzle_for_pk(bytes(pubkey)).get_tree_hash():
+                return master_sk_to_wallet_sk(self.private_key, uint32(child))
         raise ValueError(f"Do not have the keys for puzzle hash {puzzle_hash}")
 
     def puzzle_for_pk(self, pubkey: bytes) -> Program:
@@ -65,12 +63,13 @@ def puzzle_for_pk(self, pubkey: bytes) -> Program:
 
     def get_new_puzzle(self) -> Program:
         next_address_index: uint32 = self.get_next_address_index()
-        pubkey: G1Element = master_sk_to_wallet_sk(self.private_key, next_address_index).get_g1()
+        sk: PrivateKey = master_sk_to_wallet_sk(self.private_key, next_address_index)
+        pubkey: G1Element = sk.get_g1()
         self.pubkey_num_lookup[bytes(pubkey)] = next_address_index
 
         puzzle: Program = puzzle_for_pk(pubkey)
 
-        self.puzzle_pk_cache[puzzle.get_tree_hash()] = next_address_index
+        self.puzzle_pk_cache[puzzle.get_tree_hash()] = sk
 
         return puzzle
 
     def get_new_puzzlehash(self) -> bytes32:
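Note: the test_singleton_lifecycle hunk above (like the alert_server change earlier in this diff) replaces asyncio.get_event_loop().run_until_complete with asyncio.run, which creates a fresh event loop, runs the coroutine, and closes the loop afterwards. The difference in miniature:

import asyncio

async def compute() -> int:
    return 42

# Older pattern: reuses (or implicitly creates) the current event loop,
# and is deprecated for this use in recent Python versions:
#   loop = asyncio.get_event_loop()
#   result = loop.run_until_complete(compute())

# Preferred: asyncio.run owns the loop's full lifecycle
result = asyncio.run(compute())
assert result == 42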
diff --git a/tools/generate_chain.py b/tools/generate_chain.py
new file mode 100644
index 000000000000..4e7a8e942944
--- /dev/null
+++ b/tools/generate_chain.py
@@ -0,0 +1,142 @@
+import cProfile
+import random
+import sqlite3
+import time
+from contextlib import closing, contextmanager
+from pathlib import Path
+from typing import Iterator, List
+
+import zstd
+
+from chia.types.blockchain_format.coin import Coin
+from chia.types.spend_bundle import SpendBundle
+from chia.util.chia_logging import initialize_logging
+from chia.util.ints import uint64
+from chia.util.path import mkdir
+from tests.block_tools import create_block_tools
+from tests.util.keyring import TempKeyring
+from tools.test_constants import test_constants
+
+
+@contextmanager
+def enable_profiler(profile: bool) -> Iterator[None]:
+    if not profile:
+        yield
+        return
+
+    with cProfile.Profile() as pr:
+        yield
+
+    pr.create_stats()
+    pr.dump_stats("generate-chain.profile")
+
+
+root_path = Path("./test-chain").resolve()
+mkdir(root_path)
+with TempKeyring() as keychain:
+
+    bt = create_block_tools(constants=test_constants, root_path=root_path, keychain=keychain)
+    initialize_logging(
+        "generate_chain", {"log_level": "DEBUG", "log_stdout": False, "log_syslog": False}, root_path=root_path
+    )
+
+    with closing(sqlite3.connect("stress-test-blockchain.sqlite")) as db:
+
+        print("initializing v2 block store")
+        db.execute(
+            "CREATE TABLE full_blocks("
+            "header_hash blob PRIMARY KEY,"
+            "prev_hash blob,"
+            "height bigint,"
+            "in_main_chain tinyint,"
+            "block blob)"
+        )
+
+        wallet = bt.get_farmer_wallet_tool()
+        coinbase_puzzlehash = wallet.get_new_puzzlehash()
+
+        blocks = bt.get_consecutive_blocks(
+            3,
+            farmer_reward_puzzle_hash=coinbase_puzzlehash,
+            pool_reward_puzzle_hash=coinbase_puzzlehash,
+            guarantee_transaction_block=True,
+            genesis_timestamp=uint64(1234567890),
+            time_per_block=30,
+        )
+
+        unspent_coins: List[Coin] = []
+
+        for b in blocks:
+            for coin in b.get_included_reward_coins():
+                if coin.puzzle_hash == coinbase_puzzlehash:
+                    unspent_coins.append(coin)
+            db.execute(
+                "INSERT INTO full_blocks VALUES(?, ?, ?, ?, ?)",
+                (
+                    b.header_hash,
+                    b.prev_header_hash,
+                    b.height,
+                    1,  # in_main_chain
+                    zstd.compress(bytes(b)),
+                ),
+            )
+        db.commit()
+
+        # build 2000 transaction blocks
+        with enable_profiler(False):
+            for k in range(2000):
+
+                start_time = time.monotonic()
+
+                print(f"block: {len(blocks)} unspent: {len(unspent_coins)}")
+                new_coins: List[Coin] = []
+                spend_bundles: List[SpendBundle] = []
+                for i in range(1010):
+                    if unspent_coins == []:
+                        break
+                    c = unspent_coins.pop(random.randrange(len(unspent_coins)))
+                    receiver = wallet.get_new_puzzlehash()
+                    bundle = wallet.generate_signed_transaction(uint64(c.amount // 2), receiver, c)
+                    new_coins.extend(bundle.additions())
+                    spend_bundles.append(bundle)
+
+                coinbase_puzzlehash = wallet.get_new_puzzlehash()
+                blocks = bt.get_consecutive_blocks(
+                    1,
+                    blocks,
+                    farmer_reward_puzzle_hash=coinbase_puzzlehash,
+                    pool_reward_puzzle_hash=coinbase_puzzlehash,
+                    guarantee_transaction_block=True,
+                    transaction_data=SpendBundle.aggregate(spend_bundles),
+                    time_per_block=30,
+                )
+
+                b = blocks[-1]
+                for coin in b.get_included_reward_coins():
+                    if coin.puzzle_hash == coinbase_puzzlehash:
+                        unspent_coins.append(coin)
+                unspent_coins.extend(new_coins)
+
+                if b.transactions_info:
+                    fill_rate = b.transactions_info.cost / test_constants.MAX_BLOCK_COST_CLVM
+                else:
+                    fill_rate = 0
+
+                end_time = time.monotonic()
+
+                print(
+                    f"included {i} spend bundles. fill_rate: {fill_rate*100:.1f}% "
+                    f"new coins: {len(new_coins)} time: {end_time - start_time:0.2f}s"
+                )
+
+                db.execute(
+                    "INSERT INTO full_blocks VALUES(?, ?, ?, ?, ?)",
+                    (
+                        b.header_hash,
+                        b.prev_header_hash,
+                        b.height,
+                        1,  # in_main_chain
+                        zstd.compress(bytes(b)),
+                    ),
+                )
+                db.commit()
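Note: once tools/generate_chain.py finishes, the resulting stress-test database can be inspected with nothing more than the five-column full_blocks schema it creates above. For instance (an illustrative query, not part of the tool):

import sqlite3
from contextlib import closing

with closing(sqlite3.connect("stress-test-blockchain.sqlite")) as db:
    # Peek at the tip of the generated chain and the compressed block sizes
    for height, size in db.execute(
        "SELECT height, length(block) FROM full_blocks ORDER BY height DESC LIMIT 5"
    ):
        print(f"height={height} compressed_size={size}")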
diff --git a/tools/test_constants.py b/tools/test_constants.py
new file mode 100644
index 000000000000..a2c2be4e8c1a
--- /dev/null
+++ b/tools/test_constants.py
@@ -0,0 +1,18 @@
+from chia.consensus.default_constants import DEFAULT_CONSTANTS
+
+test_constants = DEFAULT_CONSTANTS.replace(
+    **{
+        "MIN_PLOT_SIZE": 20,
+        "MIN_BLOCKS_PER_CHALLENGE_BLOCK": 12,
+        "DISCRIMINANT_SIZE_BITS": 16,
+        "SUB_EPOCH_BLOCKS": 170,
+        "WEIGHT_PROOF_THRESHOLD": 2,
+        "WEIGHT_PROOF_RECENT_BLOCKS": 380,
+        "DIFFICULTY_CONSTANT_FACTOR": 33554432,
+        "NUM_SPS_SUB_SLOT": 16,  # Must be a power of 2
+        "MAX_SUB_SLOT_BLOCKS": 50,
+        "EPOCH_BLOCKS": 340,
+        "SUB_SLOT_ITERS_STARTING": 2 ** 10,  # Must be a multiple of 64
+        "NUMBER_ZERO_BITS_PLOT_FILTER": 1,  # H(plot signature of the challenge) must start with this many zeroes
+    }
+)
help="run node in a single process, to include validation in profiles", ) -def run(file: Path, db_version: int, profile: bool, single_thread: bool) -> None: +def run(file: Path, db_version: int, profile: bool, single_thread: bool, test_constants: bool) -> None: """ The FILE parameter should point to an existing blockchain database file (in v2 format) """ - asyncio.run(run_sync_test(Path(file), db_version, profile, single_thread)) + asyncio.run(run_sync_test(Path(file), db_version, profile, single_thread, test_constants)) @main.command("analyze", short_help="generate call stacks for all profiles dumped to current directory")