go back to <4 as the python version upper limit #22361

Workflow file for this run
name: 🧪 test
on:
  push:
    paths-ignore:
      - "**.md"
    branches:
      - long_lived/**
      - main
      - release/**
    tags:
      - "**"
  pull_request:
    paths-ignore:
      - "**.md"
    branches:
      - "**"
  workflow_dispatch:
    inputs:
      build-job-matrix-arguments:
        description: "extra arguments for build-job-matrix.py"
        default: ""
        required: false
        type: string
      per:
        description: "individual job scope"
        default: "directory"
        required: false
        type: choice
        options:
          - directory
          - file
      only:
        description: "only run this job"
        default: ""
        required: false
        type: string
      duplicates:
        description: "times to run each job"
        default: 1
        required: false
        type: number
      run-linux:
        description: "run Linux tests"
        default: true
        required: false
        type: boolean
      run-macos-intel:
        description: "run macOS-intel tests"
        default: true
        required: false
        type: boolean
      run-macos-arm:
        description: "run macOS-arm tests"
        default: true
        required: false
        type: boolean
      run-windows:
        description: "run Windows tests"
        default: true
        required: false
        type: boolean
      full-python-matrix:
        description: "run full Python version matrix"
        default: false
        required: false
        type: boolean
  schedule:
    - cron: "0 13 * * *"
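# For pull requests, the concurrency group is keyed by workflow and PR number, so a newer
# push to the same PR cancels any in-flight run; every other event uses the unique run id,
# so those runs never cancel each other.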
concurrency:
  group: ${{ github.event_name == 'pull_request' && format('{0}-{1}', github.workflow_ref, github.event.pull_request.number) || github.run_id }}
  cancel-in-progress: true
jobs:
  configure:
    name: Configure matrix
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Setup Python environment
        uses: Chia-Network/actions/setup-python@main
        with:
          python-version: "3.10"
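      # build-job-matrix.py is assumed to print the test-job matrix as JSON; it is captured
      # in matrix.json and exposed as the `configuration` output. matrix_mode follows from
      # the expression below: 'limited' for Chia-Network repositories other than
      # chia-blockchain, 'all' for scheduled runs, full-python-matrix dispatches, or release
      # branches/PRs in Chia-Network/chia-blockchain, and 'main' otherwise.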
      - name: Generate matrix configuration
        id: configure
        run: |
          python chia/_tests/build-job-matrix.py --per directory --verbose ${{ inputs.only && format('--only {0}', inputs.only) || '' }} ${{ inputs.duplicates > 1 && format('--duplicates {0}', inputs.duplicates) || '' }} ${{ inputs.build-job-matrix-arguments }} > matrix.json
          cat matrix.json
          echo "configuration=$(cat matrix.json)" >> "$GITHUB_OUTPUT"
          echo matrix_mode=${{
            ( github.repository_owner == 'Chia-Network' && github.repository != 'Chia-Network/chia-blockchain' )
            && 'limited'
            || (
              ( github.event_name == 'schedule' || inputs.full-python-matrix )
              || ( github.repository_owner == 'Chia-Network' && github.repository == 'Chia-Network/chia-blockchain' && startsWith(github.ref, 'refs/heads/release/') )
              || ( github.repository_owner == 'Chia-Network' && github.repository == 'Chia-Network/chia-blockchain' && startsWith(github.base_ref, 'release/') )
            )
            && 'all'
            || 'main'
          }} >> "$GITHUB_OUTPUT"
    outputs:
      configuration: ${{ steps.configure.outputs.configuration }}
      matrix_mode: ${{ steps.configure.outputs.matrix_mode }}
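  # The four platform jobs below all call the reusable workflow test-single.yml, which is
  # assumed to expand `configuration` and `matrix_mode` into the actual per-directory test
  # jobs; only the OS-specific inputs (runner, emoji, arch, artifact names) differ.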
  macos-intel:
    if: github.event_name != 'workflow_dispatch' || inputs.run-macos-intel
    uses: ./.github/workflows/test-single.yml
    needs: configure
    with:
      os-emoji: 🍎
      matrix: macos-intel
      name: macOS Intel
      file_name: macos
      concurrency-name: macos-intel
      configuration: ${{ needs.configure.outputs.configuration }}
      matrix_mode: ${{ needs.configure.outputs.matrix_mode }}
      runs-on: macos-13
      arch: intel
      arch-emoji: 🌀
  macos-arm:
    if: github.event_name != 'workflow_dispatch' || inputs.run-macos-arm
    uses: ./.github/workflows/test-single.yml
    needs: configure
    with:
      os-emoji: 🍎
      matrix: macos
      name: macOS ARM
      file_name: macos-arm
      concurrency-name: macos-arm
      configuration: ${{ needs.configure.outputs.configuration }}
      matrix_mode: ${{ needs.configure.outputs.matrix_mode }}
      runs-on: macos-14
      arch: arm
      arch-emoji: 💪
      collect-junit: false
  ubuntu:
    if: github.event_name != 'workflow_dispatch' || inputs.run-linux
    uses: ./.github/workflows/test-single.yml
    needs: configure
    with:
      os-emoji: 🐧
      matrix: ubuntu
      name: Ubuntu
      file_name: ubuntu
      concurrency-name: ubuntu
      configuration: ${{ needs.configure.outputs.configuration }}
      matrix_mode: ${{ needs.configure.outputs.matrix_mode }}
      runs-on: ubuntu-latest
      arch: intel
      arch-emoji: 🌀
      collect-junit: false
  windows:
    if: github.event_name != 'workflow_dispatch' || inputs.run-windows
    uses: ./.github/workflows/test-single.yml
    needs: configure
    with:
      os-emoji: 🪟
      matrix: windows
      name: Windows
      file_name: windows
      concurrency-name: windows
      configuration: ${{ needs.configure.outputs.configuration }}
      matrix_mode: ${{ needs.configure.outputs.matrix_mode }}
      runs-on: windows-latest
      arch: intel
      arch-emoji: 🌀
      collect-junit: false
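  # Runs after all platform jobs and aggregates their uploaded coverage-data-* artifacts
  # into combined, XML, and HTML reports, plus a diff-cover check against the base branch.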
  coverage:
    name: ${{ matrix.os.emoji }} Coverage - ${{ matrix.python.name }}
    runs-on: ${{ matrix.os.runs-on }}
    needs:
      - macos-intel
      - macos-arm
      - ubuntu
      - windows
    strategy:
      fail-fast: false
      matrix:
        os:
          - emoji: 🐧
            matrix: ubuntu
            name: Ubuntu
            runs-on: ubuntu-latest
        python:
          - name: "3.10"
            action: "3.10"
            apt: "3.10"
            install_sh: "3.10"
            matrix: "3.10"
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set Env
        uses: Chia-Network/actions/setjobenv@main
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
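      # The JUnit download/format/publish steps below are currently disabled via `if: false`;
      # they are presumably kept in place so the yq/junitparser merge pipeline can be
      # re-enabled later without rewriting it.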
      - name: Download Results
        if: false
        uses: actions/download-artifact@v4
        with:
          merge-multiple: true
          pattern: junit-data-*
          path: junit-data
      - name: Format JUnit data and prepare results
        if: false
        run: |
          sudo snap install yq
          find junit-data -maxdepth 1 -name '*.xml' -print0 | xargs -0 --max-procs=10 --replace={} yq eval '.testsuites.testsuite.testcase |= sort_by(.+@classname, .+@name)' --inplace {}
          sudo apt-get install junitparser
          mkdir junit-results
          junitparser merge junit-data/*.xml junit-results/junit.xml
          find junit-results -maxdepth 1 -name '*.xml' -print0 | xargs -0 --max-procs=10 --replace={} yq eval '.testsuites.testsuite |= sort_by(.+@name) | .testsuites.testsuite[].testcase |= sort_by(.+@classname, .+@name)' --inplace {}
      - name: Publish formatted JUnit data
        if: false
        uses: actions/upload-artifact@v4
        with:
          name: junit-data
          path: junit-data/*
          if-no-files-found: error
      - name: Download Coverage
        uses: actions/download-artifact@v4
        with:
          merge-multiple: true
          pattern: coverage-data-*
          path: coverage-data
      - name: Set up ${{ matrix.python.name }}
        uses: Chia-Network/actions/setup-python@main
        with:
          python-version: ${{ matrix.python.action }}
      - uses: ./.github/actions/install
        with:
          python-version: ${{ matrix.python.action }}
          development: true
      - uses: chia-network/actions/activate-venv@main
      - name: Add time out assert results to workflow summary
        if: always() && false
        run: |
          python -m chia._tests.process_junit --limit 50 --type time_out_assert --xml junit-results/junit.xml --markdown --link-prefix ${{ github.event.repository.html_url }}/blob/${{ github.sha }}/ --link-line-separator \#L >> "$GITHUB_STEP_SUMMARY"
          python -m chia._tests.process_junit --type time_out_assert --xml junit-results/junit.xml --markdown --link-prefix ${{ github.event.repository.html_url }}/blob/${{ github.sha }}/ --link-line-separator \#L >> junit-results/time_out_assert.md
      - name: Publish JUnit results
        if: always() && false
        uses: actions/upload-artifact@v4
        with:
          name: junit-results
          path: junit-results/*
          if-no-files-found: error
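      # `coverage combine` merges the per-job .coverage data files downloaded above into a
      # single database, which is then rendered as Cobertura XML (consumed by diff-cover
      # below) and as a browsable HTML report.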
      - name: Coverage Processing
        run: |
          coverage combine --rcfile=.coveragerc --data-file=coverage-reports/.coverage coverage-data/
          coverage xml --rcfile=.coveragerc --data-file=coverage-reports/.coverage -o coverage-reports/coverage.xml
          coverage html --rcfile=.coveragerc --data-file=coverage-reports/.coverage --directory coverage-reports/html/
      - uses: coverallsapp/github-action@v2
        if: always()
        env:
          COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
      - name: Coverage report (chia/)
        if: always()
        run: |
          set -o pipefail
          coverage report --rcfile=.coveragerc --data-file=coverage-reports/.coverage --include='chia/**/*' --omit='chia/_tests/**/*' --show-missing | tee coverage-reports/coverage-chia-stdout
      - name: Coverage report (chia/_tests/)
        if: always()
        run: |
          set -o pipefail
          coverage report --rcfile=.coveragerc --data-file=coverage-reports/.coverage --include='chia/_tests/**/*' --show-missing | tee coverage-reports/coverage-tests-stdout
      - name: Identify parent commit
        id: parent-commit
        run: |
          echo "hash=$(git rev-parse HEAD~1)" >> "$GITHUB_OUTPUT"
      - name: Coverage report (diff)
        if: (github.base_ref != '' || github.event.before != '') && always()
        env:
          compare-branch: ${{ github.base_ref == '' && steps.parent-commit.outputs.hash || format('origin/{0}', github.base_ref) }}
        run: |
          set -o pipefail
          diff-cover --config-file=.diffcover.toml --compare-branch=${{ env.compare-branch }} --fail-under=100 --html-report=coverage-reports/diff-cover.html --markdown-report=coverage-reports/diff-cover.md coverage-reports/coverage.xml | tee coverage-reports/diff-cover-stdout
          COV_STATUS="${PIPESTATUS[0]}"
          echo "COV_STATUS=$COV_STATUS" >> "$GITHUB_ENV"
          if [[ $COV_STATUS != '0' && "$GITHUB_BASE_REF" != '' ]]; then
            exit 1
          fi
      - name: Remove previous coverage report comment and label from PR
        if: github.base_ref != '' && always()
        shell: bash
        env:
          COV_STATUS: ${{ env.COV_STATUS }}
          GH_TOKEN: ${{ github.token }}
          ORG_REPO: ${{ github.repository }}
        run: |
          PR_NUM=$(jq -r '.number' "$GITHUB_EVENT_PATH")
          COMMENTS=$(gh api -X GET /repos/"${ORG_REPO}"/issues/"${PR_NUM}"/comments)
          COMMENT_ID=$(echo "$COMMENTS" | jq '.[] | select(.user.login == "github-actions[bot]" and (.body | tostring | contains("<!-- COVERAGE_COMMENT_'"${PR_NUM}"' -->"))) | .id')
          COVERAGE_LABEL=$(gh pr view "$PR_NUM" --json labels --jq ' .labels[].name | select(. == "coverage-diff")')
          if [[ -n "$COMMENT_ID" ]]; then
            gh api -X DELETE /repos/"${ORG_REPO}"/issues/comments/"${COMMENT_ID}"
          fi
          if [[ "$COV_STATUS" == '0' ]]; then
            if [[ "$COVERAGE_LABEL" == "coverage-diff" ]]; then
              gh pr edit "$PR_NUM" --remove-label "coverage-diff"
            fi
          fi
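      # The shell below turns diff-cover's plain-text output into a markdown table, linking
      # each missing line or range back to the file at the tested commit. As an illustration
      # (the exact report lines come from diff-cover, not this workflow), a line such as
      #   chia/example/module.py (92.3%): Missing lines 45,50-52
      # would become a table row linking to #L45 and #L50-L52 of that file.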
      - name: Add diff coverage report comment to PR
        if: github.base_ref != '' && always()
        env:
          BRANCH_NAME: ${{ github.sha }}
          COV_STATUS: ${{ env.COV_STATUS }}
          GH_TOKEN: ${{ github.token }}
          ORG_REPO: ${{ github.repository }}
        run: |
          if [[ $COV_STATUS != '0' ]]; then
            BASE_URL="https://github.com/${ORG_REPO}/blob/${BRANCH_NAME}"
            PR_NUM=$(jq -r '.number' "$GITHUB_EVENT_PATH")
            COVERAGE_LABEL=$(gh pr view "$PR_NUM" --json labels --jq ' .labels[].name | select(. == "coverage-diff")')
            CONTENT=$(<coverage-reports/diff-cover-stdout)
            OUTPUT="<!-- COVERAGE_COMMENT_${PR_NUM} -->\n| File | Coverage | Missing Lines |\n|------|----------|---------------|\n"
            TOTAL_LINES="Unknown"
            MISSING_LINES="Unknown"
            TOTAL_COVERAGE="Unknown"
            IFS=$'\n'
            for LINE in $CONTENT; do
              if [[ $LINE =~ ^(.*\.py)\ \(([0-9\.]+%)\)\:\ Missing\ lines\ (.*)$ ]]; then
                FILE="${BASH_REMATCH[1]}"
                COVERAGE="${BASH_REMATCH[2]}"
                MISSING="${BASH_REMATCH[3]}"
                MISSING_TRANSFORMED="lines "
                IFS=',' read -ra MISSING_PARTS <<< "$MISSING"
                for PART in "${MISSING_PARTS[@]}"; do
                  if [[ $PART =~ ^([0-9]+)\-([0-9]+)$ ]]; then
                    MISSING_TRANSFORMED+="[${BASH_REMATCH[1]}-${BASH_REMATCH[2]}](${BASE_URL}/${FILE}#L${BASH_REMATCH[1]}-L${BASH_REMATCH[2]}), "
                  else
                    MISSING_TRANSFORMED+="[${PART}](${BASE_URL}/${FILE}#L${PART}), "
                  fi
                done
                MISSING_TRANSFORMED=${MISSING_TRANSFORMED%, }
                OUTPUT+="| [\`${FILE}\`](${BASE_URL}/${FILE}) | ${COVERAGE} | ${MISSING_TRANSFORMED} |\n"
              elif [[ $LINE =~ ^Total:\ \ \ (.*)\ lines$ ]]; then
                TOTAL_LINES="${BASH_REMATCH[1]} lines"
              elif [[ $LINE =~ ^Missing:\ (.*)\ lines$ ]]; then
                MISSING_LINES="${BASH_REMATCH[1]} lines"
              elif [[ $LINE =~ ^Coverage:\ (.*)%$ ]]; then
                TOTAL_COVERAGE="${BASH_REMATCH[1]}%"
              fi
            done
            OUTPUT+="\n| Total | Missing | Coverage |\n|-------|---------|----------|\n| ${TOTAL_LINES} | ${MISSING_LINES} | ${TOTAL_COVERAGE} |\n"
            echo -e "$OUTPUT" > temp_comment.txt
            gh pr comment "$PR_NUM" -F temp_comment.txt
            if [[ $COVERAGE_LABEL != "coverage-diff" ]]; then
              gh pr edit "$PR_NUM" --add-label "coverage-diff"
            fi
          fi
      - name: Add diff coverage report to workflow summary
        if: (github.base_ref != '' || github.event.before != '') && always()
        run: |
          cat coverage-reports/diff-cover.md >> "$GITHUB_STEP_SUMMARY"
      - name: Publish coverage reports
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: coverage-reports
          path: coverage-reports/*
          if-no-files-found: error