From d3fa0492c9b04e06e34958aebb5bfa78876f8ae1 Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Sun, 5 Mar 2023 10:42:14 -0500 Subject: [PATCH 001/162] Drop dependabot in favor of automated pip-tools --- .github/dependabot.yml | 36 ------------ .github/workflows/bump_deps.yml | 52 ++++++++++++++++++ .github/workflows/ci.yml | 33 ----------- ci.sh | 2 +- .../tests}/astrill-codesigning-cert.cer | Bin 5 files changed, 53 insertions(+), 70 deletions(-) delete mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/bump_deps.yml rename {.github/workflows => trio/tests}/astrill-codesigning-cert.cer (100%) diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index 0c2930b120..0000000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,36 +0,0 @@ -version: 2 -updates: -- package-ecosystem: pip - directory: "/" - schedule: - interval: daily - open-pull-requests-limit: 10 - allow: - - dependency-type: direct - - dependency-type: indirect - ignore: - - dependency-name: pytest - versions: - - ">= 4.6.1.a, < 4.6.2" - - dependency-name: astroid - versions: - - 2.5.2 - - dependency-name: sphinx - versions: - - 3.4.3 - - 3.5.0 - - 3.5.1 - - 3.5.2 - - 3.5.3 - - dependency-name: regex - versions: - - 2021.3.17 - - dependency-name: pygments - versions: - - 2.8.0 - - dependency-name: cryptography - versions: - - 3.4.5 - - dependency-name: pytest - versions: - - 6.2.2 diff --git a/.github/workflows/bump_deps.yml b/.github/workflows/bump_deps.yml new file mode 100644 index 0000000000..86f9b35c8d --- /dev/null +++ b/.github/workflows/bump_deps.yml @@ -0,0 +1,52 @@ +name: Bump dependencies + +on: + workflow_dispatch: + schedule: + - cron: '0 0 1 * *' + +jobs: + bump: + name: bump requirements + timeout-minutes: 10 + runs-on: 'ubuntu-latest' + # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#changing-github_token-permissions + permissions: + pull-requests: write + issues: write + repository-projects: write + contents: write + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: "3.8" + - name: Bump dependencies + run: | + python -m pip install -r test-requirements.txt + pip-compile test-requirements.in + pip-compile docs-requirements.in + - name: Black + run: | + python -m pip install -r test-requirements.txt # may contain new black ver + black setup.py trio + - name: Commit changes + run: | + git switch --force-create bump_from_${GITHUB_SHA:0:6} + git config user.name 'github-actions[bot]' + git config user.email '41898282+github-actions[bot]@users.noreply.github.com' + git commit -am "Dependency updates" + gh pr create --base master --label dependencies \ + --title "Bump dependencies from commit ${GITHUB_SHA:0:6}" \ + --body "" + # gh pr create returns before the pr is ready, we have to poll until success + # https://github.com/cli/cli/issues/2619#issuecomment-1240543096 + for BACKOFF in 1 2 4 8 15 15 15 15; do + sleep $BACKOFF + if gh pr merge --auto --squash; then + break + fi + done + exit 1 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 145ee55b11..839b84a39e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -121,39 +121,6 @@ jobs: # Should match 'name:' up above JOB_NAME: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' - autofmt: - name: Autoformat dependabot PR - timeout-minutes: 10 - if: github.actor == 'dependabot[bot]' - runs-on: 'ubuntu-latest' - # 
https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#changing-github_token-permissions - permissions: - pull-requests: write - issues: write - repository-projects: write - contents: write - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.head.ref }} - - name: Setup python - uses: actions/setup-python@v2 - with: - python-version: "3.8" - - name: Check formatting - run: | - python -m pip install -r test-requirements.txt - ./check.sh - - name: Commit autoformatter changes - if: failure() - run: | - black setup.py trio - git config user.name 'github-actions[bot]' - git config user.email '41898282+github-actions[bot]@users.noreply.github.com' - git commit -am "Autoformatter changes" - git push - macOS: name: 'macOS (${{ matrix.python }})' timeout-minutes: 10 diff --git a/ci.sh b/ci.sh index d4f9df3a94..8eb570b584 100755 --- a/ci.sh +++ b/ci.sh @@ -115,7 +115,7 @@ else # when installing, and then running 'certmgr.msc' and exporting the # certificate. See: # http://www.migee.com/2010/09/24/solution-for-unattendedsilent-installs-and-would-you-like-to-install-this-device-software/ - certutil -addstore "TrustedPublisher" .github/workflows/astrill-codesigning-cert.cer + certutil -addstore "TrustedPublisher" trio/tests/astrill-codesigning-cert.cer # Double-slashes are how you tell windows-bash that you want a single # slash, and don't treat this as a unix-style filename that needs to # be replaced by a windows-style filename. diff --git a/.github/workflows/astrill-codesigning-cert.cer b/trio/tests/astrill-codesigning-cert.cer similarity index 100% rename from .github/workflows/astrill-codesigning-cert.cer rename to trio/tests/astrill-codesigning-cert.cer From 9328bff0ccb2af94eb64573bd16a5c5b3732ad6e Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Sun, 5 Mar 2023 14:07:35 -0500 Subject: [PATCH 002/162] bugfix based on off-repo testing --- .github/workflows/bump_deps.yml | 33 +++++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/.github/workflows/bump_deps.yml b/.github/workflows/bump_deps.yml index 86f9b35c8d..e173f98089 100644 --- a/.github/workflows/bump_deps.yml +++ b/.github/workflows/bump_deps.yml @@ -34,19 +34,32 @@ jobs: black setup.py trio - name: Commit changes run: | - git switch --force-create bump_from_${GITHUB_SHA:0:6} + git switch --force-create autodeps/bump_from_${GITHUB_SHA:0:6} git config user.name 'github-actions[bot]' git config user.email '41898282+github-actions[bot]@users.noreply.github.com' git commit -am "Dependency updates" - gh pr create --base master --label dependencies \ + git push --force --set-upstream origin autodeps/bump_from_${GITHUB_SHA:0:6} + - name: Make automerge PR + env: + GH_TOKEN: ${{ github.token }} + run: | + # git push returns before github is ready for a pr, we have to poll until success + for BACKOFF in 1 2 4 8 15 15 15 15 0; do + sleep $BACKOFF + if gh pr create \ + --label dependencies --body "" \ --title "Bump dependencies from commit ${GITHUB_SHA:0:6}" \ - --body "" + ; then + break + fi + done + (($BACKOFF)) || (echo "Could not create the PR" && false) # gh pr create returns before the pr is ready, we have to poll until success - # https://github.com/cli/cli/issues/2619#issuecomment-1240543096 - for BACKOFF in 1 2 4 8 15 15 15 15; do - sleep $BACKOFF - if gh pr merge --auto --squash; then - break - fi + # https://github.com/cli/cli/issues/2619#issuecomment-1240543096 + for BACKOFF in 1 2 4 
8 15 15 15 15 0; do + sleep $BACKOFF + if gh pr merge --auto --squash; then + break + fi done - exit 1 + (($BACKOFF)) || (echo "Could not set automerge" && false) From b574e60af574ea815dcafb684d1eb8360078d218 Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Mon, 6 Mar 2023 21:56:49 -0500 Subject: [PATCH 003/162] rename workflow --- .github/workflows/{bump_deps.yml => autodeps.yml} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename .github/workflows/{bump_deps.yml => autodeps.yml} (97%) diff --git a/.github/workflows/bump_deps.yml b/.github/workflows/autodeps.yml similarity index 97% rename from .github/workflows/bump_deps.yml rename to .github/workflows/autodeps.yml index e173f98089..4644324db2 100644 --- a/.github/workflows/bump_deps.yml +++ b/.github/workflows/autodeps.yml @@ -1,4 +1,4 @@ -name: Bump dependencies +name: Autodeps on: workflow_dispatch: @@ -6,8 +6,8 @@ on: - cron: '0 0 1 * *' jobs: - bump: - name: bump requirements + Autodeps: + name: Autodeps timeout-minutes: 10 runs-on: 'ubuntu-latest' # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#changing-github_token-permissions From aedbc7d5356263f66342993ac54a07178bc446d6 Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Mon, 6 Mar 2023 22:01:19 -0500 Subject: [PATCH 004/162] only commit and make automerge pr if there are changes otherwise this workflow would fail in the rare case that no dependencies update within a month --- .github/workflows/autodeps.yml | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/workflows/autodeps.yml b/.github/workflows/autodeps.yml index 4644324db2..3534905201 100644 --- a/.github/workflows/autodeps.yml +++ b/.github/workflows/autodeps.yml @@ -32,17 +32,23 @@ jobs: run: | python -m pip install -r test-requirements.txt # may contain new black ver black setup.py trio - - name: Commit changes + - name: Commit changes and create automerge PR + env: + GH_TOKEN: ${{ github.token }} run: | + # https://stackoverflow.com/a/3879077/4504950 + if ! git diff-index --quiet HEAD; then + echo "No changes to commit!" 
+ exit 0 + fi + + # setup git repo git switch --force-create autodeps/bump_from_${GITHUB_SHA:0:6} git config user.name 'github-actions[bot]' git config user.email '41898282+github-actions[bot]@users.noreply.github.com' git commit -am "Dependency updates" git push --force --set-upstream origin autodeps/bump_from_${GITHUB_SHA:0:6} - - name: Make automerge PR - env: - GH_TOKEN: ${{ github.token }} - run: | + # git push returns before github is ready for a pr, we have to poll until success for BACKOFF in 1 2 4 8 15 15 15 15 0; do sleep $BACKOFF From 74cef56b9c5955f337ce39f613868820981768e1 Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Mon, 6 Mar 2023 22:06:57 -0500 Subject: [PATCH 005/162] refactor for readability --- .github/workflows/autodeps.yml | 26 +++++++++++++++++++------- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/.github/workflows/autodeps.yml b/.github/workflows/autodeps.yml index 3534905201..1f93fa03aa 100644 --- a/.github/workflows/autodeps.yml +++ b/.github/workflows/autodeps.yml @@ -25,12 +25,15 @@ jobs: python-version: "3.8" - name: Bump dependencies run: | + python -m pip install -U pip python -m pip install -r test-requirements.txt pip-compile test-requirements.in pip-compile docs-requirements.in - name: Black run: | - python -m pip install -r test-requirements.txt # may contain new black ver + # The new dependencies may contain a new black version. + # Commit any changes immediately. + python -m pip install -r test-requirements.txt black setup.py trio - name: Commit changes and create automerge PR env: @@ -49,8 +52,8 @@ jobs: git commit -am "Dependency updates" git push --force --set-upstream origin autodeps/bump_from_${GITHUB_SHA:0:6} - # git push returns before github is ready for a pr, we have to poll until success - for BACKOFF in 1 2 4 8 15 15 15 15 0; do + # git push returns before github is ready for a pr, so we poll until success + for BACKOFF in 1 2 4 8 0; do sleep $BACKOFF if gh pr create \ --label dependencies --body "" \ @@ -59,13 +62,22 @@ jobs: break fi done - (($BACKOFF)) || (echo "Could not create the PR" && false) - # gh pr create returns before the pr is ready, we have to poll until success + + if [ $BACKOFF -eq 0 ]; then + echo "Could not create the PR" + exit 1 + fi + + # gh pr create returns before the pr is ready, so we again poll until success # https://github.com/cli/cli/issues/2619#issuecomment-1240543096 - for BACKOFF in 1 2 4 8 15 15 15 15 0; do + for BACKOFF in 1 2 4 8 0; do sleep $BACKOFF if gh pr merge --auto --squash; then break fi done - (($BACKOFF)) || (echo "Could not set automerge" && false) + + if [ $BACKOFF -eq 0 ]; then + echo "Could not set automerge" + exit 1 + fi From e45c9d5d82c77daca1b82167968c47c40d27bd9f Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Sat, 11 Mar 2023 15:04:16 +0000 Subject: [PATCH 006/162] Attempt to fix PyPy CI (#2536) * Attempt to fix PyPy CI * Fix typo for job name * Try out setup-python's nightly pypy support * Remove redundant shell scripting * Final changes to CI workflow * Bump @actions/setup-python to maybe fix issue installing PyPy nightly on Windows * Make Windows CI pass * Update exports for PyPy on Windows * Bump timeout for CI on MacOS * Lower time taken for test_handshake_over_terrible_network * Run black The segfault on Windows is something to look into eventually: not now. 
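Before the CI changes below, a note on the Autodeps workflow assembled in the first five patches: it relies twice on polling `gh` with exponential backoff, because `git push` returns before GitHub can serve the new branch and `gh pr create` returns before the PR will accept automerge. The sketch below is not part of any patch; the `retry` helper is introduced here purely for illustration, and it assumes the branch is already pushed and `gh` is authenticated (the workflow supplies `GH_TOKEN`).

```bash
#!/usr/bin/env bash
# Sketch only -- not the workflow itself. "retry" is an illustrative helper;
# the gh invocations are the ones the Autodeps workflow uses.
set -euo pipefail

# Retry a command with exponential backoff (the workflow sleeps 1 2 4 8),
# reporting failure only after every attempt has failed.
retry() {
    local backoff
    for backoff in 1 2 4 8; do
        sleep "$backoff"
        if "$@"; then
            return 0
        fi
    done
    return 1
}

short_sha="${GITHUB_SHA:-$(git rev-parse HEAD)}"   # the workflow sets GITHUB_SHA

# Both follow-up calls are polled: GitHub needs a moment to notice the pushed
# branch, and then another moment before the new PR can be set to automerge.
retry gh pr create --label dependencies --body "" \
    --title "Bump dependencies from commit ${short_sha:0:6}" \
    || { echo "Could not create the PR"; exit 1; }

retry gh pr merge --auto --squash \
    || { echo "Could not set automerge"; exit 1; }
```

Returning from the helper on success also sidesteps a corner case in the workflow's inline loops, where a success on the final zero-second attempt would still be reported as a failure by the `BACKOFF`-is-zero check.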
--- .github/workflows/ci.yml | 60 +++++++++++++++++--------------- ci.sh | 34 ------------------ trio/_core/tests/test_windows.py | 2 +- trio/_core/tests/tutil.py | 2 +- trio/socket.py | 6 +++- trio/tests/test_dtls.py | 4 ++- 6 files changed, 41 insertions(+), 67 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ebc22d6850..170e9245db 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,11 +14,16 @@ jobs: strategy: fail-fast: false matrix: - python: ['3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.8-nightly', 'pypy-3.9-nightly'] arch: ['x86', 'x64'] lsp: [''] lsp_extract_file: [''] extra_name: [''] + exclude: + - python: 'pypy-3.8-nightly' + arch: 'x86' + - python: 'pypy-3.9-nightly' + arch: 'x86' include: - python: '3.8' arch: 'x64' @@ -35,16 +40,20 @@ jobs: # lsp: 'http://download.pctools.com/mirror/updates/9.0.0.2308-SDavfree-lite_en.exe' # lsp_extract_file: '' # extra_name: ', with non-IFS LSP' - - python: '3.8' # <- not actually used - arch: 'x64' - pypy_nightly_branch: 'py3.8' - extra_name: ', pypy 3.8 nightly' - + continue-on-error: >- + ${{ + ( + endsWith(matrix.python, '-dev') + || endsWith(matrix.python, '-nightly') + ) + && true + || false + }} steps: - name: Checkout uses: actions/checkout@v3 - name: Setup python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: # This allows the matrix to specify just the major.minor version while still # expanding it to get the latest patch version including alpha releases. @@ -74,29 +83,19 @@ jobs: strategy: fail-fast: false matrix: - python: ['pypy-3.7', 'pypy-3.8', 'pypy-3.9', '3.7', '3.8', '3.9', '3.10', '3.11', '3.12-dev'] + python: ['pypy-3.7', 'pypy-3.8', 'pypy-3.9', '3.7', '3.8', '3.9', '3.10', '3.11', '3.12-dev', 'pypy-3.8-nightly', 'pypy-3.9-nightly'] check_formatting: ['0'] - pypy_nightly_branch: [''] extra_name: [''] include: - python: '3.8' check_formatting: '1' extra_name: ', check formatting' - - python: '3.7' # <- not actually used - pypy_nightly_branch: 'py3.7' - extra_name: ', pypy 3.7 nightly' - - python: '3.8' # <- not actually used - pypy_nightly_branch: 'py3.8' - extra_name: ', pypy 3.8 nightly' - - python: '3.9' # <- not actually used - pypy_nightly_branch: 'py3.9' - extra_name: ', pypy 3.9 nightly' continue-on-error: >- ${{ ( matrix.check_formatting == '1' - || matrix.pypy_nightly_branch == 'py3.7' || endsWith(matrix.python, '-dev') + || endsWith(matrix.python, '-nightly') ) && true || false @@ -105,7 +104,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 if: "!endsWith(matrix.python, '-dev')" with: python-version: ${{ fromJSON(format('["{0}", "{1}"]', format('{0}.0-alpha - {0}.X', matrix.python), matrix.python))[startsWith(matrix.python, 'pypy')] }} @@ -119,7 +118,6 @@ jobs: - name: Run tests run: ./ci.sh env: - PYPY_NIGHTLY_BRANCH: '${{ matrix.pypy_nightly_branch }}' CHECK_FORMATTING: '${{ matrix.check_formatting }}' # Should match 'name:' up above JOB_NAME: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' @@ -159,22 +157,26 @@ jobs: macOS: name: 'macOS (${{ matrix.python }})' - timeout-minutes: 10 + timeout-minutes: 15 runs-on: 'macos-latest' strategy: fail-fast: false matrix: - python: ['3.7', '3.8', '3.9', '3.10'] - include: - - python: '3.8' # <- not actually used - arch: 'x64' - pypy_nightly_branch: 'py3.8' - extra_name: ', pypy 3.8 nightly' + python: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.8-nightly', 
'pypy-3.9-nightly'] + continue-on-error: >- + ${{ + ( + endsWith(matrix.python, '-dev') + || endsWith(matrix.python, '-nightly') + ) + && true + || false + }} steps: - name: Checkout uses: actions/checkout@v3 - name: Setup python - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ fromJSON(format('["{0}", "{1}"]', format('{0}.0-alpha - {0}.X', matrix.python), matrix.python))[startsWith(matrix.python, 'pypy')] }} cache: pip diff --git a/ci.sh b/ci.sh index d4f9df3a94..f8c4f24d7d 100755 --- a/ci.sh +++ b/ci.sh @@ -26,40 +26,6 @@ function curl-harder() { return 1 } -################################################################ -# Bootstrap python environment, if necessary -################################################################ - -### PyPy nightly ### - -if [ "$PYPY_NIGHTLY_BRANCH" != "" ]; then - JOB_NAME="pypy_nightly_${PYPY_NIGHTLY_BRANCH}" - curl-harder -o pypy.tar.bz2 http://buildbot.pypy.org/nightly/${PYPY_NIGHTLY_BRANCH}/pypy-c-jit-latest-linux64.tar.bz2 - if [ ! -s pypy.tar.bz2 ]; then - # We know: - # - curl succeeded (200 response code) - # - nonetheless, pypy.tar.bz2 does not exist, or contains no data - # This isn't going to work, and the failure is not informative of - # anything involving Trio. - ls -l - echo "PyPy3 nightly build failed to download – something is wrong on their end." - echo "Skipping testing against the nightly build for right now." - exit 0 - fi - tar xaf pypy.tar.bz2 - # something like "pypy-c-jit-89963-748aa3022295-linux64" - PYPY_DIR=$(echo pypy-c-jit-*) - PYTHON_EXE=$PYPY_DIR/bin/pypy3 - - if ! ($PYTHON_EXE -m ensurepip \ - && $PYTHON_EXE -m pip install virtualenv \ - && $PYTHON_EXE -m virtualenv testenv); then - echo "pypy nightly is broken; skipping tests" - exit 0 - fi - source testenv/bin/activate -fi - ################################################################ # We have a Python environment! ################################################################ diff --git a/trio/_core/tests/test_windows.py b/trio/_core/tests/test_windows.py index bd81ef0f33..eb94b82b87 100644 --- a/trio/_core/tests/test_windows.py +++ b/trio/_core/tests/test_windows.py @@ -91,7 +91,7 @@ async def read_region(start, end): assert buffer == data - with pytest.raises(BufferError): + with pytest.raises((BufferError, TypeError)): await _core.readinto_overlapped(handle, b"immutable") finally: kernel32.CloseHandle(handle) diff --git a/trio/_core/tests/tutil.py b/trio/_core/tests/tutil.py index 016e0fd3e1..74760df38f 100644 --- a/trio/_core/tests/tutil.py +++ b/trio/_core/tests/tutil.py @@ -62,7 +62,7 @@ def gc_collect_harder(): # garbage collection, because executing their __del__ method to print the # warning can cause them to be resurrected. So we call collect a few times # to make sure. 
- for _ in range(4): + for _ in range(5): gc.collect() diff --git a/trio/socket.py b/trio/socket.py index 4bbc7d14f6..b9e9308dd9 100644 --- a/trio/socket.py +++ b/trio/socket.py @@ -118,7 +118,11 @@ SCM_J1939_DEST_ADDR, SCM_J1939_DEST_NAME, SCM_J1939_ERRQUEUE, SCM_J1939_PRIO, SO_J1939_ERRQUEUE, SO_J1939_FILTER, SO_J1939_PROMISC, SO_J1939_SEND_PRIO, UDPLITE_RECV_CSCOV, UDPLITE_SEND_CSCOV, IP_RECVTOS, - TCP_KEEPALIVE, SO_INCOMING_CPU + TCP_KEEPALIVE, SO_INCOMING_CPU, FD_ACCEPT, FD_CLOSE, FD_CLOSE_BIT, + FD_CONNECT, FD_CONNECT_BIT, FD_READ, FD_WRITE, INFINITE, + WSA_FLAG_OVERLAPPED, WSA_INVALID_HANDLE, WSA_INVALID_PARAMETER, + WSA_IO_INCOMPLETE, WSA_IO_PENDING, WSA_NOT_ENOUGH_MEMORY, + WSA_OPERATION_ABORTED, WSA_WAIT_FAILED, WSA_WAIT_TIMEOUT, ) # fmt: on except ImportError: diff --git a/trio/tests/test_dtls.py b/trio/tests/test_dtls.py index 680a8793eb..e771d8acbe 100644 --- a/trio/tests/test_dtls.py +++ b/trio/tests/test_dtls.py @@ -3,6 +3,7 @@ import trio.testing from trio import DTLSEndpoint import random +import sys import attr from contextlib import asynccontextmanager from itertools import count @@ -101,7 +102,8 @@ async def test_smoke(ipv6): @slow async def test_handshake_over_terrible_network(autojump_clock): - HANDSHAKES = 1000 + # PyPy is not fast enough + HANDSHAKES = 500 if sys.implementation.name == "pypy" else 1000 r = random.Random(0) fn = FakeNet() fn.enable() From 55abf377a88b2dc49b613635b547d5147257a2ac Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Sat, 11 Mar 2023 13:10:36 -0500 Subject: [PATCH 007/162] use autojump_threshold in test_handshake_over_terrible_network observation: any autojump threshold over 0 takes MUCH longer than 0 hypothesis: 10 second timeout cscope at the end of test is spuriously cancelling attempts on slow machines experiment: set a minimal threshold and scale back # of handshakes to finish in a reasonable time, check if CI timeouts are avoided --- trio/tests/test_dtls.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/trio/tests/test_dtls.py b/trio/tests/test_dtls.py index e771d8acbe..d8ef2e4078 100644 --- a/trio/tests/test_dtls.py +++ b/trio/tests/test_dtls.py @@ -103,10 +103,12 @@ async def test_smoke(ipv6): @slow async def test_handshake_over_terrible_network(autojump_clock): # PyPy is not fast enough - HANDSHAKES = 500 if sys.implementation.name == "pypy" else 1000 + HANDSHAKES = 50 if sys.implementation.name == "pypy" else 100 r = random.Random(0) fn = FakeNet() fn.enable() + # avoid spurious timeouts on slow machines + autojump_clock.autojump_threshold = 0.001 async with dtls_echo_server() as (_, address): async with trio.open_nursery() as nursery: From 4aace6d2d45cada9fc86706495bcfec9c244eff7 Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Sat, 11 Mar 2023 20:19:17 -0500 Subject: [PATCH 008/162] unify HANDSHAKES on all platforms --- trio/tests/test_dtls.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/trio/tests/test_dtls.py b/trio/tests/test_dtls.py index d8ef2e4078..1420ea43d1 100644 --- a/trio/tests/test_dtls.py +++ b/trio/tests/test_dtls.py @@ -3,7 +3,6 @@ import trio.testing from trio import DTLSEndpoint import random -import sys import attr from contextlib import asynccontextmanager from itertools import count @@ -102,8 +101,7 @@ async def test_smoke(ipv6): @slow async def test_handshake_over_terrible_network(autojump_clock): - # PyPy is not fast enough - HANDSHAKES = 50 if sys.implementation.name == "pypy" else 100 + HANDSHAKES = 100 r = random.Random(0) fn = FakeNet() fn.enable() 
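The two test_dtls.py patches above lean on how Trio's autojump clock advances virtual time: `autojump_threshold` is the number of real seconds the run loop must sit idle before the `MockClock` jumps to the next deadline, so a threshold of 0 jumps the moment everything is blocked, while any positive value adds that much real waiting to every jump — which is what the observation/hypothesis/experiment notes in the commit message are probing. The snippet below is not part of the patch series; it is a minimal standalone illustration using `trio.testing.MockClock`, the clock the test's `autojump_clock` fixture provides.

```python
# Standalone illustration (not part of the patches): with autojump_threshold=0,
# virtual time jumps to the next deadline as soon as every task is blocked, so
# a 10-second scope like the one the commit message mentions expires in virtual
# time, almost instantly in real time.
import trio
import trio.testing


async def main():
    start = trio.current_time()
    with trio.move_on_after(10):   # deadline 10 virtual seconds from now
        await trio.sleep(3600)     # an hour of virtual time, skipped by the jump
    print("virtual seconds elapsed:", trio.current_time() - start)  # -> 10.0


trio.run(main, clock=trio.testing.MockClock(autojump_threshold=0))
```

Raising the threshold, as patch 007 does with 0.001, only changes how long the loop must be idle in real time before each jump; the virtual-time outcome is identical, which is why any value above zero makes the test markedly slower on the wall clock.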
From bc955e9ebd137fb2ad9faf896619c01d2ad33201 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 11:02:04 +0000 Subject: [PATCH 009/162] Bump urllib3 from 1.26.14 to 1.26.15 Bumps [urllib3](https://github.com/urllib3/urllib3) from 1.26.14 to 1.26.15. - [Release notes](https://github.com/urllib3/urllib3/releases) - [Changelog](https://github.com/urllib3/urllib3/blob/main/CHANGES.rst) - [Commits](https://github.com/urllib3/urllib3/compare/1.26.14...1.26.15) --- updated-dependencies: - dependency-name: urllib3 dependency-type: indirect update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- docs-requirements.txt | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 4c197c69b3..6c31f1a452 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -38,8 +38,6 @@ imagesize==1.4.1 # via sphinx immutables==0.19 # via -r docs-requirements.in -importlib-metadata==6.0.0 - # via sphinx incremental==22.10.0 # via towncrier jinja2==3.0.3 @@ -55,8 +53,6 @@ packaging==23.0 # via sphinx pygments==2.14.0 # via sphinx -pytz==2022.7.1 - # via babel requests==2.28.2 # via sphinx sniffio==1.3.0 @@ -88,14 +84,10 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-trio==1.1.2 # via -r docs-requirements.in -tomli==2.0.1 - # via towncrier towncrier==22.12.0 # via -r docs-requirements.in -urllib3==1.26.14 +urllib3==1.26.15 # via requests -zipp==3.15.0 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools From fdb78255d10756841aa9c3205c67c2c1df133688 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 11:03:36 +0000 Subject: [PATCH 010/162] Autoformatter changes --- docs-requirements.txt | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs-requirements.txt b/docs-requirements.txt index 6c31f1a452..8799ed87db 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -38,6 +38,8 @@ imagesize==1.4.1 # via sphinx immutables==0.19 # via -r docs-requirements.in +importlib-metadata==6.0.0 + # via sphinx incremental==22.10.0 # via towncrier jinja2==3.0.3 @@ -53,6 +55,8 @@ packaging==23.0 # via sphinx pygments==2.14.0 # via sphinx +pytz==2022.7.1 + # via babel requests==2.28.2 # via sphinx sniffio==1.3.0 @@ -84,10 +88,14 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-trio==1.1.2 # via -r docs-requirements.in +tomli==2.0.1 + # via towncrier towncrier==22.12.0 # via -r docs-requirements.in urllib3==1.26.15 # via requests +zipp==3.15.0 + # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools From cc231fb3dc29bbf2514868f88fa061e0cb6753ca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 11:05:42 +0000 Subject: [PATCH 011/162] Bump platformdirs from 3.1.0 to 3.1.1 Bumps [platformdirs](https://github.com/platformdirs/platformdirs) from 3.1.0 to 3.1.1. - [Release notes](https://github.com/platformdirs/platformdirs/releases) - [Changelog](https://github.com/platformdirs/platformdirs/blob/main/CHANGES.rst) - [Commits](https://github.com/platformdirs/platformdirs/compare/3.1.0...3.1.1) --- updated-dependencies: - dependency-name: platformdirs dependency-type: indirect update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index cc80d23675..84f99158e7 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.0 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -91,7 +87,7 @@ pickleshare==0.7.5 # via ipython pip-tools==6.12.3 # via -r test-requirements.in -platformdirs==3.1.0 +platformdirs==3.1.1 # via # black # pylint @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.38.4 From 64fd8fb53f5c26017d1500f7f518c912db133a45 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 11:07:09 +0000 Subject: [PATCH 012/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 84f99158e7..8bc6656ab2 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.38.4 From 6a5efbdfb4ca81fbe2772a2fe8ec401dd81a5e9d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 13:43:30 +0000 Subject: [PATCH 013/162] Bump exceptiongroup from 1.1.0 to 1.1.1 Bumps [exceptiongroup](https://github.com/agronholm/exceptiongroup) from 1.1.0 to 1.1.1. - [Release notes](https://github.com/agronholm/exceptiongroup/releases) - [Changelog](https://github.com/agronholm/exceptiongroup/blob/main/CHANGES.rst) - [Commits](https://github.com/agronholm/exceptiongroup/compare/1.1.0...1.1.1) --- updated-dependencies: - dependency-name: exceptiongroup dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- docs-requirements.txt | 10 +--------- test-requirements.txt | 16 +--------------- 2 files changed, 2 insertions(+), 24 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 8799ed87db..ee040135b5 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -28,7 +28,7 @@ docutils==0.18.1 # via # sphinx # sphinx-rtd-theme -exceptiongroup==1.1.0 +exceptiongroup==1.1.1 # via -r docs-requirements.in idna==3.4 # via @@ -38,8 +38,6 @@ imagesize==1.4.1 # via sphinx immutables==0.19 # via -r docs-requirements.in -importlib-metadata==6.0.0 - # via sphinx incremental==22.10.0 # via towncrier jinja2==3.0.3 @@ -55,8 +53,6 @@ packaging==23.0 # via sphinx pygments==2.14.0 # via sphinx -pytz==2022.7.1 - # via babel requests==2.28.2 # via sphinx sniffio==1.3.0 @@ -88,14 +84,10 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-trio==1.1.2 # via -r docs-requirements.in -tomli==2.0.1 - # via towncrier towncrier==22.12.0 # via -r docs-requirements.in urllib3==1.26.15 # via requests -zipp==3.15.0 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/test-requirements.txt b/test-requirements.txt index 8bc6656ab2..b6c8cc7955 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,7 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest +exceptiongroup==1.1.1 flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -125,14 +122,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -146,10 +135,7 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.38.4 From 1754f41ef524ed077b01332fa8bc9d04680bfebe Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 13 Mar 2023 13:44:41 +0000 Subject: [PATCH 014/162] Autoformatter changes --- docs-requirements.txt | 8 ++++++++ test-requirements.txt | 16 +++++++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index ee040135b5..2929220236 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -38,6 +38,8 @@ imagesize==1.4.1 # via sphinx immutables==0.19 # via -r docs-requirements.in +importlib-metadata==6.0.0 + # via sphinx incremental==22.10.0 # via towncrier jinja2==3.0.3 @@ -53,6 +55,8 @@ packaging==23.0 # via sphinx pygments==2.14.0 # via sphinx +pytz==2022.7.1 + # via babel requests==2.28.2 # via sphinx sniffio==1.3.0 @@ -84,10 +88,14 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-trio==1.1.2 # via -r docs-requirements.in +tomli==2.0.1 + # via towncrier towncrier==22.12.0 # via -r docs-requirements.in urllib3==1.26.15 # via requests +zipp==3.15.0 + # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/test-requirements.txt b/test-requirements.txt index b6c8cc7955..8bc6656ab2 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,7 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # 
via pylint -exceptiongroup==1.1.1 +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -122,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -135,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.38.4 From eb0d415601586973df336f8758fb352935282a03 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Mar 2023 11:04:33 +0000 Subject: [PATCH 015/162] Bump wheel from 0.38.4 to 0.40.0 Bumps [wheel](https://github.com/pypa/wheel) from 0.38.4 to 0.40.0. - [Release notes](https://github.com/pypa/wheel/releases) - [Changelog](https://github.com/pypa/wheel/blob/main/docs/news.rst) - [Commits](https://github.com/pypa/wheel/compare/0.38.4...0.40.0) --- updated-dependencies: - dependency-name: wheel dependency-type: indirect update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 8bc6656ab2..6bdf5ed46e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -146,13 +134,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit -wheel==0.38.4 +wheel==0.40.0 # via pip-tools wrapt==1.15.0 # via astroid From f43edfd54f76df68ad9673ced223246b9ccdab18 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 15 Mar 2023 11:05:55 +0000 Subject: [PATCH 016/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 6bdf5ed46e..1d21102b69 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # 
mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 6e873b5054b9e718d57609ff1efaa46a0f308431 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Mar 2023 11:06:57 +0000 Subject: [PATCH 017/162] Bump pathspec from 0.11.0 to 0.11.1 Bumps [pathspec](https://github.com/cpburnz/python-pathspec) from 0.11.0 to 0.11.1. - [Release notes](https://github.com/cpburnz/python-pathspec/releases) - [Changelog](https://github.com/cpburnz/python-pathspec/blob/master/CHANGES.rst) - [Commits](https://github.com/cpburnz/python-pathspec/compare/v0.11.0...v0.11.1) --- updated-dependencies: - dependency-name: pathspec dependency-type: indirect update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 8bc6656ab2..0c509d9988 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -83,7 +79,7 @@ packaging==23.0 # pytest parso==0.8.3 # via jedi -pathspec==0.11.0 +pathspec==0.11.1 # via black pexpect==4.8.0 # via ipython @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.38.4 From c61d69431fb634f35c9b03f64800607f2b1af8b6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 15 Mar 2023 11:09:37 +0000 Subject: [PATCH 018/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 0c509d9988..291474eef1 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.38.4 From 99021ec555654047df1c9e32939722129de659c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 23 Mar 2023 11:13:40 +0000 Subject: [PATCH 019/162] Bump pylint from 2.17.0 to 2.17.1 Bumps [pylint](https://github.com/PyCQA/pylint) from 2.17.0 to 2.17.1. 
- [Release notes](https://github.com/PyCQA/pylint/releases) - [Commits](https://github.com/PyCQA/pylint/compare/v2.17.0...v2.17.1) --- updated-dependencies: - dependency-name: pylint dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 27a7060d4b..31849d3f92 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -109,7 +105,7 @@ pyflakes==2.4.0 # via flake8 pygments==2.14.0 # via ipython -pylint==2.17.0 +pylint==2.17.1 # via -r test-requirements.in pyopenssl==23.0.0 # via -r test-requirements.in @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 36f4745d7bbfbdb0421ee1f8d46972de72f8ad31 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 23 Mar 2023 11:15:07 +0000 Subject: [PATCH 020/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 31849d3f92..5bcf3f98f0 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From d9dad04df0d0e368025b8dcd184f94b1c5c63d5c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 Mar 2023 11:09:39 +0000 Subject: [PATCH 021/162] Bump pyopenssl from 23.0.0 to 23.1.0 Bumps [pyopenssl](https://github.com/pyca/pyopenssl) from 23.0.0 to 23.1.0. - [Release notes](https://github.com/pyca/pyopenssl/releases) - [Changelog](https://github.com/pyca/pyopenssl/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/pyopenssl/compare/23.0.0...23.1.0) --- updated-dependencies: - dependency-name: pyopenssl dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 5bcf3f98f0..39f3e4fd3e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -111,7 +107,7 @@ pygments==2.14.0 # via ipython pylint==2.17.1 # via -r test-requirements.in -pyopenssl==23.0.0 +pyopenssl==23.1.0 # via -r test-requirements.in pyproject-hooks==1.0.0 # via build @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From feb036f92ad22563b151968299996d65fdc78e89 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 24 Mar 2023 11:10:54 +0000 Subject: [PATCH 022/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 39f3e4fd3e..3c9c13a444 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From a935864ac5611892f36a0a97aabb48e6e4d7c154 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:15:23 +0000 Subject: [PATCH 023/162] Bump astroid from 2.15.0 to 2.15.1 Bumps [astroid](https://github.com/PyCQA/astroid) from 2.15.0 to 2.15.1. - [Release notes](https://github.com/PyCQA/astroid/releases) - [Changelog](https://github.com/PyCQA/astroid/blob/main/ChangeLog) - [Commits](https://github.com/PyCQA/astroid/compare/v2.15.0...v2.15.1) --- updated-dependencies: - dependency-name: astroid dependency-type: indirect update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 3c9c13a444..318a07283d 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,7 +6,7 @@ # astor==0.8.1 # via -r test-requirements.in -astroid==2.15.0 +astroid==2.15.1 # via pylint async-generator==1.10 # via -r test-requirements.in @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From d871855ce15a20519720711ec85454604540a3fe Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:16:40 +0000 Subject: [PATCH 024/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 318a07283d..318b75f314 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From cf937f1196ffef20fc7ca5a6bee4cc4985be19b9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:18:09 +0000 Subject: [PATCH 025/162] Bump tomlkit from 0.11.6 to 0.11.7 Bumps [tomlkit](https://github.com/sdispater/tomlkit) from 0.11.6 to 0.11.7. - [Release notes](https://github.com/sdispater/tomlkit/releases) - [Changelog](https://github.com/sdispater/tomlkit/blob/master/CHANGELOG.md) - [Commits](https://github.com/sdispater/tomlkit/compare/0.11.6...0.11.7) --- updated-dependencies: - dependency-name: tomlkit dependency-type: indirect update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 3c9c13a444..0d082a753b 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -125,15 +121,7 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest -tomlkit==0.11.6 +tomlkit==0.11.7 # via pylint traitlets==5.9.0 # via @@ -146,10 +134,7 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From a0e96c5fb2dbdf454ef81d71a973a91d1f7d0da9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:19:31 +0000 Subject: [PATCH 026/162] Bump platformdirs from 3.1.1 to 3.2.0 Bumps [platformdirs](https://github.com/platformdirs/platformdirs) from 3.1.1 to 3.2.0. - [Release notes](https://github.com/platformdirs/platformdirs/releases) - [Changelog](https://github.com/platformdirs/platformdirs/blob/main/CHANGES.rst) - [Commits](https://github.com/platformdirs/platformdirs/compare/3.1.1...3.2.0) --- updated-dependencies: - dependency-name: platformdirs dependency-type: indirect update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 3c9c13a444..8dc73a8c5a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -91,7 +87,7 @@ pickleshare==0.7.5 # via ipython pip-tools==6.12.3 # via -r test-requirements.in -platformdirs==3.1.1 +platformdirs==3.2.0 # via # black # pylint @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From fd07f644a59b864218adb65479d471a871660a69 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:20:43 +0000 Subject: [PATCH 027/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 0d082a753b..48a22bae78 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r 
test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.7 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 418c0728c32250a92036e8b531d4b6bfd40e11ab Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:26:22 +0000 Subject: [PATCH 028/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 8dc73a8c5a..a4bbee6640 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 58a9f4cdc70a693a36d026336c0b0eaf366c9827 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Mar 2023 11:10:10 +0000 Subject: [PATCH 029/162] Bump pyopenssl from 23.1.0 to 23.1.1 Bumps [pyopenssl](https://github.com/pyca/pyopenssl) from 23.1.0 to 23.1.1. - [Release notes](https://github.com/pyca/pyopenssl/releases) - [Changelog](https://github.com/pyca/pyopenssl/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/pyopenssl/compare/23.1.0...23.1.1) --- updated-dependencies: - dependency-name: pyopenssl dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 3c9c13a444..b6e180205b 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -111,7 +107,7 @@ pygments==2.14.0 # via ipython pylint==2.17.1 # via -r test-requirements.in -pyopenssl==23.1.0 +pyopenssl==23.1.1 # via -r test-requirements.in pyproject-hooks==1.0.0 # via build @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 7d909945fad70ef4edc80836e7c52f40b86db0bc Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 28 Mar 2023 11:11:56 +0000 Subject: [PATCH 030/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index b6e180205b..b51ae6d5b2 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 14402b90258141d3f6bbff76538fc3fbcbfd5f97 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Mar 2023 11:45:25 +0000 Subject: [PATCH 031/162] Bump types-pyopenssl from 23.0.0.4 to 23.1.0.1 Bumps [types-pyopenssl](https://github.com/python/typeshed) from 23.0.0.4 to 23.1.0.1. - [Release notes](https://github.com/python/typeshed/releases) - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pyopenssl dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 3c9c13a444..86251c79f9 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -141,15 +129,12 @@ traitlets==5.9.0 # matplotlib-inline trustme==0.9.0 # via -r test-requirements.in -types-pyopenssl==23.0.0.4 ; implementation_name == "cpython" +types-pyopenssl==23.1.0.1 ; implementation_name == "cpython" # via -r test-requirements.in typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From bbdf1833284858d4c495d3bc0186865ca8e3e424 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 28 Mar 2023 12:14:29 +0000 Subject: [PATCH 032/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 86251c79f9..9bec62707e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.6 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.1.0.1 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 52bc3d3080cc7b651b2608570a6d76c73bcfe6d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Mar 2023 11:12:40 +0000 Subject: [PATCH 033/162] Bump black from 23.1.0 to 23.3.0 Bumps [black](https://github.com/psf/black) from 23.1.0 to 23.3.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/compare/23.1.0...23.3.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index acd48974cc..a1d68d8f86 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -17,7 +17,7 @@ attrs==22.2.0 # pytest backcall==0.2.0 # via ipython -black==23.1.0 ; implementation_name == "cpython" +black==23.3.0 ; implementation_name == "cpython" # via -r test-requirements.in build==0.10.0 # via pip-tools @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.7 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.1.0.1 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 01c094b5f11c96c4f774e7716a1952a63c584d75 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 29 Mar 2023 11:14:12 +0000 Subject: [PATCH 034/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index a1d68d8f86..b8d63d7955 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.7 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.1.0.1 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 81b992e2eea79cb57fd908b0c9c4cc8f0fd0778a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Apr 2023 11:08:25 +0000 Subject: [PATCH 035/162] Bump astroid from 2.15.1 to 2.15.2 Bumps [astroid](https://github.com/PyCQA/astroid) from 2.15.1 to 2.15.2. - [Release notes](https://github.com/PyCQA/astroid/releases) - [Changelog](https://github.com/pylint-dev/astroid/blob/main/ChangeLog) - [Commits](https://github.com/PyCQA/astroid/compare/v2.15.1...v2.15.2) --- updated-dependencies: - dependency-name: astroid dependency-type: indirect update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index b8d63d7955..c8a5797cd3 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,7 +6,7 @@ # astor==0.8.1 # via -r test-requirements.in -astroid==2.15.1 +astroid==2.15.2 # via pylint async-generator==1.10 # via -r test-requirements.in @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.7 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.1.0.1 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From b5f911f9e7a0c41b78dc3037514b4c61015e66bd Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 3 Apr 2023 11:09:37 +0000 Subject: [PATCH 036/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index c8a5797cd3..9d3d0ed6d5 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.7 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.1.0.1 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 7e1327e0f607e7068dc0642e839371e77fa7216c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Apr 2023 14:55:26 +0000 Subject: [PATCH 037/162] Bump pylint from 2.17.1 to 2.17.2 Bumps [pylint](https://github.com/PyCQA/pylint) from 2.17.1 to 2.17.2. - [Release notes](https://github.com/PyCQA/pylint/releases) - [Commits](https://github.com/PyCQA/pylint/compare/v2.17.1...v2.17.2) --- updated-dependencies: - dependency-name: pylint dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- test-requirements.txt | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 9d3d0ed6d5..006872e0c0 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,10 +39,6 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" - # via - # -r test-requirements.in - # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -109,7 +105,7 @@ pyflakes==2.4.0 # via flake8 pygments==2.14.0 # via ipython -pylint==2.17.1 +pylint==2.17.2 # via -r test-requirements.in pyopenssl==23.1.1 # via -r test-requirements.in @@ -125,14 +121,6 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in -tomli==2.0.1 - # via - # black - # build - # coverage - # mypy - # pylint - # pytest tomlkit==0.11.7 # via pylint traitlets==5.9.0 @@ -146,10 +134,7 @@ types-pyopenssl==23.1.0.1 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in - # astroid - # black # mypy - # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 2939e2c01573a30f75506b8d5dd11bd57ae4031f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 3 Apr 2023 14:56:44 +0000 Subject: [PATCH 038/162] Autoformatter changes --- test-requirements.txt | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/test-requirements.txt b/test-requirements.txt index 006872e0c0..56438aefd6 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -39,6 +39,10 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint +exceptiongroup==1.1.1 ; python_version < "3.11" + # via + # -r test-requirements.in + # pytest flake8==4.0.1 # via -r test-requirements.in idna==3.4 @@ -121,6 +125,14 @@ sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +tomli==2.0.1 + # via + # black + # build + # coverage + # mypy + # pylint + # pytest tomlkit==0.11.7 # via pylint traitlets==5.9.0 @@ -134,7 +146,10 @@ types-pyopenssl==23.1.0.1 ; implementation_name == "cpython" typing-extensions==4.5.0 ; implementation_name == "cpython" # via # -r test-requirements.in + # astroid + # black # mypy + # pylint wcwidth==0.2.6 # via prompt-toolkit wheel==0.40.0 From 61d5d31e303f268cee6af9900d49a20f73e034de Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Mon, 1 May 2023 01:37:37 +0000 Subject: [PATCH 039/162] Update pip-tools --- test-requirements.in | 2 +- test-requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test-requirements.in b/test-requirements.in index eda15ef4b3..00a37afda7 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -16,7 +16,7 @@ mypy; implementation_name == "cpython" types-pyOpenSSL; implementation_name == "cpython" flake8 astor # code generation -pip-tools +pip-tools >= 6.13.0 # https://github.com/python-trio/trio/pull/654#issuecomment-420518745 # typed_ast is deprecated as of 3.8, and straight up doesn't compile on 3.10-dev as of 2021-12-13 diff --git a/test-requirements.txt b/test-requirements.txt index 56438aefd6..43e066a744 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -89,7 +89,7 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.12.3 +pip-tools==6.13.0 # via -r test-requirements.in platformdirs==3.2.0 # via From 001675a7698967dc404caa771803881efa483f9e 
Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Mon, 1 May 2023 08:29:29 -0400 Subject: [PATCH 040/162] Invert no changes check Changes in the working tree report 1 with this command, and 1 is falsy in bash. --- .github/workflows/autodeps.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/autodeps.yml b/.github/workflows/autodeps.yml index 1f93fa03aa..0082d6a7d0 100644 --- a/.github/workflows/autodeps.yml +++ b/.github/workflows/autodeps.yml @@ -40,7 +40,7 @@ jobs: GH_TOKEN: ${{ github.token }} run: | # https://stackoverflow.com/a/3879077/4504950 - if ! git diff-index --quiet HEAD; then + if git diff-index --quiet HEAD; then echo "No changes to commit!" exit 0 fi From 2fb8e405fe0255058fc3ae3ef41a931cbd1388e2 Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Mon, 1 May 2023 17:28:56 +0000 Subject: [PATCH 041/162] Ensure our autodeps workflow adds the new files --- .github/workflows/autodeps.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/autodeps.yml b/.github/workflows/autodeps.yml index 0082d6a7d0..67e976261a 100644 --- a/.github/workflows/autodeps.yml +++ b/.github/workflows/autodeps.yml @@ -49,6 +49,7 @@ jobs: git switch --force-create autodeps/bump_from_${GITHUB_SHA:0:6} git config user.name 'github-actions[bot]' git config user.email '41898282+github-actions[bot]@users.noreply.github.com' + git add . git commit -am "Dependency updates" git push --force --set-upstream origin autodeps/bump_from_${GITHUB_SHA:0:6} From 67496a4da282c123717abbecf3f0e2acb0522a0c Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Thu, 4 May 2023 23:24:05 +0000 Subject: [PATCH 042/162] Some small improvements before debugging autodeps --- .github/workflows/autodeps.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/autodeps.yml b/.github/workflows/autodeps.yml index 67e976261a..9611cb9f1c 100644 --- a/.github/workflows/autodeps.yml +++ b/.github/workflows/autodeps.yml @@ -27,8 +27,8 @@ jobs: run: | python -m pip install -U pip python -m pip install -r test-requirements.txt - pip-compile test-requirements.in - pip-compile docs-requirements.in + pip-compile -U test-requirements.in + pip-compile -U docs-requirements.in - name: Black run: | # The new dependencies may contain a new black version. @@ -40,7 +40,7 @@ jobs: GH_TOKEN: ${{ github.token }} run: | # https://stackoverflow.com/a/3879077/4504950 - if git diff-index --quiet HEAD; then + if git diff-index --exit-code HEAD; then echo "No changes to commit!" exit 0 fi @@ -49,7 +49,6 @@ jobs: git switch --force-create autodeps/bump_from_${GITHUB_SHA:0:6} git config user.name 'github-actions[bot]' git config user.email '41898282+github-actions[bot]@users.noreply.github.com' - git add . 
git commit -am "Dependency updates" git push --force --set-upstream origin autodeps/bump_from_${GITHUB_SHA:0:6} From d91cc0ecedfb692fc478df84809bac054b601eff Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 5 May 2023 21:15:59 +0900 Subject: [PATCH 043/162] Bump dependencies from commit 967640 (#2639) * Dependency updates * Manually undo Python 3.8+ version bumps * Choose older isort version * Fix formatting * Try downgrading `cryptography` --------- Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: EXPLOSION --- docs-requirements.txt | 17 +++++++++-------- test-requirements.in | 8 +++++++- test-requirements.txt | 39 ++++++++++++++++++++------------------- 3 files changed, 36 insertions(+), 28 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 2929220236..7cf9dac27f 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -8,7 +8,7 @@ alabaster==0.7.13 # via sphinx async-generator==1.10 # via -r docs-requirements.in -attrs==22.2.0 +attrs==23.1.0 # via # -r docs-requirements.in # outcome @@ -38,7 +38,7 @@ imagesize==1.4.1 # via sphinx immutables==0.19 # via -r docs-requirements.in -importlib-metadata==6.0.0 +importlib-metadata==6.6.0 # via sphinx incremental==22.10.0 # via towncrier @@ -51,13 +51,13 @@ markupsafe==2.1.2 # via jinja2 outcome==1.2.0 # via -r docs-requirements.in -packaging==23.0 +packaging==23.1 # via sphinx -pygments==2.14.0 +pygments==2.15.1 # via sphinx -pytz==2022.7.1 +pytz==2023.3 # via babel -requests==2.28.2 +requests==2.30.0 # via sphinx sniffio==1.3.0 # via -r docs-requirements.in @@ -69,6 +69,7 @@ sphinx==6.1.3 # via # -r docs-requirements.in # sphinx-rtd-theme + # sphinxcontrib-jquery # sphinxcontrib-trio sphinx-rtd-theme==1.2.0 # via -r docs-requirements.in @@ -78,7 +79,7 @@ sphinxcontrib-devhelp==1.0.2 # via sphinx sphinxcontrib-htmlhelp==2.0.1 # via sphinx -sphinxcontrib-jquery==2.0.0 +sphinxcontrib-jquery==4.1 # via sphinx-rtd-theme sphinxcontrib-jsmath==1.0.1 # via sphinx @@ -92,7 +93,7 @@ tomli==2.0.1 # via towncrier towncrier==22.12.0 # via -r docs-requirements.in -urllib3==1.26.15 +urllib3==2.0.2 # via requests zipp==3.15.0 # via importlib-metadata diff --git a/test-requirements.in b/test-requirements.in index 00a37afda7..ee0924b75b 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -14,7 +14,7 @@ cryptography>=36.0.0 # 35.0.0 is transitive but fails black; implementation_name == "cpython" mypy; implementation_name == "cpython" types-pyOpenSSL; implementation_name == "cpython" -flake8 +flake8 < 6.0.0 # 6.0.0 drops python 3.7 astor # code generation pip-tools >= 6.13.0 @@ -32,3 +32,9 @@ idna outcome sniffio exceptiongroup >= 1.0.0rc9; python_version < "3.11" + +# isort 5.12.0 requires python 3.8 +isort < 5.12.0 + +# cryptography 40.0.2 (and presumably prior) segfaults on PyPy 3.7 +cryptography < 40.0.0 diff --git a/test-requirements.txt b/test-requirements.txt index 43e066a744..8e2d54758c 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,15 +6,14 @@ # astor==0.8.1 # via -r test-requirements.in -astroid==2.15.2 +astroid==2.15.4 # via pylint async-generator==1.10 # via -r test-requirements.in -attrs==22.2.0 +attrs==23.1.0 # via # -r test-requirements.in # outcome - # pytest backcall==0.2.0 # via ipython black==23.3.0 ; implementation_name == "cpython" @@ -27,7 +26,7 @@ click==8.1.3 # via # black # pip-tools -coverage[toml]==6.4.1 +coverage[toml]==7.2.5 # via 
pytest-cov cryptography==39.0.2 # via @@ -43,7 +42,7 @@ exceptiongroup==1.1.1 ; python_version < "3.11" # via # -r test-requirements.in # pytest -flake8==4.0.1 +flake8==5.0.4 # via -r test-requirements.in idna==3.4 # via @@ -53,8 +52,10 @@ iniconfig==2.0.0 # via pytest ipython==7.34.0 # via -r test-requirements.in -isort==5.10.1 - # via pylint +isort==5.11.5 + # via + # -r test-requirements.in + # pylint jedi==0.18.2 # via # -r test-requirements.in @@ -63,11 +64,11 @@ lazy-object-proxy==1.9.0 # via astroid matplotlib-inline==0.1.6 # via ipython -mccabe==0.6.1 +mccabe==0.7.0 # via # flake8 # pylint -mypy==1.1.1 ; implementation_name == "cpython" +mypy==1.2.0 ; implementation_name == "cpython" # via -r test-requirements.in mypy-extensions==1.0.0 ; implementation_name == "cpython" # via @@ -76,7 +77,7 @@ mypy-extensions==1.0.0 ; implementation_name == "cpython" # mypy outcome==1.2.0 # via -r test-requirements.in -packaging==23.0 +packaging==23.1 # via # black # build @@ -91,7 +92,7 @@ pickleshare==0.7.5 # via ipython pip-tools==6.13.0 # via -r test-requirements.in -platformdirs==3.2.0 +platformdirs==3.5.0 # via # black # pylint @@ -101,21 +102,21 @@ prompt-toolkit==3.0.38 # via ipython ptyprocess==0.7.0 # via pexpect -pycodestyle==2.8.0 +pycodestyle==2.9.1 # via flake8 pycparser==2.21 # via cffi -pyflakes==2.4.0 +pyflakes==2.5.0 # via flake8 -pygments==2.14.0 +pygments==2.15.1 # via ipython -pylint==2.17.2 +pylint==2.17.3 # via -r test-requirements.in pyopenssl==23.1.1 # via -r test-requirements.in pyproject-hooks==1.0.0 # via build -pytest==7.2.2 +pytest==7.3.1 # via # -r test-requirements.in # pytest-cov @@ -133,15 +134,15 @@ tomli==2.0.1 # mypy # pylint # pytest -tomlkit==0.11.7 +tomlkit==0.11.8 # via pylint traitlets==5.9.0 # via # ipython # matplotlib-inline -trustme==0.9.0 +trustme==1.0.0 # via -r test-requirements.in -types-pyopenssl==23.1.0.1 ; implementation_name == "cpython" +types-pyopenssl==23.1.0.2 ; implementation_name == "cpython" # via -r test-requirements.in typing-extensions==4.5.0 ; implementation_name == "cpython" # via From 9cb7b00a6fa1ff8b982d17e1648aa37f84a3ccf7 Mon Sep 17 00:00:00 2001 From: jakkdl Date: Fri, 5 May 2023 15:36:26 +0200 Subject: [PATCH 044/162] add flake8-trio to list of Tools and Utilities --- docs/source/awesome-trio-libraries.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/awesome-trio-libraries.rst b/docs/source/awesome-trio-libraries.rst index 50b3d698a3..7aca473dfd 100644 --- a/docs/source/awesome-trio-libraries.rst +++ b/docs/source/awesome-trio-libraries.rst @@ -100,6 +100,7 @@ Tools and Utilities ------------------- * `trio-typing `__ - Type hints for Trio and related projects. * `trio-util `__ - An assortment of utilities for the Trio async/await framework. +* `flake8-trio `__ - Highly opinionated linter for various sorts of problems in Trio and/or AnyIO. Can run as a flake8 plugin, or standalone with support for autofixing some errors. * `tricycle `__ - This is a library of interesting-but-maybe-not-yet-fully-proven extensions to Trio. * `tenacity `__ - Retrying library for Python with async/await support. * `perf-timer `__ - A code timer with Trio async support (see ``TrioPerfTimer``). Collects execution time of a block of code excluding time when the coroutine isn't scheduled, such as during blocking I/O and sleep. Also offers ``trio_perf_counter()`` for low-level timing. 
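The next several workflow patches (045, 047, 049) keep reworking how the autodeps job decides whether `pip-compile` actually changed anything before committing and opening a PR. A minimal shell sketch of the two idioms those patches move between, assuming a checkout where the pinned requirements files have just been regenerated (illustrative only, not part of any patch):

    # Idiom 1: ask git whether anything differs from HEAD.
    git add .                              # refresh the index so diff-index sees work-tree edits
    if git diff-index --quiet HEAD; then   # exits 0 when nothing changed, 1 (falsy to `if`) when something did
        echo "No changes to commit!"
        exit 0
    fi

    # Idiom 2: just try to commit, and treat a failing commit as "nothing to do".
    if ! git commit -am "Dependency updates"; then
        echo "No changes to commit!"
        exit 0
    fi

Either way the job exits early instead of pushing an empty branch; the series ultimately settles on the second idiom.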
From abf058749a8f800a4b351ef0caae32bca25044c8 Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Sat, 6 May 2023 03:38:44 +0000 Subject: [PATCH 045/162] Make sure the index is synced before diff-index --- .github/workflows/autodeps.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/autodeps.yml b/.github/workflows/autodeps.yml index 9611cb9f1c..5e6327963b 100644 --- a/.github/workflows/autodeps.yml +++ b/.github/workflows/autodeps.yml @@ -39,6 +39,9 @@ jobs: env: GH_TOKEN: ${{ github.token }} run: | + # git diff-index needs a git update-index, which can be done more concisely with `git add`! + git add . + # https://stackoverflow.com/a/3879077/4504950 if git diff-index --exit-code HEAD; then echo "No changes to commit!" From 4ed742d2b99f0526e5a6ed81aa2969d9d62458da Mon Sep 17 00:00:00 2001 From: Jordan Speicher Date: Tue, 9 May 2023 15:06:52 -0500 Subject: [PATCH 046/162] docs: Fix library URL for flake8-trio --- docs/source/awesome-trio-libraries.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/awesome-trio-libraries.rst b/docs/source/awesome-trio-libraries.rst index 7aca473dfd..b3174c97a2 100644 --- a/docs/source/awesome-trio-libraries.rst +++ b/docs/source/awesome-trio-libraries.rst @@ -100,7 +100,7 @@ Tools and Utilities ------------------- * `trio-typing `__ - Type hints for Trio and related projects. * `trio-util `__ - An assortment of utilities for the Trio async/await framework. -* `flake8-trio `__ - Highly opinionated linter for various sorts of problems in Trio and/or AnyIO. Can run as a flake8 plugin, or standalone with support for autofixing some errors. +* `flake8-trio `__ - Highly opinionated linter for various sorts of problems in Trio and/or AnyIO. Can run as a flake8 plugin, or standalone with support for autofixing some errors. * `tricycle `__ - This is a library of interesting-but-maybe-not-yet-fully-proven extensions to Trio. * `tenacity `__ - Retrying library for Python with async/await support. * `perf-timer `__ - A code timer with Trio async support (see ``TrioPerfTimer``). Collects execution time of a block of code excluding time when the coroutine isn't scheduled, such as during blocking I/O and sleep. Also offers ``trio_perf_counter()`` for low-level timing. From 6582a2466977799a273b2c6fed83ff0ef0dd716c Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Wed, 10 May 2023 12:12:45 +0000 Subject: [PATCH 047/162] PR review --- .github/workflows/autodeps.yml | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/.github/workflows/autodeps.yml b/.github/workflows/autodeps.yml index 5e6327963b..37a9c4eeab 100644 --- a/.github/workflows/autodeps.yml +++ b/.github/workflows/autodeps.yml @@ -38,21 +38,17 @@ jobs: - name: Commit changes and create automerge PR env: GH_TOKEN: ${{ github.token }} - run: | - # git diff-index needs a git update-index, which can be done more concisely with `git add`! - git add . - - # https://stackoverflow.com/a/3879077/4504950 - if git diff-index --exit-code HEAD; then - echo "No changes to commit!" - exit 0 - fi - + run: | # setup git repo git switch --force-create autodeps/bump_from_${GITHUB_SHA:0:6} git config user.name 'github-actions[bot]' git config user.email '41898282+github-actions[bot]@users.noreply.github.com' - git commit -am "Dependency updates" + + if ! commit -am "Dependency updates"; then + echo "No changes to commit!" 
+ exit 0 + fi + git push --force --set-upstream origin autodeps/bump_from_${GITHUB_SHA:0:6} # git push returns before github is ready for a pr, so we poll until success From d0d8d618157c5b9270f7f012ba809118cfc6ce9f Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Wed, 10 May 2023 12:14:36 +0000 Subject: [PATCH 048/162] Remove trailing whitespace --- .github/workflows/autodeps.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/autodeps.yml b/.github/workflows/autodeps.yml index 37a9c4eeab..3e9917dcaa 100644 --- a/.github/workflows/autodeps.yml +++ b/.github/workflows/autodeps.yml @@ -38,7 +38,7 @@ jobs: - name: Commit changes and create automerge PR env: GH_TOKEN: ${{ github.token }} - run: | + run: | # setup git repo git switch --force-create autodeps/bump_from_${GITHUB_SHA:0:6} git config user.name 'github-actions[bot]' From 6e736c1ad268222cfaa13a69a868536671c248d7 Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Sat, 13 May 2023 06:01:43 +0000 Subject: [PATCH 049/162] Readd accidental "git" delete --- .github/workflows/autodeps.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/autodeps.yml b/.github/workflows/autodeps.yml index 3e9917dcaa..40cf05726c 100644 --- a/.github/workflows/autodeps.yml +++ b/.github/workflows/autodeps.yml @@ -44,7 +44,7 @@ jobs: git config user.name 'github-actions[bot]' git config user.email '41898282+github-actions[bot]@users.noreply.github.com' - if ! commit -am "Dependency updates"; then + if ! git commit -am "Dependency updates"; then echo "No changes to commit!" exit 0 fi From 855f5fd72e9e126b8dae2348a918427d7a1f38cd Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Tue, 16 May 2023 14:09:02 +0200 Subject: [PATCH 050/162] Add stubs for wrapped methods to trio.Path, add tests for mypy seeing exported symbols, and test for seeing class members (#2631) * Make methods in trio.Path visible to static tools, add tests for mypy seeing exported symbols, and tests for seeing class members. --- trio/_path.py | 95 +++++++++++++-- trio/_path.pyi | 1 - trio/tests/test_exports.py | 242 +++++++++++++++++++++++++++++++++++-- 3 files changed, 319 insertions(+), 19 deletions(-) delete mode 100644 trio/_path.pyi diff --git a/trio/_path.py b/trio/_path.py index ea8cf98c34..7c338dbc97 100644 --- a/trio/_path.py +++ b/trio/_path.py @@ -1,12 +1,12 @@ -# type: ignore - -from functools import wraps, partial import os -import types import pathlib +import sys +import types +from functools import partial, wraps +from typing import TYPE_CHECKING, Awaitable, Callable, TypeVar, Any import trio -from trio._util import async_wraps, Final +from trio._util import Final, async_wraps # re-wrap return value from methods that return new instances of pathlib.Path @@ -156,11 +156,16 @@ class Path(metaclass=AsyncAutoWrapperType): def __init__(self, *args): self._wrapped = pathlib.Path(*args) - def __getattr__(self, name): - if name in self._forward: - value = getattr(self._wrapped, name) - return rewrap_path(value) - raise AttributeError(name) + # type checkers allow accessing any attributes on class instances with `__getattr__` + # so we hide it behind a type guard forcing it to rely on the hardcoded attribute + # list below. 
+ if not TYPE_CHECKING: + + def __getattr__(self, name): + if name in self._forward: + value = getattr(self._wrapped, name) + return rewrap_path(value) + raise AttributeError(name) def __dir__(self): return super().__dir__() + self._forward @@ -182,6 +187,74 @@ async def open(self, *args, **kwargs): value = await trio.to_thread.run_sync(func) return trio.wrap_file(value) + if TYPE_CHECKING: + # the dunders listed in _forward_magic that aren't seen otherwise + __bytes__ = pathlib.Path.__bytes__ + __truediv__ = pathlib.Path.__truediv__ + __rtruediv__ = pathlib.Path.__rtruediv__ + + # These should be fully typed, either manually or with some magic wrapper + # function that copies the type of pathlib.Path except sticking an async in + # front of all of them. The latter is unfortunately not trivial, see attempts in + # https://github.com/python-trio/trio/issues/2630 + + # wrapped methods handled by __getattr__ + absolute: Any + as_posix: Any + as_uri: Any + chmod: Any + cwd: Any + exists: Any + expanduser: Any + glob: Any + home: Any + is_absolute: Any + is_block_device: Any + is_char_device: Any + is_dir: Any + is_fifo: Any + is_file: Any + is_reserved: Any + is_socket: Any + is_symlink: Any + iterdir: Any + joinpath: Any + lchmod: Any + lstat: Any + match: Any + mkdir: Any + read_bytes: Any + read_text: Any + relative_to: Any + rename: Any + replace: Any + resolve: Any + rglob: Any + rmdir: Any + samefile: Any + stat: Any + symlink_to: Any + touch: Any + unlink: Any + with_name: Any + with_suffix: Any + write_bytes: Any + write_text: Any + + if sys.platform != "win32": + group: Any + is_mount: Any + owner: Any + + if sys.version_info >= (3, 8): + link_to: Any + if sys.version_info >= (3, 9): + is_relative_to: Any + with_stem: Any + readlink: Any + if sys.version_info >= (3, 10): + hardlink_to: Any + Path.iterdir.__doc__ = """ Like :meth:`pathlib.Path.iterdir`, but async. @@ -203,4 +276,6 @@ async def open(self, *args, **kwargs): # sense than inventing our own special docstring for this. del Path.absolute.__doc__ +# TODO: This is likely not supported by all the static tools out there, see discussion in +# https://github.com/python-trio/trio/pull/2631#discussion_r1185612528 os.PathLike.register(Path) diff --git a/trio/_path.pyi b/trio/_path.pyi deleted file mode 100644 index 85a8e1f960..0000000000 --- a/trio/_path.pyi +++ /dev/null @@ -1 +0,0 @@ -class Path: ... diff --git a/trio/tests/test_exports.py b/trio/tests/test_exports.py index 026d6f5efa..8eb1131ee4 100644 --- a/trio/tests/test_exports.py +++ b/trio/tests/test_exports.py @@ -1,17 +1,22 @@ +import enum +import importlib +import inspect import re +import socket as stdlib_socket import sys -import importlib import types -import inspect -import enum +from pathlib import Path +from types import ModuleType +from typing import Any, Iterable import pytest import trio import trio.testing +from trio.tests.conftest import RUN_SLOW -from .. import _core -from .. import _util +from .. 
import _core, _util +from .._core.tests.tutil import slow def test_core_is_properly_reexported(): @@ -40,7 +45,7 @@ def public_modules(module): continue if not class_.__name__.startswith(module.__name__): # pragma: no cover continue - if class_ is module: + if class_ is module: # pragma: no cover continue # We should rename the trio.tests module (#274), but until then we use # a special-case hack: @@ -65,12 +70,12 @@ def public_modules(module): reason="skip static introspection tools on Python dev/alpha releases", ) @pytest.mark.parametrize("modname", PUBLIC_MODULE_NAMES) -@pytest.mark.parametrize("tool", ["pylint", "jedi"]) +@pytest.mark.parametrize("tool", ["pylint", "jedi", "mypy"]) @pytest.mark.filterwarnings( # https://github.com/pypa/setuptools/issues/3274 "ignore:module 'sre_constants' is deprecated:DeprecationWarning", ) -def test_static_tool_sees_all_symbols(tool, modname): +def test_static_tool_sees_all_symbols(tool, modname, tmpdir): module = importlib.import_module(modname) def no_underscores(symbols): @@ -96,6 +101,37 @@ def no_underscores(symbols): script = jedi.Script(f"import {modname}; {modname}.") completions = script.complete() static_names = no_underscores(c.name for c in completions) + elif tool == "mypy": + if not RUN_SLOW: # pragma: no cover + pytest.skip("use --run-slow to check against mypy") + if sys.implementation.name != "cpython": + pytest.skip("mypy not installed in tests on pypy") + + # create py.typed file + py_typed_path = Path(trio.__file__).parent / "py.typed" + py_typed_exists = py_typed_path.exists() + if not py_typed_exists: # pragma: no cover + py_typed_path.write_text("") + + # mypy behaves strangely when passed a huge semicolon-separated line with `-c` + # so we use a tmpfile + tmpfile = tmpdir / "check_mypy.py" + tmpfile.write_text( + f"import {modname}\n" + + "".join(f"{modname}.{name}\n" for name in runtime_names), + encoding="utf8", + ) + from mypy.api import run + + res = run(["--config-file=", "--follow-imports=silent", str(tmpfile)]) + + # clean up created py.typed file + if not py_typed_exists: # pragma: no cover + py_typed_path.unlink() + + # check that there were no errors (exit code 0), otherwise print the errors + assert res[2] == 0, res[0] + return else: # pragma: no cover assert False @@ -114,6 +150,196 @@ def no_underscores(symbols): assert False +# this could be sped up by only invoking mypy once per module, or even once for all +# modules, instead of once per class. +@slow +# see comment on test_static_tool_sees_all_symbols +@pytest.mark.redistributors_should_skip +# pylint/jedi often have trouble with alpha releases, where Python's internals +# are in flux, grammar may not have settled down, etc. 
+@pytest.mark.skipif( + sys.version_info.releaselevel == "alpha", + reason="skip static introspection tools on Python dev/alpha releases", +) +@pytest.mark.parametrize("module_name", PUBLIC_MODULE_NAMES) +@pytest.mark.parametrize("tool", ["jedi", "mypy"]) +def test_static_tool_sees_class_members(tool, module_name, tmpdir) -> None: + module = PUBLIC_MODULES[PUBLIC_MODULE_NAMES.index(module_name)] + + # ignore hidden, but not dunder, symbols + def no_hidden(symbols): + return { + symbol + for symbol in symbols + if (not symbol.startswith("_")) or symbol.startswith("__") + } + + py_typed_path = Path(trio.__file__).parent / "py.typed" + py_typed_exists = py_typed_path.exists() + + if tool == "mypy": + if sys.implementation.name != "cpython": + pytest.skip("mypy not installed in tests on pypy") + # create py.typed file + # not marked with no-cover pragma, remove this logic when trio is marked + # with py.typed proper + if not py_typed_exists: + py_typed_path.write_text("") + + errors: dict[str, object] = {} + for class_name, class_ in module.__dict__.items(): + if not isinstance(class_, type): + continue + if module_name == "trio.socket" and class_name in dir(stdlib_socket): + continue + # Deprecated classes are exported with a leading underscore + # We don't care about errors in _MultiError as that's on its way out anyway + if class_name.startswith("_"): # pragma: no cover + continue + + # dir() and inspect.getmembers doesn't display properties from the metaclass + # also ignore some dunder methods that tend to differ but are of no consequence + ignore_names = set(dir(type(class_))) | { + "__annotations__", + "__attrs_attrs__", + "__attrs_own_setattr__", + "__class_getitem__", + "__getstate__", + "__match_args__", + "__order__", + "__orig_bases__", + "__parameters__", + "__setstate__", + "__slots__", + "__weakref__", + } + + # pypy seems to have some additional dunders that differ + if sys.implementation.name == "pypy": + ignore_names |= { + "__basicsize__", + "__dictoffset__", + "__itemsize__", + "__sizeof__", + "__weakrefoffset__", + "__unicode__", + } + + # inspect.getmembers sees `name` and `value` in Enums, otherwise + # it behaves the same way as `dir` + # runtime_names = no_underscores(dir(class_)) + runtime_names = ( + no_hidden(x[0] for x in inspect.getmembers(class_)) - ignore_names + ) + + if tool == "jedi": + import jedi + + script = jedi.Script( + f"from {module_name} import {class_name}; {class_name}." + ) + completions = script.complete() + static_names = no_hidden(c.name for c in completions) - ignore_names + + missing = runtime_names - static_names + extra = static_names - runtime_names + if BaseException in class_.__mro__ and sys.version_info > (3, 11): + missing.remove("add_note") + + # TODO: why is this? Is it a problem? 
+ # see https://github.com/python-trio/trio/pull/2631#discussion_r1185615916 + if class_ == trio.StapledStream: + extra.remove("receive_stream") + extra.remove("send_stream") + + # intentionally hidden behind type guard + if class_ == trio.Path: + missing.remove("__getattr__") + + if missing or extra: # pragma: no cover + errors[f"{module_name}.{class_name}"] = { + "missing": missing, + "extra": extra, + } + elif tool == "mypy": + tmpfile = tmpdir / "check_mypy.py" + sorted_runtime_names = list(sorted(runtime_names)) + content = f"from {module_name} import {class_name}\n" + "".join( + f"{class_name}.{name}\n" for name in sorted_runtime_names + ) + tmpfile.write_text(content, encoding="utf8") + from mypy.api import run + + res = run( + [ + "--config-file=", + "--follow-imports=silent", + "--disable-error-code=operator", + "--soft-error-limit=-1", + "--no-error-summary", + str(tmpfile), + ] + ) + # no errors + if res[2] == 0: + continue + + # get each line of output, containing an error for a symbol, + # stripping of trailing newline + it = iter(res[0].split("\n")[:-1]) + for output_line in it: + # split out the three last fields to not have problems with windows + # drives or other paths with any `:` + _, line, error_type, message = output_line.rsplit(":", 3) + + # -2 due to lines being 1-indexed and to skip the import line + symbol = ( + f"{module_name}.{class_name}." + sorted_runtime_names[int(line) - 2] + ) + + # The POSIX-only attributes get listed in `dir(trio.Path)` since + # they're in `dir(pathlib.Path)` on win32 cpython. This should *maybe* + # be fixed in the future, but for now we ignore it. + if ( + symbol + in ("trio.Path.group", "trio.Path.owner", "trio.Path.is_mount") + and sys.platform == "win32" + and sys.implementation.name == "cpython" + ): + continue + + # intentionally hidden from type checkers, lest they accept any attribute + if symbol == "trio.Path.__getattr__": + continue + + # a bunch of symbols have this error, e.g. trio.lowlevel.Task.context + # It's not a problem: it's just complaining we're accessing + # instance-only attributes on a class! + # See this test for a minimized version that causes this error: + # https://github.com/python/mypy/blob/c517b86b9ba7487e7758f187cf31478e7aeaad47/test-data/unit/check-slots.test#L515-L523. 
+ + if "conflicts with class variable access" in message: + continue + + errors[symbol] = error_type + ":" + message # pragma: no cover + + else: # pragma: no cover + assert False, "unknown tool" + + # clean up created py.typed file + if tool == "mypy" and not py_typed_exists: + py_typed_path.unlink() + + # `assert not errors` will not print the full content of errors, even with + # `--verbose`, so we manually print it + if errors: # pragma: no cover + from pprint import pprint + + print(f"\n{tool} can't see the following symbols in {module_name}:") + pprint(errors) + assert not errors + + def test_classes_are_final(): for module in PUBLIC_MODULES: for name, class_ in module.__dict__.items(): From dc37b792eda68cd6ee1240fe2984290adb8e9eca Mon Sep 17 00:00:00 2001 From: jakkdl Date: Wed, 5 Apr 2023 15:22:05 +0200 Subject: [PATCH 051/162] move **/tests/ to **/_tests/ --- .coveragerc | 2 +- MANIFEST.in | 2 +- ci.sh | 4 +-- trio/_core/{tests => _tests}/__init__.py | 0 trio/_core/{tests => _tests}/test_asyncgen.py | 0 .../{tests => _tests}/test_guest_mode.py | 0 .../{tests => _tests}/test_instrumentation.py | 0 trio/_core/{tests => _tests}/test_io.py | 0 trio/_core/{tests => _tests}/test_ki.py | 0 trio/_core/{tests => _tests}/test_local.py | 0 .../{tests => _tests}/test_mock_clock.py | 0 .../{tests => _tests}/test_multierror.py | 0 .../test_multierror_scripts/__init__.py | 0 .../test_multierror_scripts/_common.py | 0 .../apport_excepthook.py | 0 .../ipython_custom_exc.py | 0 .../simple_excepthook.py | 0 .../simple_excepthook_IPython.py | 0 .../{tests => _tests}/test_parking_lot.py | 0 trio/_core/{tests => _tests}/test_run.py | 0 .../{tests => _tests}/test_thread_cache.py | 0 trio/_core/{tests => _tests}/test_tutil.py | 0 .../{tests => _tests}/test_unbounded_queue.py | 0 trio/_core/{tests => _tests}/test_util.py | 0 trio/_core/{tests => _tests}/test_windows.py | 0 trio/_core/{tests => _tests}/tutil.py | 4 +-- trio/_core/tests/conftest.py | 25 ------------------ trio/{tests => _tests}/__init__.py | 0 .../astrill-codesigning-cert.cer | Bin .../module_with_deprecations.py | 0 .../conftest.py => _tests/pytest_plugin.py} | 2 +- trio/{tests => _tests}/test_abc.py | 0 trio/{tests => _tests}/test_channel.py | 0 trio/{tests => _tests}/test_contextvars.py | 0 trio/{tests => _tests}/test_deprecate.py | 0 trio/{tests => _tests}/test_dtls.py | 2 +- trio/{tests => _tests}/test_exports.py | 11 +------- trio/{tests => _tests}/test_fakenet.py | 0 trio/{tests => _tests}/test_file_io.py | 0 .../test_highlevel_generic.py | 0 .../test_highlevel_open_tcp_listeners.py | 2 +- .../test_highlevel_open_tcp_stream.py | 0 .../test_highlevel_open_unix_stream.py | 0 .../test_highlevel_serve_listeners.py | 0 .../test_highlevel_socket.py | 0 .../test_highlevel_ssl_helpers.py | 0 trio/{tests => _tests}/test_path.py | 0 .../test_scheduler_determinism.py | 0 trio/{tests => _tests}/test_signals.py | 0 trio/{tests => _tests}/test_socket.py | 2 +- trio/{tests => _tests}/test_ssl.py | 2 +- trio/{tests => _tests}/test_subprocess.py | 2 +- trio/{tests => _tests}/test_sync.py | 0 trio/{tests => _tests}/test_testing.py | 2 +- trio/{tests => _tests}/test_threads.py | 8 +++--- trio/{tests => _tests}/test_timeouts.py | 2 +- trio/{tests => _tests}/test_tracing.py | 0 trio/{tests => _tests}/test_unix_pipes.py | 2 +- trio/{tests => _tests}/test_util.py | 2 +- .../{tests => _tests}/test_wait_for_object.py | 2 +- trio/{tests => _tests}/test_windows_pipes.py | 2 +- trio/{tests => _tests}/tools/__init__.py | 0 .../tools/test_gen_exports.py | 0 
trio/tests.py | 10 +++++++ 64 files changed, 33 insertions(+), 57 deletions(-) rename trio/_core/{tests => _tests}/__init__.py (100%) rename trio/_core/{tests => _tests}/test_asyncgen.py (100%) rename trio/_core/{tests => _tests}/test_guest_mode.py (100%) rename trio/_core/{tests => _tests}/test_instrumentation.py (100%) rename trio/_core/{tests => _tests}/test_io.py (100%) rename trio/_core/{tests => _tests}/test_ki.py (100%) rename trio/_core/{tests => _tests}/test_local.py (100%) rename trio/_core/{tests => _tests}/test_mock_clock.py (100%) rename trio/_core/{tests => _tests}/test_multierror.py (100%) rename trio/_core/{tests => _tests}/test_multierror_scripts/__init__.py (100%) rename trio/_core/{tests => _tests}/test_multierror_scripts/_common.py (100%) rename trio/_core/{tests => _tests}/test_multierror_scripts/apport_excepthook.py (100%) rename trio/_core/{tests => _tests}/test_multierror_scripts/ipython_custom_exc.py (100%) rename trio/_core/{tests => _tests}/test_multierror_scripts/simple_excepthook.py (100%) rename trio/_core/{tests => _tests}/test_multierror_scripts/simple_excepthook_IPython.py (100%) rename trio/_core/{tests => _tests}/test_parking_lot.py (100%) rename trio/_core/{tests => _tests}/test_run.py (100%) rename trio/_core/{tests => _tests}/test_thread_cache.py (100%) rename trio/_core/{tests => _tests}/test_tutil.py (100%) rename trio/_core/{tests => _tests}/test_unbounded_queue.py (100%) rename trio/_core/{tests => _tests}/test_util.py (100%) rename trio/_core/{tests => _tests}/test_windows.py (100%) rename trio/_core/{tests => _tests}/tutil.py (97%) delete mode 100644 trio/_core/tests/conftest.py rename trio/{tests => _tests}/__init__.py (100%) rename trio/{tests => _tests}/astrill-codesigning-cert.cer (100%) rename trio/{tests => _tests}/module_with_deprecations.py (100%) rename trio/{tests/conftest.py => _tests/pytest_plugin.py} (93%) rename trio/{tests => _tests}/test_abc.py (100%) rename trio/{tests => _tests}/test_channel.py (100%) rename trio/{tests => _tests}/test_contextvars.py (100%) rename trio/{tests => _tests}/test_deprecate.py (100%) rename trio/{tests => _tests}/test_dtls.py (99%) rename trio/{tests => _tests}/test_exports.py (97%) rename trio/{tests => _tests}/test_fakenet.py (100%) rename trio/{tests => _tests}/test_file_io.py (100%) rename trio/{tests => _tests}/test_highlevel_generic.py (100%) rename trio/{tests => _tests}/test_highlevel_open_tcp_listeners.py (99%) rename trio/{tests => _tests}/test_highlevel_open_tcp_stream.py (100%) rename trio/{tests => _tests}/test_highlevel_open_unix_stream.py (100%) rename trio/{tests => _tests}/test_highlevel_serve_listeners.py (100%) rename trio/{tests => _tests}/test_highlevel_socket.py (100%) rename trio/{tests => _tests}/test_highlevel_ssl_helpers.py (100%) rename trio/{tests => _tests}/test_path.py (100%) rename trio/{tests => _tests}/test_scheduler_determinism.py (100%) rename trio/{tests => _tests}/test_signals.py (100%) rename trio/{tests => _tests}/test_socket.py (99%) rename trio/{tests => _tests}/test_ssl.py (99%) rename trio/{tests => _tests}/test_subprocess.py (99%) rename trio/{tests => _tests}/test_sync.py (100%) rename trio/{tests => _tests}/test_testing.py (99%) rename trio/{tests => _tests}/test_threads.py (99%) rename trio/{tests => _tests}/test_timeouts.py (98%) rename trio/{tests => _tests}/test_tracing.py (100%) rename trio/{tests => _tests}/test_unix_pipes.py (99%) rename trio/{tests => _tests}/test_util.py (99%) rename trio/{tests => _tests}/test_wait_for_object.py (99%) rename 
trio/{tests => _tests}/test_windows_pipes.py (98%) rename trio/{tests => _tests}/tools/__init__.py (100%) rename trio/{tests => _tests}/tools/test_gen_exports.py (100%) create mode 100644 trio/tests.py diff --git a/.coveragerc b/.coveragerc index 4bbac7b27d..83a7cf0647 100644 --- a/.coveragerc +++ b/.coveragerc @@ -6,7 +6,7 @@ source=trio omit= setup.py */ipython_custom_exc.py -# Omit the generated files in trio/_core starting with _public_ +# Omit the generated files in trio/_core starting with _generated_ */trio/_core/_generated_* # The test suite spawns subprocesses to test some stuff, so make sure # this doesn't corrupt the coverage files diff --git a/MANIFEST.in b/MANIFEST.in index e2fd4c157f..8b92523fb7 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,6 +2,6 @@ include LICENSE LICENSE.MIT LICENSE.APACHE2 include README.rst include CODE_OF_CONDUCT.md CONTRIBUTING.md include test-requirements.txt -recursive-include trio/tests/test_ssl_certs *.pem +recursive-include trio/_tests/test_ssl_certs *.pem recursive-include docs * prune docs/build diff --git a/ci.sh b/ci.sh index 2fc44c0b45..3cd78cfb18 100755 --- a/ci.sh +++ b/ci.sh @@ -81,7 +81,7 @@ else # when installing, and then running 'certmgr.msc' and exporting the # certificate. See: # http://www.migee.com/2010/09/24/solution-for-unattendedsilent-installs-and-would-you-like-to-install-this-device-software/ - certutil -addstore "TrustedPublisher" trio/tests/astrill-codesigning-cert.cer + certutil -addstore "TrustedPublisher" trio/_tests/astrill-codesigning-cert.cer # Double-slashes are how you tell windows-bash that you want a single # slash, and don't treat this as a unix-style filename that needs to # be replaced by a windows-style filename. @@ -107,7 +107,7 @@ else # 'coverage xml' to generate the report that it uses, and that will only # apply the ignore patterns in the current directory's .coveragerc. cp ../.coveragerc . 
- if pytest -r a --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --cov="$INSTALLDIR" --verbose; then + if pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --cov="$INSTALLDIR" --verbose; then PASSED=true else PASSED=false diff --git a/trio/_core/tests/__init__.py b/trio/_core/_tests/__init__.py similarity index 100% rename from trio/_core/tests/__init__.py rename to trio/_core/_tests/__init__.py diff --git a/trio/_core/tests/test_asyncgen.py b/trio/_core/_tests/test_asyncgen.py similarity index 100% rename from trio/_core/tests/test_asyncgen.py rename to trio/_core/_tests/test_asyncgen.py diff --git a/trio/_core/tests/test_guest_mode.py b/trio/_core/_tests/test_guest_mode.py similarity index 100% rename from trio/_core/tests/test_guest_mode.py rename to trio/_core/_tests/test_guest_mode.py diff --git a/trio/_core/tests/test_instrumentation.py b/trio/_core/_tests/test_instrumentation.py similarity index 100% rename from trio/_core/tests/test_instrumentation.py rename to trio/_core/_tests/test_instrumentation.py diff --git a/trio/_core/tests/test_io.py b/trio/_core/_tests/test_io.py similarity index 100% rename from trio/_core/tests/test_io.py rename to trio/_core/_tests/test_io.py diff --git a/trio/_core/tests/test_ki.py b/trio/_core/_tests/test_ki.py similarity index 100% rename from trio/_core/tests/test_ki.py rename to trio/_core/_tests/test_ki.py diff --git a/trio/_core/tests/test_local.py b/trio/_core/_tests/test_local.py similarity index 100% rename from trio/_core/tests/test_local.py rename to trio/_core/_tests/test_local.py diff --git a/trio/_core/tests/test_mock_clock.py b/trio/_core/_tests/test_mock_clock.py similarity index 100% rename from trio/_core/tests/test_mock_clock.py rename to trio/_core/_tests/test_mock_clock.py diff --git a/trio/_core/tests/test_multierror.py b/trio/_core/_tests/test_multierror.py similarity index 100% rename from trio/_core/tests/test_multierror.py rename to trio/_core/_tests/test_multierror.py diff --git a/trio/_core/tests/test_multierror_scripts/__init__.py b/trio/_core/_tests/test_multierror_scripts/__init__.py similarity index 100% rename from trio/_core/tests/test_multierror_scripts/__init__.py rename to trio/_core/_tests/test_multierror_scripts/__init__.py diff --git a/trio/_core/tests/test_multierror_scripts/_common.py b/trio/_core/_tests/test_multierror_scripts/_common.py similarity index 100% rename from trio/_core/tests/test_multierror_scripts/_common.py rename to trio/_core/_tests/test_multierror_scripts/_common.py diff --git a/trio/_core/tests/test_multierror_scripts/apport_excepthook.py b/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py similarity index 100% rename from trio/_core/tests/test_multierror_scripts/apport_excepthook.py rename to trio/_core/_tests/test_multierror_scripts/apport_excepthook.py diff --git a/trio/_core/tests/test_multierror_scripts/ipython_custom_exc.py b/trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py similarity index 100% rename from trio/_core/tests/test_multierror_scripts/ipython_custom_exc.py rename to trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py diff --git a/trio/_core/tests/test_multierror_scripts/simple_excepthook.py b/trio/_core/_tests/test_multierror_scripts/simple_excepthook.py similarity index 100% rename from trio/_core/tests/test_multierror_scripts/simple_excepthook.py rename to trio/_core/_tests/test_multierror_scripts/simple_excepthook.py diff --git 
a/trio/_core/tests/test_multierror_scripts/simple_excepthook_IPython.py b/trio/_core/_tests/test_multierror_scripts/simple_excepthook_IPython.py similarity index 100% rename from trio/_core/tests/test_multierror_scripts/simple_excepthook_IPython.py rename to trio/_core/_tests/test_multierror_scripts/simple_excepthook_IPython.py diff --git a/trio/_core/tests/test_parking_lot.py b/trio/_core/_tests/test_parking_lot.py similarity index 100% rename from trio/_core/tests/test_parking_lot.py rename to trio/_core/_tests/test_parking_lot.py diff --git a/trio/_core/tests/test_run.py b/trio/_core/_tests/test_run.py similarity index 100% rename from trio/_core/tests/test_run.py rename to trio/_core/_tests/test_run.py diff --git a/trio/_core/tests/test_thread_cache.py b/trio/_core/_tests/test_thread_cache.py similarity index 100% rename from trio/_core/tests/test_thread_cache.py rename to trio/_core/_tests/test_thread_cache.py diff --git a/trio/_core/tests/test_tutil.py b/trio/_core/_tests/test_tutil.py similarity index 100% rename from trio/_core/tests/test_tutil.py rename to trio/_core/_tests/test_tutil.py diff --git a/trio/_core/tests/test_unbounded_queue.py b/trio/_core/_tests/test_unbounded_queue.py similarity index 100% rename from trio/_core/tests/test_unbounded_queue.py rename to trio/_core/_tests/test_unbounded_queue.py diff --git a/trio/_core/tests/test_util.py b/trio/_core/_tests/test_util.py similarity index 100% rename from trio/_core/tests/test_util.py rename to trio/_core/_tests/test_util.py diff --git a/trio/_core/tests/test_windows.py b/trio/_core/_tests/test_windows.py similarity index 100% rename from trio/_core/tests/test_windows.py rename to trio/_core/_tests/test_windows.py diff --git a/trio/_core/tests/tutil.py b/trio/_core/_tests/tutil.py similarity index 97% rename from trio/_core/tests/tutil.py rename to trio/_core/_tests/tutil.py index 74760df38f..dc9a4f486d 100644 --- a/trio/_core/tests/tutil.py +++ b/trio/_core/_tests/tutil.py @@ -12,8 +12,8 @@ import gc -# See trio/tests/conftest.py for the other half of this -from trio.tests.conftest import RUN_SLOW +# See trio/_tests/conftest.py for the other half of this +from trio._tests.pytest_plugin import RUN_SLOW slow = pytest.mark.skipif(not RUN_SLOW, reason="use --run-slow to run slow tests") diff --git a/trio/_core/tests/conftest.py b/trio/_core/tests/conftest.py deleted file mode 100644 index aca1f98a65..0000000000 --- a/trio/_core/tests/conftest.py +++ /dev/null @@ -1,25 +0,0 @@ -import pytest -import inspect - -# XX this should move into a global something -from ...testing import MockClock, trio_test - - -@pytest.fixture -def mock_clock(): - return MockClock() - - -@pytest.fixture -def autojump_clock(): - return MockClock(autojump_threshold=0) - - -# FIXME: split off into a package (or just make part of Trio's public -# interface?), with config file to enable? and I guess a mark option too; I -# guess it's useful with the class- and file-level marking machinery (where -# the raw @trio_test decorator isn't enough). 
-@pytest.hookimpl(tryfirst=True) -def pytest_pyfunc_call(pyfuncitem): - if inspect.iscoroutinefunction(pyfuncitem.obj): - pyfuncitem.obj = trio_test(pyfuncitem.obj) diff --git a/trio/tests/__init__.py b/trio/_tests/__init__.py similarity index 100% rename from trio/tests/__init__.py rename to trio/_tests/__init__.py diff --git a/trio/tests/astrill-codesigning-cert.cer b/trio/_tests/astrill-codesigning-cert.cer similarity index 100% rename from trio/tests/astrill-codesigning-cert.cer rename to trio/_tests/astrill-codesigning-cert.cer diff --git a/trio/tests/module_with_deprecations.py b/trio/_tests/module_with_deprecations.py similarity index 100% rename from trio/tests/module_with_deprecations.py rename to trio/_tests/module_with_deprecations.py diff --git a/trio/tests/conftest.py b/trio/_tests/pytest_plugin.py similarity index 93% rename from trio/tests/conftest.py rename to trio/_tests/pytest_plugin.py index 772486e1eb..7ed62a1c9f 100644 --- a/trio/tests/conftest.py +++ b/trio/_tests/pytest_plugin.py @@ -1,5 +1,5 @@ # XX this does not belong here -- b/c it's here, these things only apply to -# the tests in trio/_core/tests, not in trio/tests. For now there's some +# the tests in trio/_core/_tests, not in trio/_tests. For now there's some # copy-paste... # # this stuff should become a proper pytest plugin diff --git a/trio/tests/test_abc.py b/trio/_tests/test_abc.py similarity index 100% rename from trio/tests/test_abc.py rename to trio/_tests/test_abc.py diff --git a/trio/tests/test_channel.py b/trio/_tests/test_channel.py similarity index 100% rename from trio/tests/test_channel.py rename to trio/_tests/test_channel.py diff --git a/trio/tests/test_contextvars.py b/trio/_tests/test_contextvars.py similarity index 100% rename from trio/tests/test_contextvars.py rename to trio/_tests/test_contextvars.py diff --git a/trio/tests/test_deprecate.py b/trio/_tests/test_deprecate.py similarity index 100% rename from trio/tests/test_deprecate.py rename to trio/_tests/test_deprecate.py diff --git a/trio/tests/test_dtls.py b/trio/_tests/test_dtls.py similarity index 99% rename from trio/tests/test_dtls.py rename to trio/_tests/test_dtls.py index 1420ea43d1..445ea4d1fd 100644 --- a/trio/tests/test_dtls.py +++ b/trio/_tests/test_dtls.py @@ -11,7 +11,7 @@ from OpenSSL import SSL from trio.testing._fake_net import FakeNet -from .._core.tests.tutil import slow, binds_ipv6, gc_collect_harder +from .._core._tests.tutil import slow, binds_ipv6, gc_collect_harder ca = trustme.CA() server_cert = ca.issue_cert("example.com") diff --git a/trio/tests/test_exports.py b/trio/_tests/test_exports.py similarity index 97% rename from trio/tests/test_exports.py rename to trio/_tests/test_exports.py index 8eb1131ee4..e2e0432d62 100644 --- a/trio/tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -24,7 +24,7 @@ def test_core_is_properly_reexported(): # three modules: sources = [trio, trio.lowlevel, trio.testing] for symbol in dir(_core): - if symbol.startswith("_") or symbol == "tests": + if symbol.startswith("_"): continue found = 0 for source in sources: @@ -47,10 +47,6 @@ def public_modules(module): continue if class_ is module: # pragma: no cover continue - # We should rename the trio.tests module (#274), but until then we use - # a special-case hack: - if class_.__name__ == "trio.tests": - continue yield from public_modules(class_) @@ -83,11 +79,6 @@ def no_underscores(symbols): runtime_names = no_underscores(dir(module)) - # We should rename the trio.tests module (#274), but until then we use a - # 
special-case hack: - if modname == "trio": - runtime_names.remove("tests") - if tool == "pylint": from pylint.lint import PyLinter diff --git a/trio/tests/test_fakenet.py b/trio/_tests/test_fakenet.py similarity index 100% rename from trio/tests/test_fakenet.py rename to trio/_tests/test_fakenet.py diff --git a/trio/tests/test_file_io.py b/trio/_tests/test_file_io.py similarity index 100% rename from trio/tests/test_file_io.py rename to trio/_tests/test_file_io.py diff --git a/trio/tests/test_highlevel_generic.py b/trio/_tests/test_highlevel_generic.py similarity index 100% rename from trio/tests/test_highlevel_generic.py rename to trio/_tests/test_highlevel_generic.py diff --git a/trio/tests/test_highlevel_open_tcp_listeners.py b/trio/_tests/test_highlevel_open_tcp_listeners.py similarity index 99% rename from trio/tests/test_highlevel_open_tcp_listeners.py rename to trio/_tests/test_highlevel_open_tcp_listeners.py index 0c38b4ca69..4942f94d38 100644 --- a/trio/tests/test_highlevel_open_tcp_listeners.py +++ b/trio/_tests/test_highlevel_open_tcp_listeners.py @@ -11,7 +11,7 @@ from trio import open_tcp_listeners, serve_tcp, SocketListener, open_tcp_stream from trio.testing import open_stream_to_socket_listener from .. import socket as tsocket -from .._core.tests.tutil import slow, creates_ipv6, binds_ipv6 +from .._core._tests.tutil import slow, creates_ipv6, binds_ipv6 if sys.version_info < (3, 11): from exceptiongroup import BaseExceptionGroup diff --git a/trio/tests/test_highlevel_open_tcp_stream.py b/trio/_tests/test_highlevel_open_tcp_stream.py similarity index 100% rename from trio/tests/test_highlevel_open_tcp_stream.py rename to trio/_tests/test_highlevel_open_tcp_stream.py diff --git a/trio/tests/test_highlevel_open_unix_stream.py b/trio/_tests/test_highlevel_open_unix_stream.py similarity index 100% rename from trio/tests/test_highlevel_open_unix_stream.py rename to trio/_tests/test_highlevel_open_unix_stream.py diff --git a/trio/tests/test_highlevel_serve_listeners.py b/trio/_tests/test_highlevel_serve_listeners.py similarity index 100% rename from trio/tests/test_highlevel_serve_listeners.py rename to trio/_tests/test_highlevel_serve_listeners.py diff --git a/trio/tests/test_highlevel_socket.py b/trio/_tests/test_highlevel_socket.py similarity index 100% rename from trio/tests/test_highlevel_socket.py rename to trio/_tests/test_highlevel_socket.py diff --git a/trio/tests/test_highlevel_ssl_helpers.py b/trio/_tests/test_highlevel_ssl_helpers.py similarity index 100% rename from trio/tests/test_highlevel_ssl_helpers.py rename to trio/_tests/test_highlevel_ssl_helpers.py diff --git a/trio/tests/test_path.py b/trio/_tests/test_path.py similarity index 100% rename from trio/tests/test_path.py rename to trio/_tests/test_path.py diff --git a/trio/tests/test_scheduler_determinism.py b/trio/_tests/test_scheduler_determinism.py similarity index 100% rename from trio/tests/test_scheduler_determinism.py rename to trio/_tests/test_scheduler_determinism.py diff --git a/trio/tests/test_signals.py b/trio/_tests/test_signals.py similarity index 100% rename from trio/tests/test_signals.py rename to trio/_tests/test_signals.py diff --git a/trio/tests/test_socket.py b/trio/_tests/test_socket.py similarity index 99% rename from trio/tests/test_socket.py rename to trio/_tests/test_socket.py index db21096fac..9aa18dc469 100644 --- a/trio/tests/test_socket.py +++ b/trio/_tests/test_socket.py @@ -8,7 +8,7 @@ import inspect import tempfile import sys as _sys -from .._core.tests.tutil import creates_ipv6, 
binds_ipv6 +from .._core._tests.tutil import creates_ipv6, binds_ipv6 from .. import _core from .. import _socket as _tsocket from .. import socket as tsocket diff --git a/trio/tests/test_ssl.py b/trio/_tests/test_ssl.py similarity index 99% rename from trio/tests/test_ssl.py rename to trio/_tests/test_ssl.py index 26e107e08f..7c63da56e8 100644 --- a/trio/tests/test_ssl.py +++ b/trio/_tests/test_ssl.py @@ -26,7 +26,7 @@ from .._ssl import SSLStream, SSLListener, NeedHandshakeError, _is_eof from .._util import ConflictDetector -from .._core.tests.tutil import slow +from .._core._tests.tutil import slow from ..testing import ( assert_checkpoints, diff --git a/trio/tests/test_subprocess.py b/trio/_tests/test_subprocess.py similarity index 99% rename from trio/tests/test_subprocess.py rename to trio/_tests/test_subprocess.py index e2d66f654d..4dfaef4c7f 100644 --- a/trio/tests/test_subprocess.py +++ b/trio/_tests/test_subprocess.py @@ -20,7 +20,7 @@ sleep, sleep_forever, ) -from .._core.tests.tutil import skip_if_fbsd_pipes_broken, slow +from .._core._tests.tutil import skip_if_fbsd_pipes_broken, slow from ..lowlevel import open_process from ..testing import assert_no_checkpoints, wait_all_tasks_blocked diff --git a/trio/tests/test_sync.py b/trio/_tests/test_sync.py similarity index 100% rename from trio/tests/test_sync.py rename to trio/_tests/test_sync.py diff --git a/trio/tests/test_testing.py b/trio/_tests/test_testing.py similarity index 99% rename from trio/tests/test_testing.py rename to trio/_tests/test_testing.py index a2dba728d5..a9cc00684d 100644 --- a/trio/tests/test_testing.py +++ b/trio/_tests/test_testing.py @@ -4,7 +4,7 @@ import pytest -from .._core.tests.tutil import can_bind_ipv6 +from .._core._tests.tutil import can_bind_ipv6 from .. import sleep from .. import _core from .._highlevel_generic import aclose_forcefully diff --git a/trio/tests/test_threads.py b/trio/_tests/test_threads.py similarity index 99% rename from trio/tests/test_threads.py rename to trio/_tests/test_threads.py index ce852d4612..21eb7b12e8 100644 --- a/trio/tests/test_threads.py +++ b/trio/_tests/test_threads.py @@ -14,8 +14,8 @@ from trio._core import TrioToken, current_trio_token from .. 
import CapacityLimiter, Event, _core, sleep -from .._core.tests.test_ki import ki_self -from .._core.tests.tutil import buggy_pypy_asyncgens +from .._core._tests.test_ki import ki_self +from .._core._tests.tutil import buggy_pypy_asyncgens from .._threads import ( current_default_thread_limiter, from_thread_run, @@ -168,7 +168,7 @@ async def main(): async def test_named_thread(): - ending = " from trio.tests.test_threads.test_named_thread" + ending = " from trio._tests.test_threads.test_named_thread" def inner(name="inner" + ending) -> threading.Thread: assert threading.current_thread().name == name @@ -248,7 +248,7 @@ def f(name: str) -> Callable[[None], threading.Thread]: return partial(inner, name) # test defaults - default = "None from trio.tests.test_threads.test_named_thread" + default = "None from trio._tests.test_threads.test_named_thread" await to_thread_run_sync(f(default)) await to_thread_run_sync(f(default), thread_name=None) diff --git a/trio/tests/test_timeouts.py b/trio/_tests/test_timeouts.py similarity index 98% rename from trio/tests/test_timeouts.py rename to trio/_tests/test_timeouts.py index 382c015b1d..c817c49588 100644 --- a/trio/tests/test_timeouts.py +++ b/trio/_tests/test_timeouts.py @@ -2,7 +2,7 @@ import pytest import time -from .._core.tests.tutil import slow +from .._core._tests.tutil import slow from .. import _core from ..testing import assert_checkpoints from .._timeouts import * diff --git a/trio/tests/test_tracing.py b/trio/_tests/test_tracing.py similarity index 100% rename from trio/tests/test_tracing.py rename to trio/_tests/test_tracing.py diff --git a/trio/tests/test_unix_pipes.py b/trio/_tests/test_unix_pipes.py similarity index 99% rename from trio/tests/test_unix_pipes.py rename to trio/_tests/test_unix_pipes.py index cf98942ea4..2109ab8dd2 100644 --- a/trio/tests/test_unix_pipes.py +++ b/trio/_tests/test_unix_pipes.py @@ -6,7 +6,7 @@ import pytest -from .._core.tests.tutil import gc_collect_harder, skip_if_fbsd_pipes_broken +from .._core._tests.tutil import gc_collect_harder, skip_if_fbsd_pipes_broken from .. import _core, move_on_after from ..testing import wait_all_tasks_blocked, check_one_way_stream diff --git a/trio/tests/test_util.py b/trio/_tests/test_util.py similarity index 99% rename from trio/tests/test_util.py rename to trio/_tests/test_util.py index 15ab09a80b..9f89a68efe 100644 --- a/trio/tests/test_util.py +++ b/trio/_tests/test_util.py @@ -5,7 +5,7 @@ import trio from .. import _core -from .._core.tests.tutil import ( +from .._core._tests.tutil import ( ignore_coroutine_never_awaited_warnings, create_asyncio_future_in_new_loop, ) diff --git a/trio/tests/test_wait_for_object.py b/trio/_tests/test_wait_for_object.py similarity index 99% rename from trio/tests/test_wait_for_object.py rename to trio/_tests/test_wait_for_object.py index 38acfa802d..54291444a0 100644 --- a/trio/tests/test_wait_for_object.py +++ b/trio/_tests/test_wait_for_object.py @@ -6,7 +6,7 @@ # Mark all the tests in this file as being windows-only pytestmark = pytest.mark.skipif(not on_windows, reason="windows only") -from .._core.tests.tutil import slow +from .._core._tests.tutil import slow import trio from .. import _core from .. 
import _timeouts diff --git a/trio/tests/test_windows_pipes.py b/trio/_tests/test_windows_pipes.py similarity index 98% rename from trio/tests/test_windows_pipes.py rename to trio/_tests/test_windows_pipes.py index 2bcc64a072..4443cc0a2f 100644 --- a/trio/tests/test_windows_pipes.py +++ b/trio/_tests/test_windows_pipes.py @@ -8,7 +8,7 @@ import pytest -from .._core.tests.tutil import gc_collect_harder +from .._core._tests.tutil import gc_collect_harder from .. import _core, move_on_after from ..testing import wait_all_tasks_blocked, check_one_way_stream diff --git a/trio/tests/tools/__init__.py b/trio/_tests/tools/__init__.py similarity index 100% rename from trio/tests/tools/__init__.py rename to trio/_tests/tools/__init__.py diff --git a/trio/tests/tools/test_gen_exports.py b/trio/_tests/tools/test_gen_exports.py similarity index 100% rename from trio/tests/tools/test_gen_exports.py rename to trio/_tests/tools/test_gen_exports.py diff --git a/trio/tests.py b/trio/tests.py new file mode 100644 index 0000000000..ad31bf6602 --- /dev/null +++ b/trio/tests.py @@ -0,0 +1,10 @@ +from ._deprecate import warn_deprecated +import trio +from ._tests import * + +warn_deprecated( + trio.tests, + "0.23.0", + instead=trio._tests, + issue="https://github.com/python-trio/trio/issues/274", +) From 26d9f6e808186726dd3a33e9d42f6301c6a80026 Mon Sep 17 00:00:00 2001 From: jakkdl Date: Mon, 17 Apr 2023 13:28:10 +0200 Subject: [PATCH 052/162] add trio.tests module that raises deprecationwarning, add tests --- trio/__init__.py | 1 + trio/_tests/pytest_plugin.py | 6 ------ trio/_tests/test_deprecate.py | 12 +++++++++++ trio/_tests/test_exports.py | 4 ++-- trio/tests.py | 40 +++++++++++++++++++++++++++++------ 5 files changed, 48 insertions(+), 15 deletions(-) diff --git a/trio/__init__.py b/trio/__init__.py index d6d2adb4bb..c561c65bb4 100644 --- a/trio/__init__.py +++ b/trio/__init__.py @@ -100,6 +100,7 @@ from . import abc from . import from_thread from . import to_thread +from . import tests # deprecated # Not imported by default, but mentioned here so static analysis tools like # pylint will know that it exists. diff --git a/trio/_tests/pytest_plugin.py b/trio/_tests/pytest_plugin.py index 7ed62a1c9f..a893b466cb 100644 --- a/trio/_tests/pytest_plugin.py +++ b/trio/_tests/pytest_plugin.py @@ -1,9 +1,3 @@ -# XX this does not belong here -- b/c it's here, these things only apply to -# the tests in trio/_core/_tests, not in trio/_tests. For now there's some -# copy-paste... -# -# this stuff should become a proper pytest plugin - import pytest import inspect diff --git a/trio/_tests/test_deprecate.py b/trio/_tests/test_deprecate.py index e5e1da8c5f..8a6b812a0a 100644 --- a/trio/_tests/test_deprecate.py +++ b/trio/_tests/test_deprecate.py @@ -241,3 +241,15 @@ def test_module_with_deprecations(recwarn_always): with pytest.raises(AttributeError): module_with_deprecations.asdf + + +def test_tests_is_deprecated() -> None: + from trio import tests + + with pytest.warns(TrioDeprecationWarning): + tests.test_abc # type: ignore[attr-defined] + + with pytest.warns(TrioDeprecationWarning): + import trio.tests + + trio.tests.test_deprecate # type: ignore[attr-defined] diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index e2e0432d62..71c32464a0 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -13,10 +13,10 @@ import trio import trio.testing -from trio.tests.conftest import RUN_SLOW from .. 
import _core, _util -from .._core.tests.tutil import slow +from .._core._tests.tutil import slow +from .pytest_plugin import RUN_SLOW def test_core_is_properly_reexported(): diff --git a/trio/tests.py b/trio/tests.py index ad31bf6602..51cb247218 100644 --- a/trio/tests.py +++ b/trio/tests.py @@ -1,10 +1,36 @@ +import importlib +import sys +from typing import Any + +# don't ask which of the _tests imports is necessary +import trio._tests + +from . import _tests from ._deprecate import warn_deprecated -import trio from ._tests import * -warn_deprecated( - trio.tests, - "0.23.0", - instead=trio._tests, - issue="https://github.com/python-trio/trio/issues/274", -) + +# This won't give deprecation warning on import, but will give a warning on use of any +# attribute in tests, and static analysis tools will also not see any content inside. +class TestsDeprecationWrapper: + __name__ = "trio.tests" + + def __getattr__(self, attr: str) -> Any: + warn_deprecated( + f"trio.tests.{attr}", + "0.24.0", + instead=f"trio._tests.{attr}", + issue="https://github.com/python-trio/trio/issues/274", + ) + + # idk if this one is necessary, apparently doesn't get triggered by codecov + # but I needed it when running tests locally at some point + if not hasattr(trio._tests, attr): # pragma: no cover + importlib.import_module(f"trio._tests.{attr}", "trio._tests") + return attr + + return getattr(trio._tests, attr) + + +# https://stackoverflow.com/questions/2447353/getattr-on-a-module +sys.modules[__name__] = TestsDeprecationWrapper() # type: ignore[assignment] From 0b5d131f00c63167f68bc90bae361122a29e36cb Mon Sep 17 00:00:00 2001 From: jakkdl Date: Thu, 18 May 2023 14:08:18 +0200 Subject: [PATCH 053/162] remove unnecessary imports, refactor tests, improve comments --- trio/_tests/test_deprecate.py | 24 ++++++++++++++++++++---- trio/tests.py | 11 +++-------- 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/trio/_tests/test_deprecate.py b/trio/_tests/test_deprecate.py index 8a6b812a0a..2a09e14b74 100644 --- a/trio/_tests/test_deprecate.py +++ b/trio/_tests/test_deprecate.py @@ -243,13 +243,29 @@ def test_module_with_deprecations(recwarn_always): module_with_deprecations.asdf -def test_tests_is_deprecated() -> None: - from trio import tests +def test_tests_is_deprecated1() -> None: + from trio import tests # no warning on import + # warning on access of any member with pytest.warns(TrioDeprecationWarning): - tests.test_abc # type: ignore[attr-defined] + assert tests.test_abc # type: ignore[attr-defined] + +def test_tests_is_deprecated2() -> None: + # warning on direct import of test since that accesses `__spec__` with pytest.warns(TrioDeprecationWarning): import trio.tests - trio.tests.test_deprecate # type: ignore[attr-defined] + with pytest.warns(TrioDeprecationWarning): + assert trio.tests.test_deprecate # type: ignore[attr-defined] + + +def test_tests_is_deprecated3() -> None: + import trio + + # no warning on accessing the submodule + assert trio.tests + + # only when accessing a submodule member + with pytest.warns(TrioDeprecationWarning): + assert trio.tests.test_abc # type: ignore[attr-defined] diff --git a/trio/tests.py b/trio/tests.py index 51cb247218..1dc0825f2c 100644 --- a/trio/tests.py +++ b/trio/tests.py @@ -2,12 +2,8 @@ import sys from typing import Any -# don't ask which of the _tests imports is necessary -import trio._tests - from . 
import _tests from ._deprecate import warn_deprecated -from ._tests import * # This won't give deprecation warning on import, but will give a warning on use of any @@ -23,13 +19,12 @@ def __getattr__(self, attr: str) -> Any: issue="https://github.com/python-trio/trio/issues/274", ) - # idk if this one is necessary, apparently doesn't get triggered by codecov - # but I needed it when running tests locally at some point - if not hasattr(trio._tests, attr): # pragma: no cover + # needed to access e.g. trio._tests.tools, although pytest doesn't need it + if not hasattr(_tests, attr): # pragma: no cover importlib.import_module(f"trio._tests.{attr}", "trio._tests") return attr - return getattr(trio._tests, attr) + return getattr(_tests, attr) # https://stackoverflow.com/questions/2447353/getattr-on-a-module From 1b422767aa7797259764f1625808a0fb2cee3c3f Mon Sep 17 00:00:00 2001 From: jakkdl Date: Mon, 22 May 2023 10:38:04 +0200 Subject: [PATCH 054/162] now also warns on `from trio import tests` --- trio/__init__.py | 1 - trio/_tests/test_deprecate.py | 3 ++- trio/_tests/test_exports.py | 8 ++++++++ trio/tests.py | 7 +++++++ 4 files changed, 17 insertions(+), 2 deletions(-) diff --git a/trio/__init__.py b/trio/__init__.py index c561c65bb4..d6d2adb4bb 100644 --- a/trio/__init__.py +++ b/trio/__init__.py @@ -100,7 +100,6 @@ from . import abc from . import from_thread from . import to_thread -from . import tests # deprecated # Not imported by default, but mentioned here so static analysis tools like # pylint will know that it exists. diff --git a/trio/_tests/test_deprecate.py b/trio/_tests/test_deprecate.py index 2a09e14b74..856ff20673 100644 --- a/trio/_tests/test_deprecate.py +++ b/trio/_tests/test_deprecate.py @@ -244,7 +244,8 @@ def test_module_with_deprecations(recwarn_always): def test_tests_is_deprecated1() -> None: - from trio import tests # no warning on import + with pytest.warns(TrioDeprecationWarning): + from trio import tests # warning on import # warning on access of any member with pytest.warns(TrioDeprecationWarning): diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index 71c32464a0..2fd8362193 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -79,6 +79,10 @@ def no_underscores(symbols): runtime_names = no_underscores(dir(module)) + # ignore deprecated module `tests` being invisible + if modname == "trio": + runtime_names.discard("tests") + if tool == "pylint": from pylint.lint import PyLinter @@ -133,6 +137,10 @@ def no_underscores(symbols): # static analysis (e.g. in trio.socket or trio.lowlevel) # So we check that the runtime names are a subset of the static names. missing_names = runtime_names - static_names + + # ignore warnings about deprecated module tests + missing_names -= {"tests"} + if missing_names: # pragma: no cover print(f"{tool} can't see the following names in {modname}:") print() diff --git a/trio/tests.py b/trio/tests.py index 1dc0825f2c..f1940a6929 100644 --- a/trio/tests.py +++ b/trio/tests.py @@ -5,6 +5,13 @@ from . import _tests from ._deprecate import warn_deprecated +warn_deprecated( + "trio.tests", + "0.24.0", + instead="trio._tests", + issue="https://github.com/python-trio/trio/issues/274", +) + # This won't give deprecation warning on import, but will give a warning on use of any # attribute in tests, and static analysis tools will also not see any content inside. 
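
As a standalone sketch of the pattern described in the comment above — all names here are hypothetical (a package `mypkg` deprecating its old `mypkg.old` module in favour of `mypkg._new`), and it uses the stdlib `warnings` module rather than trio's internal `warn_deprecated` helper:

# mypkg/old.py -- hypothetical deprecated alias module for mypkg._new
import sys
import warnings
from typing import Any

from . import _new  # the real implementation lives here


class _DeprecatedModuleWrapper:
    __name__ = "mypkg.old"

    def __getattr__(self, attr: str) -> Any:
        # Warn lazily, on attribute access rather than at import time,
        # matching the behaviour described in the comment above.
        warnings.warn(
            f"mypkg.old.{attr} is deprecated; use mypkg._new.{attr} instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return getattr(_new, attr)


# Replacing the module object in sys.modules routes every later attribute
# lookup on `mypkg.old` through __getattr__ above.
# https://stackoverflow.com/questions/2447353/getattr-on-a-module
sys.modules[__name__] = _DeprecatedModuleWrapper()  # type: ignore[assignment]

Since Python 3.7 a plain module-level `__getattr__` (PEP 562) can give a similar lazy warning without swapping the module object; the wrapper object used here additionally means, as the comment notes, that static analysis tools see no content inside the deprecated module.
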
From 93bb27b494f0ab810fb93a13247b45e336182273 Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Sat, 6 May 2023 03:42:51 +0000 Subject: [PATCH 055/162] Make the CI not trample itself This does not cancel older runs on the master branch. Co-authored-by: Kyle Altendorf Co-authored-by: Sviatoslav Sydorenko --- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 00fe9f55d2..0b530bd4b0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,6 +6,10 @@ on: - "dependabot/**" pull_request: +concurrency: + group: ${{ github.ref }}-${{ github.workflow }}-${{ github.event_name }}${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) && format('-{0}', github.sha) || '' }} + cancel-in-progress: true + jobs: Windows: name: 'Windows (${{ matrix.python }}, ${{ matrix.arch }}${{ matrix.extra_name }})' From b21d4b923d274a4489fb941087b1afec86cf9384 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 28 May 2023 23:07:35 +0000 Subject: [PATCH 056/162] Dependency updates (#2652) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- docs-requirements.txt | 6 +++--- test-requirements.txt | 16 ++++++++-------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 7cf9dac27f..93d9195e08 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -14,7 +14,7 @@ attrs==23.1.0 # outcome babel==2.12.1 # via sphinx -certifi==2022.12.7 +certifi==2023.5.7 # via requests charset-normalizer==3.1.0 # via requests @@ -57,7 +57,7 @@ pygments==2.15.1 # via sphinx pytz==2023.3 # via babel -requests==2.30.0 +requests==2.31.0 # via sphinx sniffio==1.3.0 # via -r docs-requirements.in @@ -71,7 +71,7 @@ sphinx==6.1.3 # sphinx-rtd-theme # sphinxcontrib-jquery # sphinxcontrib-trio -sphinx-rtd-theme==1.2.0 +sphinx-rtd-theme==1.2.1 # via -r docs-requirements.in sphinxcontrib-applehelp==1.0.4 # via sphinx diff --git a/test-requirements.txt b/test-requirements.txt index 8e2d54758c..1287cc268e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,7 +6,7 @@ # astor==0.8.1 # via -r test-requirements.in -astroid==2.15.4 +astroid==2.15.5 # via pylint async-generator==1.10 # via -r test-requirements.in @@ -26,7 +26,7 @@ click==8.1.3 # via # black # pip-tools -coverage[toml]==7.2.5 +coverage[toml]==7.2.6 # via pytest-cov cryptography==39.0.2 # via @@ -68,7 +68,7 @@ mccabe==0.7.0 # via # flake8 # pylint -mypy==1.2.0 ; implementation_name == "cpython" +mypy==1.3.0 ; implementation_name == "cpython" # via -r test-requirements.in mypy-extensions==1.0.0 ; implementation_name == "cpython" # via @@ -92,7 +92,7 @@ pickleshare==0.7.5 # via ipython pip-tools==6.13.0 # via -r test-requirements.in -platformdirs==3.5.0 +platformdirs==3.5.1 # via # black # pylint @@ -110,7 +110,7 @@ pyflakes==2.5.0 # via flake8 pygments==2.15.1 # via ipython -pylint==2.17.3 +pylint==2.17.4 # via -r test-requirements.in pyopenssl==23.1.1 # via -r test-requirements.in @@ -120,7 +120,7 @@ pytest==7.3.1 # via # -r test-requirements.in # pytest-cov -pytest-cov==4.0.0 +pytest-cov==4.1.0 # via -r test-requirements.in sniffio==1.3.0 # via -r test-requirements.in @@ -142,9 +142,9 @@ traitlets==5.9.0 # matplotlib-inline trustme==1.0.0 # via -r test-requirements.in -types-pyopenssl==23.1.0.2 ; implementation_name == "cpython" +types-pyopenssl==23.1.0.3 ; 
implementation_name == "cpython" # via -r test-requirements.in -typing-extensions==4.5.0 ; implementation_name == "cpython" +typing-extensions==4.6.2 ; implementation_name == "cpython" # via # -r test-requirements.in # astroid From 6e8c2af00961663f7e5fe14b2011a5744c7c1784 Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Mon, 29 May 2023 21:24:39 +0900 Subject: [PATCH 057/162] Ignore Pytest deprecation warnings (#2651) --- pyproject.toml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 4b939510ef..76e4cff9f3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,4 +60,9 @@ filterwarnings = [ "error", # https://gitter.im/python-trio/general?at=63bb8d0740557a3d5c688d67 'ignore:You are using cryptography on a 32-bit Python on a 64-bit Windows Operating System. Cryptography will be significantly faster if you switch to using a 64-bit Python.:UserWarning', + # this should remain until https://github.com/pytest-dev/pytest/pull/10894 is merged + 'ignore:ast.Str is deprecated:DeprecationWarning', + 'ignore:Attribute s is deprecated and will be removed:DeprecationWarning', + 'ignore:ast.NameConstant is deprecated:DeprecationWarning', + 'ignore:ast.Num is deprecated:DeprecationWarning' ] From 16b6b5e5b7c9041414227584c1325de3e4f5387f Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Tue, 30 May 2023 13:48:02 +0900 Subject: [PATCH 058/162] Replace codecov with their GHA thing (#2653) * Try a simple replacement * Remove some unnecessary stuff * Add back names to codecov uploads * Add some codecov "flags" --- .github/workflows/ci.yml | 22 +++++++++++++++------- ci.sh | 20 ++------------------ 2 files changed, 17 insertions(+), 25 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 00fe9f55d2..a01edb179a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -73,8 +73,11 @@ jobs: env: LSP: '${{ matrix.lsp }}' LSP_EXTRACT_FILE: '${{ matrix.lsp_extract_file }}' - # Should match 'name:' up above - JOB_NAME: 'Windows (${{ matrix.python }}, ${{ matrix.arch }}${{ matrix.extra_name }})' + - uses: codecov/codecov-action@v3 + with: + directory: empty + name: 'Windows (${{ matrix.python }}, ${{ matrix.arch }}${{ matrix.extra_name }})' + flags: Windows,${{ matrix.python }} Ubuntu: name: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' @@ -119,8 +122,11 @@ jobs: run: ./ci.sh env: CHECK_FORMATTING: '${{ matrix.check_formatting }}' - # Should match 'name:' up above - JOB_NAME: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' + - uses: codecov/codecov-action@v3 + with: + directory: empty + name: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' + flags: Ubuntu,${{ matrix.python }} macOS: name: 'macOS (${{ matrix.python }})' @@ -150,9 +156,11 @@ jobs: cache-dependency-path: test-requirements.txt - name: Run tests run: ./ci.sh - env: - # Should match 'name:' up above - JOB_NAME: 'macOS (${{ matrix.python }})' + - uses: codecov/codecov-action@v3 + with: + directory: empty + name: 'macOS (${{ matrix.python }})' + flags: macOS,${{ matrix.python }} # https://github.com/marketplace/actions/alls-green#why check: # This job does nothing and is only used for the branch protection diff --git a/ci.sh b/ci.sh index 3cd78cfb18..875d778487 100755 --- a/ci.sh +++ b/ci.sh @@ -6,10 +6,6 @@ set -ex -o pipefail uname -a env | sort -if [ "$JOB_NAME" = "" ]; then - JOB_NAME="${TRAVIS_OS_NAME}-${TRAVIS_PYTHON_VERSION:-unknown}" -fi - # Curl's built-in retry system is not very robust; it gives up on lots of # network errors that we want 
to retry on. Wget might work better, but it's # not installed on azure pipelines's windows boxes. So... let's try some good @@ -102,12 +98,8 @@ else INSTALLDIR=$(python -c "import os, trio; print(os.path.dirname(trio.__file__))") cp ../pyproject.toml $INSTALLDIR - # We have to copy .coveragerc into this directory, rather than passing - # --cov-config=../.coveragerc to pytest, because codecov.sh will run - # 'coverage xml' to generate the report that it uses, and that will only - # apply the ignore patterns in the current directory's .coveragerc. - cp ../.coveragerc . - if pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --cov="$INSTALLDIR" --verbose; then + + if pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --cov="$INSTALLDIR" --cov-report=xml --verbose; then PASSED=true else PASSED=false @@ -119,13 +111,5 @@ else netsh winsock reset fi - # The codecov docs recommend something like 'bash <(curl ...)' to pipe the - # script directly into bash as its being downloaded. But, the codecov - # server is flaky, so we instead save to a temp file with retries, and - # wait until we've successfully fetched the whole script before trying to - # run it. - curl-harder -o codecov.sh https://codecov.io/bash - bash codecov.sh -n "${JOB_NAME}" - $PASSED fi From 93c1d96a535cca565e1b1ba552cb9c4798b3f852 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Tue, 30 May 2023 10:33:21 +0200 Subject: [PATCH 059/162] fix pickle support for MultiError (#2648) * fix pickle support for MultiError --- trio/_core/_multierror.py | 7 ++++++ trio/_core/_tests/test_multierror.py | 37 ++++++++++++++++++++++++++-- 2 files changed, 42 insertions(+), 2 deletions(-) diff --git a/trio/_core/_multierror.py b/trio/_core/_multierror.py index a9778fd244..c9c355408a 100644 --- a/trio/_core/_multierror.py +++ b/trio/_core/_multierror.py @@ -213,6 +213,13 @@ def __new__(cls, exceptions, *, _collapse=True): return super().__new__(cls, "multiple tasks failed", exceptions) + def __reduce__(self): + return ( + self.__new__, + (self.__class__, list(self.exceptions)), + {"collapse": self.collapse}, + ) + def __str__(self): return ", ".join(repr(exc) for exc in self.exceptions) diff --git a/trio/_core/_tests/test_multierror.py b/trio/_core/_tests/test_multierror.py index 650f9bf597..7d31b1e889 100644 --- a/trio/_core/_tests/test_multierror.py +++ b/trio/_core/_tests/test_multierror.py @@ -1,15 +1,15 @@ import gc -import logging import os import subprocess from pathlib import Path +import pickle +import warnings import pytest from traceback import ( extract_tb, print_exception, - format_exception, ) from traceback import _cause_message # type: ignore import sys @@ -555,3 +555,36 @@ def test_apport_excepthook_monkeypatch_interaction(): ["--- 1 ---", "KeyError", "--- 2 ---", "ValueError"], stdout, ) + + +@pytest.mark.parametrize("protocol", range(0, pickle.HIGHEST_PROTOCOL + 1)) +def test_pickle_multierror(protocol) -> None: + # use trio.MultiError to make sure that pickle works through the deprecation layer + import trio + + my_except = ZeroDivisionError() + + try: + 1 / 0 + except ZeroDivisionError as e: + my_except = e + + # MultiError will collapse into different classes depending on the errors + for cls, errors in ( + (ZeroDivisionError, [my_except]), + (NonBaseMultiError, [my_except, ValueError()]), + (MultiError, [BaseException(), my_except]), + ): + with warnings.catch_warnings(): + 
warnings.simplefilter("ignore", TrioDeprecationWarning) + me = trio.MultiError(errors) # type: ignore[attr-defined] + dump = pickle.dumps(me, protocol=protocol) + load = pickle.loads(dump) + assert repr(me) == repr(load) + assert me.__class__ == load.__class__ == cls + + assert me.__dict__.keys() == load.__dict__.keys() + for me_val, load_val in zip(me.__dict__.values(), load.__dict__.values()): + # tracebacks etc are not preserved through pickling for the default + # exceptions, so we only check that the repr stays the same + assert repr(me_val) == repr(load_val) From 8ae599cc8fddbcdd616183e14baf471937344b81 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Wed, 31 May 2023 01:38:07 +0200 Subject: [PATCH 060/162] specify --cov-config=.coveragerc in ci.sh (#2654) --- ci.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci.sh b/ci.sh index 875d778487..710535695d 100755 --- a/ci.sh +++ b/ci.sh @@ -99,7 +99,7 @@ else INSTALLDIR=$(python -c "import os, trio; print(os.path.dirname(trio.__file__))") cp ../pyproject.toml $INSTALLDIR - if pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --cov="$INSTALLDIR" --cov-report=xml --verbose; then + if pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --cov="$INSTALLDIR" --cov-report=xml --cov-config=../.coveragerc --verbose; then PASSED=true else PASSED=false From 91942f6e0a752f9808922be9d933941188a62442 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 1 Jun 2023 00:26:41 +0000 Subject: [PATCH 061/162] Dependency updates (#2657) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- test-requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test-requirements.txt b/test-requirements.txt index 1287cc268e..9378d51a03 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -26,7 +26,7 @@ click==8.1.3 # via # black # pip-tools -coverage[toml]==7.2.6 +coverage[toml]==7.2.7 # via pytest-cov cryptography==39.0.2 # via @@ -112,7 +112,7 @@ pygments==2.15.1 # via ipython pylint==2.17.4 # via -r test-requirements.in -pyopenssl==23.1.1 +pyopenssl==23.2.0 # via -r test-requirements.in pyproject-hooks==1.0.0 # via build From b2e617ab3950c1166d0f2f318f9d3580140b6a40 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Thu, 1 Jun 2023 22:21:23 +0200 Subject: [PATCH 062/162] fix static analysis tests on 3.12 (#2656) * fix static analysis tests on 3.12 * Try not skipping codecov uploads on 3.12 --------- Co-authored-by: EXPLOSION --- .github/workflows/ci.yml | 9 ++++++--- pyproject.toml | 5 ++++- trio/_path.py | 8 ++++++-- trio/_tests/test_exports.py | 20 ++++++++++++++++++++ trio/socket.py | 8 ++++++++ 5 files changed, 44 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a01edb179a..18dabcecdd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -73,7 +73,8 @@ jobs: env: LSP: '${{ matrix.lsp }}' LSP_EXTRACT_FILE: '${{ matrix.lsp_extract_file }}' - - uses: codecov/codecov-action@v3 + - if: always() + uses: codecov/codecov-action@v3 with: directory: empty name: 'Windows (${{ matrix.python }}, ${{ matrix.arch }}${{ matrix.extra_name }})' @@ -122,7 +123,8 @@ jobs: run: ./ci.sh env: CHECK_FORMATTING: '${{ matrix.check_formatting }}' - - uses: 
codecov/codecov-action@v3 + - if: always() + uses: codecov/codecov-action@v3 with: directory: empty name: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' @@ -156,7 +158,8 @@ jobs: cache-dependency-path: test-requirements.txt - name: Run tests run: ./ci.sh - - uses: codecov/codecov-action@v3 + - if: always() + uses: codecov/codecov-action@v3 with: directory: empty name: 'macOS (${{ matrix.python }})' diff --git a/pyproject.toml b/pyproject.toml index 76e4cff9f3..96396faddc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,5 +64,8 @@ filterwarnings = [ 'ignore:ast.Str is deprecated:DeprecationWarning', 'ignore:Attribute s is deprecated and will be removed:DeprecationWarning', 'ignore:ast.NameConstant is deprecated:DeprecationWarning', - 'ignore:ast.Num is deprecated:DeprecationWarning' + 'ignore:ast.Num is deprecated:DeprecationWarning', + # https://github.com/python/mypy/issues/15330 + 'ignore:ast.Ellipsis is deprecated:DeprecationWarning', + 'ignore:ast.Bytes is deprecated:DeprecationWarning', ] diff --git a/trio/_path.py b/trio/_path.py index 7c338dbc97..bb81759ecf 100644 --- a/trio/_path.py +++ b/trio/_path.py @@ -3,7 +3,7 @@ import sys import types from functools import partial, wraps -from typing import TYPE_CHECKING, Awaitable, Callable, TypeVar, Any +from typing import TYPE_CHECKING, Any import trio from trio._util import Final, async_wraps @@ -246,7 +246,7 @@ async def open(self, *args, **kwargs): is_mount: Any owner: Any - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 8) and sys.version_info < (3, 12): link_to: Any if sys.version_info >= (3, 9): is_relative_to: Any @@ -254,6 +254,10 @@ async def open(self, *args, **kwargs): readlink: Any if sys.version_info >= (3, 10): hardlink_to: Any + if sys.version_info >= (3, 12): + is_junction: Any + walk: Any + with_segments: Any Path.iterdir.__doc__ = """ diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index 2fd8362193..4a35a95528 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -242,6 +242,10 @@ def no_hidden(symbols): missing = runtime_names - static_names extra = static_names - runtime_names + + # using .remove() instead of .delete() to get an error in case they start not + # being missing + if BaseException in class_.__mro__ and sys.version_info > (3, 11): missing.remove("add_note") @@ -251,6 +255,22 @@ def no_hidden(symbols): extra.remove("receive_stream") extra.remove("send_stream") + # I have not researched why these are missing, should maybe create an issue + # upstream with jedi + if sys.version_info >= (3, 12): + if class_ in ( + trio.DTLSChannel, + trio.MemoryReceiveChannel, + trio.MemorySendChannel, + trio.SSLListener, + trio.SocketListener, + ): + missing.remove("__aenter__") + missing.remove("__aexit__") + if class_ in (trio.DTLSChannel, trio.MemoryReceiveChannel): + missing.remove("__aiter__") + missing.remove("__anext__") + # intentionally hidden behind type guard if class_ == trio.Path: missing.remove("__getattr__") diff --git a/trio/socket.py b/trio/socket.py index b9e9308dd9..d4beb159c0 100644 --- a/trio/socket.py +++ b/trio/socket.py @@ -123,6 +123,14 @@ WSA_FLAG_OVERLAPPED, WSA_INVALID_HANDLE, WSA_INVALID_PARAMETER, WSA_IO_INCOMPLETE, WSA_IO_PENDING, WSA_NOT_ENOUGH_MEMORY, WSA_OPERATION_ABORTED, WSA_WAIT_FAILED, WSA_WAIT_TIMEOUT, + ETHERTYPE_ARP, ETHERTYPE_IP, ETHERTYPE_IPV6, ETHERTYPE_VLAN, ETH_P_ALL, + IP_ADD_SOURCE_MEMBERSHIP, IP_BLOCK_SOURCE, IP_DROP_SOURCE_MEMBERSHIP, + IP_PKTINFO, IP_UNBLOCK_SOURCE, TCP_CC_INFO, TCP_FASTOPEN_CONNECT, + 
TCP_FASTOPEN_KEY, TCP_FASTOPEN_NO_COOKIE, TCP_INQ, TCP_MD5SIG, + TCP_MD5SIG_EXT, TCP_QUEUE_SEQ, TCP_REPAIR, TCP_REPAIR_OPTIONS, + TCP_REPAIR_QUEUE, TCP_REPAIR_WINDOW, TCP_SAVED_SYN, TCP_SAVE_SYN, + TCP_THIN_DUPACK, TCP_THIN_LINEAR_TIMEOUTS, TCP_TIMESTAMP, TCP_TX_DELAY, + TCP_ULP, TCP_ZEROCOPY_RECEIVE, ) # fmt: on except ImportError: From 7a35c5b300e2ad593f14863990cf073a2cc755dd Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Sat, 3 Jun 2023 14:03:36 +0900 Subject: [PATCH 063/162] Clear some cruft from 3.6 (#2659) * Remove remnants of 3.6 * Oops, missed a spot --- docs-requirements.in | 1 - trio/_socket.py | 12 +----------- trio/_tests/test_socket.py | 2 +- 3 files changed, 2 insertions(+), 13 deletions(-) diff --git a/docs-requirements.in b/docs-requirements.in index fab339e1f9..98d5030bc5 100644 --- a/docs-requirements.in +++ b/docs-requirements.in @@ -9,7 +9,6 @@ towncrier # Trio's own dependencies cffi; os_name == "nt" -contextvars; python_version < "3.7" attrs >= 19.2.0 sortedcontainers async_generator >= 1.9 diff --git a/trio/_socket.py b/trio/_socket.py index b12126f7e1..980b5ba860 100644 --- a/trio/_socket.py +++ b/trio/_socket.py @@ -324,13 +324,6 @@ def _sniff_sockopts_for_fileno(family, type, proto, fileno): ) -# This function will modify the given socket to match the behavior in python -# 3.7. This will become unnecessary and can be removed when support for versions -# older than 3.7 is dropped. -def real_socket_type(type_num): - return type_num & _SOCK_TYPE_MASK - - def _make_simple_sock_method_wrapper(methname, wait_fn, maybe_avail=False): fn = getattr(_stdlib_socket.socket, methname) @@ -496,10 +489,7 @@ def family(self): @property def type(self): - # Modify the socket type do match what is done on python 3.7. When - # support for versions older than 3.7 is dropped, this can be updated - # to just return self._sock.type - return real_socket_type(self._sock.type) + return self._sock.type @property def proto(self): diff --git a/trio/_tests/test_socket.py b/trio/_tests/test_socket.py index 9aa18dc469..44d285452f 100644 --- a/trio/_tests/test_socket.py +++ b/trio/_tests/test_socket.py @@ -356,7 +356,7 @@ async def test_SocketType_basics(): # type family proto stdlib_sock = stdlib_socket.socket() sock = tsocket.from_stdlib_socket(stdlib_sock) - assert sock.type == _tsocket.real_socket_type(stdlib_sock.type) + assert sock.type == stdlib_sock.type assert sock.family == stdlib_sock.family assert sock.proto == stdlib_sock.proto sock.close() From 493c915c90f9639c60d2ba9943642b7f40f83679 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Fri, 9 Jun 2023 13:49:41 +0200 Subject: [PATCH 064/162] change imports to `from Y import X as X` for compatibility with `pyright --verifytypes` (#2629) * Change imports in all public files to 'from y import x as x' for compatibility with pyright --verifytypes Also adds `__all__` to a couple files, as well as moving re-exported constants in trio.socket to the bottom of the file. * Add pyright_verifytypes to test_exports.test_static_tool_sees_all_symbols * Add trio/_tests/check_type_completeness.py that's run from check.sh to make sure type completeness doesn't go down * Add trio/_tests/verify_types.json which tracks the current state of type completeness, including a list of all untyped symbols. 
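
For illustration, the convention this commit switches to is the spelling that pyright's `--verifytypes` check (following the stub-file re-export rules) treats as an intentional public export; the package and symbol names below are hypothetical, not trio's actual layout:

# mypkg/__init__.py -- hypothetical package showing the re-export spellings
# 1) Plain import: pyright --verifytypes does not treat `connect` as part of
#    the package's public interface here.
# from ._impl import connect

# 2) Redundant alias: marks `connect` as an intentional public re-export.
from ._impl import connect as connect

# 3) Equivalent alternative: list the name in __all__.
__all__ = ["connect"]

Listing names in `__all__`, as this commit also does for a couple of modules, has the same effect as the redundant-alias form.
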
--- .coveragerc | 2 + check.sh | 7 + ci.sh | 4 + test-requirements.in | 1 + test-requirements.txt | 4 + trio/__init__.py | 115 +++-- trio/_tests/check_type_completeness.py | 159 ++++++ trio/_tests/test_exports.py | 68 ++- trio/_tests/verify_types.json | 366 ++++++++++++++ trio/abc.py | 28 +- trio/from_thread.py | 4 + trio/lowlevel.py | 92 ++-- trio/socket.py | 654 +++++++++++++++++++------ trio/testing/__init__.py | 38 +- trio/to_thread.py | 3 + 15 files changed, 1251 insertions(+), 294 deletions(-) create mode 100755 trio/_tests/check_type_completeness.py create mode 100644 trio/_tests/verify_types.json diff --git a/.coveragerc b/.coveragerc index 83a7cf0647..5d3f57aa66 100644 --- a/.coveragerc +++ b/.coveragerc @@ -8,6 +8,8 @@ omit= */ipython_custom_exc.py # Omit the generated files in trio/_core starting with _generated_ */trio/_core/_generated_* +# script used to check type completeness that isn't run in tests + */trio/_tests/check_type_completeness.py # The test suite spawns subprocesses to test some stuff, so make sure # this doesn't corrupt the coverage files parallel=True diff --git a/check.sh b/check.sh index 8416a9c5d1..3c46cf844f 100755 --- a/check.sh +++ b/check.sh @@ -37,6 +37,13 @@ if git status --porcelain | grep -q "requirements.txt"; then EXIT_STATUS=1 fi +python trio/_tests/check_type_completeness.py --overwrite-file || EXIT_STATUS=$? +if git status --porcelain trio/_tests/verify_types.json | grep -q "M"; then + echo "Type completeness changed, please update!" + git diff trio/_tests/verify_types.json + EXIT_STATUS=1 +fi + # Finally, leave a really clear warning of any issues and exit if [ $EXIT_STATUS -ne 0 ]; then cat <= 5.0 # for faulthandler in core pytest-cov >= 2.6.0 async_generator >= 1.9 +pyright # ipython 7.x is the last major version supporting Python 3.7 ipython < 7.35 # for the IPython traceback integration tests pyOpenSSL >= 22.0.0 # for the ssl + DTLS tests diff --git a/test-requirements.txt b/test-requirements.txt index 9378d51a03..719617a34b 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -75,6 +75,8 @@ mypy-extensions==1.0.0 ; implementation_name == "cpython" # -r test-requirements.in # black # mypy +nodeenv==1.7.0 + # via pyright outcome==1.2.0 # via -r test-requirements.in packaging==23.1 @@ -116,6 +118,8 @@ pyopenssl==23.2.0 # via -r test-requirements.in pyproject-hooks==1.0.0 # via build +pyright==1.1.310 + # via -r test-requirements.in pytest==7.3.1 # via # -r test-requirements.in diff --git a/trio/__init__.py b/trio/__init__.py index d6d2adb4bb..40aa3c430d 100644 --- a/trio/__init__.py +++ b/trio/__init__.py @@ -12,87 +12,104 @@ # # This file pulls together the friendly public API, by re-exporting the more # innocuous bits of the _core API + the higher-level tools from trio/*.py. 
+# +# Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) +# pyright explicitly does not care about `__version__` +# see https://github.com/microsoft/pyright/blob/main/docs/typed-libraries.md#type-completeness from ._version import __version__ from ._core import ( - TrioInternalError, - RunFinishedError, - WouldBlock, - Cancelled, - BusyResourceError, - ClosedResourceError, - run, - open_nursery, - CancelScope, - current_effective_deadline, - TASK_STATUS_IGNORED, - current_time, - BrokenResourceError, - EndOfChannel, - Nursery, + TrioInternalError as TrioInternalError, + RunFinishedError as RunFinishedError, + WouldBlock as WouldBlock, + Cancelled as Cancelled, + BusyResourceError as BusyResourceError, + ClosedResourceError as ClosedResourceError, + run as run, + open_nursery as open_nursery, + CancelScope as CancelScope, + current_effective_deadline as current_effective_deadline, + TASK_STATUS_IGNORED as TASK_STATUS_IGNORED, + current_time as current_time, + BrokenResourceError as BrokenResourceError, + EndOfChannel as EndOfChannel, + Nursery as Nursery, ) from ._timeouts import ( - move_on_at, - move_on_after, - sleep_forever, - sleep_until, - sleep, - fail_at, - fail_after, - TooSlowError, + move_on_at as move_on_at, + move_on_after as move_on_after, + sleep_forever as sleep_forever, + sleep_until as sleep_until, + sleep as sleep, + fail_at as fail_at, + fail_after as fail_after, + TooSlowError as TooSlowError, ) from ._sync import ( - Event, - CapacityLimiter, - Semaphore, - Lock, - StrictFIFOLock, - Condition, + Event as Event, + CapacityLimiter as CapacityLimiter, + Semaphore as Semaphore, + Lock as Lock, + StrictFIFOLock as StrictFIFOLock, + Condition as Condition, ) -from ._highlevel_generic import aclose_forcefully, StapledStream +from ._highlevel_generic import ( + aclose_forcefully as aclose_forcefully, + StapledStream as StapledStream, +) from ._channel import ( - open_memory_channel, - MemorySendChannel, - MemoryReceiveChannel, + open_memory_channel as open_memory_channel, + MemorySendChannel as MemorySendChannel, + MemoryReceiveChannel as MemoryReceiveChannel, ) -from ._signals import open_signal_receiver +from ._signals import open_signal_receiver as open_signal_receiver -from ._highlevel_socket import SocketStream, SocketListener +from ._highlevel_socket import ( + SocketStream as SocketStream, + SocketListener as SocketListener, +) -from ._file_io import open_file, wrap_file +from ._file_io import open_file as open_file, wrap_file as wrap_file -from ._path import Path +from ._path import Path as Path -from ._subprocess import Process, run_process +from ._subprocess import Process as Process, run_process as run_process -from ._ssl import SSLStream, SSLListener, NeedHandshakeError +from ._ssl import ( + SSLStream as SSLStream, + SSLListener as SSLListener, + NeedHandshakeError as NeedHandshakeError, +) -from ._dtls import DTLSEndpoint, DTLSChannel +from ._dtls import DTLSEndpoint as DTLSEndpoint, DTLSChannel as DTLSChannel -from ._highlevel_serve_listeners import serve_listeners +from ._highlevel_serve_listeners import serve_listeners as serve_listeners -from ._highlevel_open_tcp_stream import open_tcp_stream +from ._highlevel_open_tcp_stream import open_tcp_stream as open_tcp_stream -from ._highlevel_open_tcp_listeners import open_tcp_listeners, serve_tcp +from ._highlevel_open_tcp_listeners import ( + open_tcp_listeners as open_tcp_listeners, + serve_tcp as serve_tcp, +) -from ._highlevel_open_unix_stream import open_unix_socket +from 
._highlevel_open_unix_stream import open_unix_socket as open_unix_socket from ._highlevel_ssl_helpers import ( - open_ssl_over_tcp_stream, - open_ssl_over_tcp_listeners, - serve_ssl_over_tcp, + open_ssl_over_tcp_stream as open_ssl_over_tcp_stream, + open_ssl_over_tcp_listeners as open_ssl_over_tcp_listeners, + serve_ssl_over_tcp as serve_ssl_over_tcp, ) from ._core._multierror import MultiError as _MultiError from ._core._multierror import NonBaseMultiError as _NonBaseMultiError -from ._deprecate import TrioDeprecationWarning +from ._deprecate import TrioDeprecationWarning as TrioDeprecationWarning # Submodules imported by default from . import lowlevel @@ -106,7 +123,7 @@ if False: from . import testing -from . import _deprecate +from . import _deprecate as _deprecate _deprecate.enable_attribute_deprecations(__name__) diff --git a/trio/_tests/check_type_completeness.py b/trio/_tests/check_type_completeness.py new file mode 100755 index 0000000000..d67d11958e --- /dev/null +++ b/trio/_tests/check_type_completeness.py @@ -0,0 +1,159 @@ +#!/usr/bin/env python3 +# this file is not run as part of the tests, instead it's run standalone from check.sh +import subprocess +import json +from pathlib import Path +import sys +import argparse + +# the result file is not marked in MANIFEST.in so it's not included in the package +RESULT_FILE = Path(__file__).parent / "verify_types.json" +failed = False + + +# TODO: consider checking manually without `--ignoreexternal`, and/or +# removing it from the below call later on. +def run_pyright(): + return subprocess.run( + ["pyright", "--verifytypes=trio", "--outputjson", "--ignoreexternal"], + capture_output=True, + ) + + +def check_less_than(key, current_dict, last_dict, /, invert=False): + global failed + current = current_dict[key] + last = last_dict[key] + assert isinstance(current, (float, int)) + assert isinstance(last, (float, int)) + if current == last: + return + if (current > last) ^ invert: + failed = True + print("ERROR: ", end="") + if isinstance(current, float): + strcurrent = f"{current:.4}" + strlast = f"{last:.4}" + else: + strcurrent = str(current) + strlast = str(last) + print( + f"{key} has gone {'down' if current int: + print("*" * 20, "\nChecking type completeness hasn't gone down...") + + res = run_pyright() + current_result = json.loads(res.stdout) + py_typed_file: Path | None = None + + # check if py.typed file was missing + if ( + current_result["generalDiagnostics"] + and current_result["generalDiagnostics"][0]["message"] + == "No py.typed file found" + ): + print("creating py.typed") + py_typed_file = ( + Path(current_result["typeCompleteness"]["packageRootDirectory"]) + / "py.typed" + ) + py_typed_file.write_text("") + + res = run_pyright() + current_result = json.loads(res.stdout) + + if res.stderr: + print(res.stderr) + + last_result = json.loads(RESULT_FILE.read_text()) + + for key in "errorCount", "warningCount", "informationCount": + check_zero(key, current_result["summary"]) + + for key, invert in ( + ("missingFunctionDocStringCount", False), + ("missingClassDocStringCount", False), + ("missingDefaultParamCount", False), + ("completenessScore", True), + ): + check_less_than( + key, + current_result["typeCompleteness"], + last_result["typeCompleteness"], + invert=invert, + ) + + for key, invert in ( + ("withUnknownType", False), + ("withAmbiguousType", False), + ("withKnownType", True), + ): + check_less_than( + key, + current_result["typeCompleteness"]["exportedSymbolCounts"], + 
last_result["typeCompleteness"]["exportedSymbolCounts"], + invert=invert, + ) + + assert ( + res.returncode != 0 + ), "Fully type complete! Delete this script and instead directly run `pyright --verifytypes=trio` (consider `--ignoreexternal`) in CI and checking exit code." + + if args.overwrite_file: + print("Overwriting file") + + # don't care about differences in time taken + del current_result["time"] + del current_result["summary"]["timeInSec"] + + # don't fail on version diff so pyright updates can be automerged + del current_result["version"] + + for key in ( + # don't save path (because that varies between machines) + "moduleRootDirectory", + "packageRootDirectory", + "pyTypedPath", + ): + del current_result["typeCompleteness"][key] + + # prune the symbols to only be the name of the symbols with + # errors, instead of saving a huge file. + new_symbols = [] + for symbol in current_result["typeCompleteness"]["symbols"]: + if symbol["diagnostics"]: + new_symbols.append(symbol["name"]) + continue + + current_result["typeCompleteness"]["symbols"] = new_symbols + + with open(RESULT_FILE, "w") as file: + json.dump(current_result, file, sort_keys=True, indent=2) + # add newline at end of file so it's easier to manually modify + file.write("\n") + + if py_typed_file is not None: + print("deleting py.typed") + py_typed_file.unlink() + + print("*" * 20) + + return int(failed) + + +parser = argparse.ArgumentParser() +parser.add_argument("--overwrite-file", action="store_true", default=False) +args = parser.parse_args() + +assert __name__ == "__main__", "This script should be run standalone" +sys.exit(main(args)) diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index 4a35a95528..842b1313cf 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -1,13 +1,10 @@ import enum import importlib import inspect -import re import socket as stdlib_socket import sys -import types from pathlib import Path from types import ModuleType -from typing import Any, Iterable import pytest @@ -41,7 +38,7 @@ def public_modules(module): for name, class_ in module.__dict__.items(): if name.startswith("_"): # pragma: no cover continue - if not isinstance(class_, types.ModuleType): + if not isinstance(class_, ModuleType): continue if not class_.__name__.startswith(module.__name__): # pragma: no cover continue @@ -66,7 +63,7 @@ def public_modules(module): reason="skip static introspection tools on Python dev/alpha releases", ) @pytest.mark.parametrize("modname", PUBLIC_MODULE_NAMES) -@pytest.mark.parametrize("tool", ["pylint", "jedi", "mypy"]) +@pytest.mark.parametrize("tool", ["pylint", "jedi", "mypy", "pyright_verifytypes"]) @pytest.mark.filterwarnings( # https://github.com/pypa/setuptools/issues/3274 "ignore:module 'sre_constants' is deprecated:DeprecationWarning", @@ -83,6 +80,13 @@ def no_underscores(symbols): if modname == "trio": runtime_names.discard("tests") + if tool in ("mypy", "pyright_verifytypes"): + # create py.typed file + py_typed_path = Path(trio.__file__).parent / "py.typed" + py_typed_exists = py_typed_path.exists() + if not py_typed_exists: # pragma: no branch + py_typed_path.write_text("") + if tool == "pylint": from pylint.lint import PyLinter @@ -102,12 +106,6 @@ def no_underscores(symbols): if sys.implementation.name != "cpython": pytest.skip("mypy not installed in tests on pypy") - # create py.typed file - py_typed_path = Path(trio.__file__).parent / "py.typed" - py_typed_exists = py_typed_path.exists() - if not py_typed_exists: # pragma: no cover - 
py_typed_path.write_text("") - # mypy behaves strangely when passed a huge semicolon-separated line with `-c` # so we use a tmpfile tmpfile = tmpdir / "check_mypy.py" @@ -118,18 +116,45 @@ def no_underscores(symbols): ) from mypy.api import run - res = run(["--config-file=", "--follow-imports=silent", str(tmpfile)]) - - # clean up created py.typed file - if not py_typed_exists: # pragma: no cover - py_typed_path.unlink() + mypy_res = run(["--config-file=", "--follow-imports=silent", str(tmpfile)]) # check that there were no errors (exit code 0), otherwise print the errors - assert res[2] == 0, res[0] - return + assert mypy_res[2] == 0, mypy_res[0] + elif tool == "pyright_verifytypes": + if not RUN_SLOW: # pragma: no cover + pytest.skip("use --run-slow to check against mypy") + import subprocess + import json + + # uses `--verbose` to also get symbols without errors + # `--verbose` and `--outputjson` are incompatible, so we do string parsing + res = subprocess.run( + ["pyright", f"--verifytypes={modname}", "--outputjson"], + capture_output=True, + ) + current_result = json.loads(res.stdout) + + static_names = { + x["name"][len(modname) + 1 :] + for x in current_result["typeCompleteness"]["symbols"] + if x["name"].startswith(modname) + } + + # pytest ignores the symbol defined behind `if False` + if modname == "trio": + static_names.add("testing") + else: # pragma: no cover assert False + # remove py.typed file + if tool in ("mypy", "pyright_verifytypes") and not py_typed_exists: + py_typed_path.unlink() + + # mypy handles errors with an `assert` in its branch + if tool == "mypy": + return + # It's expected that the static set will contain more names than the # runtime set: # - static tools are sometimes sloppy and include deleted names @@ -180,9 +205,8 @@ def no_hidden(symbols): if sys.implementation.name != "cpython": pytest.skip("mypy not installed in tests on pypy") # create py.typed file - # not marked with no-cover pragma, remove this logic when trio is marked - # with py.typed proper - if not py_typed_exists: + # remove this logic when trio is marked with py.typed proper + if not py_typed_exists: # pragma: no branch py_typed_path.write_text("") errors: dict[str, object] = {} @@ -282,7 +306,7 @@ def no_hidden(symbols): } elif tool == "mypy": tmpfile = tmpdir / "check_mypy.py" - sorted_runtime_names = list(sorted(runtime_names)) + sorted_runtime_names = sorted(runtime_names) content = f"from {module_name} import {class_name}\n" + "".join( f"{class_name}.{name}\n" for name in sorted_runtime_names ) diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json new file mode 100644 index 0000000000..7b0c39d20d --- /dev/null +++ b/trio/_tests/verify_types.json @@ -0,0 +1,366 @@ +{ + "generalDiagnostics": [], + "summary": { + "errorCount": 0, + "filesAnalyzed": 8, + "informationCount": 0, + "warningCount": 0 + }, + "typeCompleteness": { + "completenessScore": 0.8155339805825242, + "exportedSymbolCounts": { + "withAmbiguousType": 1, + "withKnownType": 504, + "withUnknownType": 113 + }, + "ignoreUnknownTypesFromImports": true, + "missingClassDocStringCount": 1, + "missingDefaultParamCount": 0, + "missingFunctionDocStringCount": 4, + "moduleName": "trio", + "modules": [ + { + "name": "trio" + }, + { + "name": "trio.abc" + }, + { + "name": "trio.from_thread" + }, + { + "name": "trio.lowlevel" + }, + { + "name": "trio.socket" + }, + { + "name": "trio.testing" + }, + { + "name": "trio.tests" + }, + { + "name": "trio.to_thread" + } + ], + "otherSymbolCounts": { + "withAmbiguousType": 
15, + "withKnownType": 231, + "withUnknownType": 236 + }, + "packageName": "trio", + "symbols": [ + "trio._core._exceptions.Cancelled", + "trio._core._exceptions.Cancelled.__str__", + "trio._util.NoPublicConstructor", + "trio._util.NoPublicConstructor.__call__", + "trio._util.Final.__new__", + "trio.run", + "trio.open_nursery", + "trio._core._run.CancelScope", + "trio._core._run.CancelScope.cancelled_caught", + "trio._core._run.CancelScope.__exit__", + "trio._core._run.CancelScope.__repr__", + "trio._core._run.CancelScope.deadline", + "trio._core._run.CancelScope.cancel_called", + "trio.current_effective_deadline", + "trio._core._run._TaskStatusIgnored.__repr__", + "trio._core._run._TaskStatusIgnored.started", + "trio.current_time", + "trio._core._run.Nursery", + "trio._core._run.Nursery.__init__", + "trio._core._run.Nursery.child_tasks", + "trio._core._run.Nursery.parent_task", + "trio._core._run.Nursery.start_soon", + "trio._core._run.Nursery.start", + "trio._core._run.Nursery.__del__", + "trio.move_on_at", + "trio.move_on_after", + "trio.sleep_forever", + "trio.sleep_until", + "trio.sleep", + "trio.fail_after", + "trio._sync.Event", + "trio._sync.Event.is_set", + "trio._sync.Event.wait", + "trio._sync.Event.statistics", + "trio._sync.CapacityLimiter", + "trio._sync.CapacityLimiter.__init__", + "trio._sync.CapacityLimiter.__repr__", + "trio._sync.CapacityLimiter.total_tokens", + "trio._sync.CapacityLimiter.borrowed_tokens", + "trio._sync.CapacityLimiter.available_tokens", + "trio._sync.CapacityLimiter.statistics", + "trio._sync.Semaphore", + "trio._sync.Semaphore.__init__", + "trio._sync.Semaphore.__repr__", + "trio._sync.Semaphore.value", + "trio._sync.Semaphore.max_value", + "trio._sync.Semaphore.statistics", + "trio._sync.Lock", + "trio._sync._LockImpl.__repr__", + "trio._sync._LockImpl.locked", + "trio._sync._LockImpl.statistics", + "trio._sync.StrictFIFOLock", + "trio._sync.Condition", + "trio._sync.Condition.__init__", + "trio._sync.Condition.locked", + "trio._sync.Condition.acquire_nowait", + "trio._sync.Condition.acquire", + "trio._sync.Condition.release", + "trio._sync.Condition.notify", + "trio._sync.Condition.notify_all", + "trio._sync.Condition.statistics", + "trio.aclose_forcefully", + "trio._highlevel_generic.StapledStream", + "trio._highlevel_generic.StapledStream.send_stream", + "trio._highlevel_generic.StapledStream.receive_stream", + "trio._highlevel_generic.StapledStream.send_all", + "trio._highlevel_generic.StapledStream.wait_send_all_might_not_block", + "trio._highlevel_generic.StapledStream.send_eof", + "trio._highlevel_generic.StapledStream.receive_some", + "trio._highlevel_generic.StapledStream.aclose", + "trio._abc.HalfCloseableStream", + "trio._abc.HalfCloseableStream.send_eof", + "trio._abc.Stream", + "trio._abc.SendStream", + "trio._abc.SendStream.send_all", + "trio._abc.SendStream.wait_send_all_might_not_block", + "trio._abc.AsyncResource.aclose", + "trio._abc.AsyncResource.__aenter__", + "trio._abc.AsyncResource.__aexit__", + "trio._abc.ReceiveStream", + "trio._abc.ReceiveStream.receive_some", + "trio._abc.ReceiveStream.__aiter__", + "trio._abc.ReceiveStream.__anext__", + "trio._channel.MemorySendChannel", + "trio._abc.SendChannel", + "trio._channel.MemoryReceiveChannel", + "trio._abc.ReceiveChannel", + "trio._abc.ReceiveChannel.__aiter__", + "trio._highlevel_socket.SocketStream", + "trio._highlevel_socket.SocketStream.__init__", + "trio._highlevel_socket.SocketStream.send_all", + "trio._highlevel_socket.SocketStream.wait_send_all_might_not_block", + 
"trio._highlevel_socket.SocketStream.send_eof", + "trio._highlevel_socket.SocketStream.receive_some", + "trio._highlevel_socket.SocketStream.aclose", + "trio._highlevel_socket.SocketStream.setsockopt", + "trio._highlevel_socket.SocketStream.getsockopt", + "trio._highlevel_socket.SocketListener", + "trio._highlevel_socket.SocketListener.__init__", + "trio._highlevel_socket.SocketListener.accept", + "trio._highlevel_socket.SocketListener.aclose", + "trio._abc.Listener", + "trio._abc.Listener.accept", + "trio.open_file", + "trio.wrap_file", + "trio._path.Path", + "trio._path.Path.__init__", + "trio._path.Path.__dir__", + "trio._path.Path.__repr__", + "trio._path.Path.__fspath__", + "trio._path.Path.open", + "trio._path.Path.__bytes__", + "trio._path.Path.__truediv__", + "trio._path.Path.__rtruediv__", + "trio._path.AsyncAutoWrapperType", + "trio._path.AsyncAutoWrapperType.__init__", + "trio._path.AsyncAutoWrapperType.generate_forwards", + "trio._path.AsyncAutoWrapperType.generate_wraps", + "trio._path.AsyncAutoWrapperType.generate_magic", + "trio._path.AsyncAutoWrapperType.generate_iter", + "trio._subprocess.Process", + "trio._subprocess.Process.encoding", + "trio._subprocess.Process.errors", + "trio._subprocess.Process.__init__", + "trio._subprocess.Process.__repr__", + "trio._subprocess.Process.returncode", + "trio._subprocess.Process.__aenter__", + "trio._subprocess.Process.aclose", + "trio._subprocess.Process.wait", + "trio._subprocess.Process.poll", + "trio._subprocess.Process.send_signal", + "trio._subprocess.Process.terminate", + "trio._subprocess.Process.kill", + "trio._subprocess.Process.args", + "trio._subprocess.Process.pid", + "trio.run_process", + "trio._ssl.SSLStream", + "trio._ssl.SSLStream.__init__", + "trio._ssl.SSLStream.__getattr__", + "trio._ssl.SSLStream.__setattr__", + "trio._ssl.SSLStream.__dir__", + "trio._ssl.SSLStream.do_handshake", + "trio._ssl.SSLStream.receive_some", + "trio._ssl.SSLStream.send_all", + "trio._ssl.SSLStream.unwrap", + "trio._ssl.SSLStream.aclose", + "trio._ssl.SSLStream.wait_send_all_might_not_block", + "trio._ssl.SSLStream.transport_stream", + "trio._ssl.SSLListener", + "trio._ssl.SSLListener.__init__", + "trio._ssl.SSLListener.accept", + "trio._ssl.SSLListener.aclose", + "trio._dtls.DTLSEndpoint", + "trio._dtls.DTLSEndpoint.__init__", + "trio._dtls.DTLSEndpoint.__del__", + "trio._dtls.DTLSEndpoint.close", + "trio._dtls.DTLSEndpoint.__enter__", + "trio._dtls.DTLSEndpoint.__exit__", + "trio._dtls.DTLSEndpoint.serve", + "trio._dtls.DTLSEndpoint.connect", + "trio._dtls.DTLSEndpoint.socket", + "trio._dtls.DTLSEndpoint.incoming_packets_buffer", + "trio._dtls.DTLSChannel", + "trio._dtls.DTLSChannel.__init__", + "trio._dtls.DTLSChannel.close", + "trio._dtls.DTLSChannel.__enter__", + "trio._dtls.DTLSChannel.__exit__", + "trio._dtls.DTLSChannel.aclose", + "trio._dtls.DTLSChannel.do_handshake", + "trio._dtls.DTLSChannel.send", + "trio._dtls.DTLSChannel.receive", + "trio._dtls.DTLSChannel.set_ciphertext_mtu", + "trio._dtls.DTLSChannel.get_cleartext_mtu", + "trio._dtls.DTLSChannel.statistics", + "trio._abc.Channel", + "trio.serve_listeners", + "trio.open_tcp_stream", + "trio.open_tcp_listeners", + "trio.serve_tcp", + "trio.open_unix_socket", + "trio.open_ssl_over_tcp_stream", + "trio.open_ssl_over_tcp_listeners", + "trio.serve_ssl_over_tcp", + "trio.__deprecated_attributes__", + "trio._abc.Clock.start_clock", + "trio._abc.Clock.current_time", + "trio._abc.Clock.deadline_to_sleep_time", + "trio._abc.Instrument.before_run", + "trio._abc.Instrument.after_run", + 
"trio._abc.Instrument.task_spawned", + "trio._abc.Instrument.task_scheduled", + "trio._abc.Instrument.before_task_step", + "trio._abc.Instrument.after_task_step", + "trio._abc.Instrument.task_exited", + "trio._abc.Instrument.before_io_wait", + "trio._abc.Instrument.after_io_wait", + "trio._abc.SocketFactory.socket", + "trio._abc.HostnameResolver.getaddrinfo", + "trio._abc.HostnameResolver.getnameinfo", + "trio.from_thread.run", + "trio.from_thread.run_sync", + "trio.lowlevel.cancel_shielded_checkpoint", + "trio.lowlevel.currently_ki_protected", + "trio._core._run.Task", + "trio._core._run.Task.coro", + "trio._core._run.Task.name", + "trio._core._run.Task.context", + "trio._core._run.Task.custom_sleep_data", + "trio._core._run.Task.__repr__", + "trio._core._run.Task.parent_nursery", + "trio._core._run.Task.eventual_parent_nursery", + "trio._core._run.Task.child_nurseries", + "trio._core._run.Task.iter_await_frames", + "trio.lowlevel.checkpoint", + "trio.lowlevel.current_task", + "trio._core._parking_lot.ParkingLot", + "trio._core._parking_lot.ParkingLot.__len__", + "trio._core._parking_lot.ParkingLot.__bool__", + "trio._core._parking_lot.ParkingLot.unpark_all", + "trio._core._parking_lot.ParkingLot.repark_all", + "trio._core._parking_lot.ParkingLot.statistics", + "trio._core._unbounded_queue.UnboundedQueue", + "trio._core._unbounded_queue.UnboundedQueue.__repr__", + "trio._core._unbounded_queue.UnboundedQueue.qsize", + "trio._core._unbounded_queue.UnboundedQueue.empty", + "trio._core._unbounded_queue.UnboundedQueue.get_batch_nowait", + "trio._core._unbounded_queue.UnboundedQueue.get_batch", + "trio._core._unbounded_queue.UnboundedQueue.statistics", + "trio._core._unbounded_queue.UnboundedQueue.__aiter__", + "trio._core._unbounded_queue.UnboundedQueue.__anext__", + "trio._core._local.RunVar", + "trio._core._local.RunVar.get", + "trio._core._local.RunVar.set", + "trio._core._local.RunVar.reset", + "trio._core._local.RunVar.__repr__", + "trio._core._entry_queue.TrioToken", + "trio._core._entry_queue.TrioToken.run_sync_soon", + "trio.lowlevel.current_trio_token", + "trio.lowlevel.temporarily_detach_coroutine_object", + "trio.lowlevel.permanently_detach_coroutine_object", + "trio.lowlevel.reattach_detached_coroutine_object", + "trio.lowlevel.current_statistics", + "trio.lowlevel.reschedule", + "trio.lowlevel.remove_instrument", + "trio.lowlevel.add_instrument", + "trio.lowlevel.current_clock", + "trio.lowlevel.current_root_task", + "trio.lowlevel.checkpoint_if_cancelled", + "trio.lowlevel.spawn_system_task", + "trio.lowlevel.wait_readable", + "trio.lowlevel.wait_writable", + "trio.lowlevel.notify_closing", + "trio.lowlevel.start_thread_soon", + "trio.lowlevel.start_guest_run", + "trio.lowlevel.open_process", + "trio._unix_pipes.FdStream", + "trio.socket.fromfd", + "trio.socket.from_stdlib_socket", + "trio.socket.getprotobyname", + "trio.socket.socketpair", + "trio.socket.getnameinfo", + "trio.socket.socket", + "trio.socket.getaddrinfo", + "trio.socket.set_custom_hostname_resolver", + "trio.socket.set_custom_socket_factory", + "trio.testing.wait_all_tasks_blocked", + "trio._core._mock_clock.MockClock", + "trio._core._mock_clock.MockClock.__init__", + "trio._core._mock_clock.MockClock.__repr__", + "trio._core._mock_clock.MockClock.rate", + "trio._core._mock_clock.MockClock.autojump_threshold", + "trio._core._mock_clock.MockClock.start_clock", + "trio._core._mock_clock.MockClock.current_time", + "trio._core._mock_clock.MockClock.deadline_to_sleep_time", + "trio._core._mock_clock.MockClock.jump", + 
"trio.testing.trio_test", + "trio.testing.assert_checkpoints", + "trio.testing.assert_no_checkpoints", + "trio.testing._sequencer.Sequencer", + "trio.testing.check_one_way_stream", + "trio.testing.check_two_way_stream", + "trio.testing.check_half_closeable_stream", + "trio.testing._memory_streams.MemorySendStream", + "trio.testing._memory_streams.MemorySendStream.__init__", + "trio.testing._memory_streams.MemorySendStream.send_all", + "trio.testing._memory_streams.MemorySendStream.wait_send_all_might_not_block", + "trio.testing._memory_streams.MemorySendStream.close", + "trio.testing._memory_streams.MemorySendStream.aclose", + "trio.testing._memory_streams.MemorySendStream.get_data", + "trio.testing._memory_streams.MemorySendStream.get_data_nowait", + "trio.testing._memory_streams.MemorySendStream.send_all_hook", + "trio.testing._memory_streams.MemorySendStream.wait_send_all_might_not_block_hook", + "trio.testing._memory_streams.MemorySendStream.close_hook", + "trio.testing._memory_streams.MemoryReceiveStream", + "trio.testing._memory_streams.MemoryReceiveStream.__init__", + "trio.testing._memory_streams.MemoryReceiveStream.receive_some", + "trio.testing._memory_streams.MemoryReceiveStream.close", + "trio.testing._memory_streams.MemoryReceiveStream.aclose", + "trio.testing._memory_streams.MemoryReceiveStream.put_data", + "trio.testing._memory_streams.MemoryReceiveStream.put_eof", + "trio.testing._memory_streams.MemoryReceiveStream.receive_some_hook", + "trio.testing._memory_streams.MemoryReceiveStream.close_hook", + "trio.testing.memory_stream_pump", + "trio.testing.memory_stream_one_way_pair", + "trio.testing.memory_stream_pair", + "trio.testing.lockstep_stream_one_way_pair", + "trio.testing.lockstep_stream_pair", + "trio.testing.open_stream_to_socket_listener", + "trio.tests.TestsDeprecationWrapper", + "trio.to_thread.current_default_thread_limiter" + ] + } +} diff --git a/trio/abc.py b/trio/abc.py index ce0a1f6c00..dd4d4fcd08 100644 --- a/trio/abc.py +++ b/trio/abc.py @@ -4,18 +4,20 @@ # temporaries, imports, etc. when implementing the module. So we put the # implementation in an underscored module, and then re-export the public parts # here. 
+ +# Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) from ._abc import ( - Clock, - Instrument, - AsyncResource, - SendStream, - ReceiveStream, - Stream, - HalfCloseableStream, - SocketFactory, - HostnameResolver, - Listener, - SendChannel, - ReceiveChannel, - Channel, + Clock as Clock, + Instrument as Instrument, + AsyncResource as AsyncResource, + SendStream as SendStream, + ReceiveStream as ReceiveStream, + Stream as Stream, + HalfCloseableStream as HalfCloseableStream, + SocketFactory as SocketFactory, + HostnameResolver as HostnameResolver, + Listener as Listener, + SendChannel as SendChannel, + ReceiveChannel as ReceiveChannel, + Channel as Channel, ) diff --git a/trio/from_thread.py b/trio/from_thread.py index 296a5a89ea..8c2b490705 100644 --- a/trio/from_thread.py +++ b/trio/from_thread.py @@ -3,5 +3,9 @@ an external thread by means of a Trio Token present in Thread Local Storage """ + from ._threads import from_thread_run as run from ._threads import from_thread_run_sync as run_sync + +# need to use __all__ for pyright --verifytypes to see re-exports when renaming them +__all__ = ["run", "run_sync"] diff --git a/trio/lowlevel.py b/trio/lowlevel.py index 004692475f..b7f4f3a725 100644 --- a/trio/lowlevel.py +++ b/trio/lowlevel.py @@ -8,68 +8,68 @@ import typing as _t # This is the union of a subset of trio/_core/ and some things from trio/*.py. -# See comments in trio/__init__.py for details. To make static analysis easier, -# this lists all possible symbols from trio._core, and then we prune those that -# aren't available on this system. After that we add some symbols from trio/*.py. +# See comments in trio/__init__.py for details. + +# Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) # Generally available symbols from ._core import ( - cancel_shielded_checkpoint, - Abort, - RaiseCancelT, - wait_task_rescheduled, - enable_ki_protection, - disable_ki_protection, - currently_ki_protected, - Task, - checkpoint, - current_task, - ParkingLot, - UnboundedQueue, - RunVar, - TrioToken, - current_trio_token, - temporarily_detach_coroutine_object, - permanently_detach_coroutine_object, - reattach_detached_coroutine_object, - current_statistics, - reschedule, - remove_instrument, - add_instrument, - current_clock, - current_root_task, - checkpoint_if_cancelled, - spawn_system_task, - wait_readable, - wait_writable, - notify_closing, - start_thread_soon, - start_guest_run, + cancel_shielded_checkpoint as cancel_shielded_checkpoint, + Abort as Abort, + RaiseCancelT as RaiseCancelT, + wait_task_rescheduled as wait_task_rescheduled, + enable_ki_protection as enable_ki_protection, + disable_ki_protection as disable_ki_protection, + currently_ki_protected as currently_ki_protected, + Task as Task, + checkpoint as checkpoint, + current_task as current_task, + ParkingLot as ParkingLot, + UnboundedQueue as UnboundedQueue, + RunVar as RunVar, + TrioToken as TrioToken, + current_trio_token as current_trio_token, + temporarily_detach_coroutine_object as temporarily_detach_coroutine_object, + permanently_detach_coroutine_object as permanently_detach_coroutine_object, + reattach_detached_coroutine_object as reattach_detached_coroutine_object, + current_statistics as current_statistics, + reschedule as reschedule, + remove_instrument as remove_instrument, + add_instrument as add_instrument, + current_clock as current_clock, + current_root_task as current_root_task, + checkpoint_if_cancelled as checkpoint_if_cancelled, + spawn_system_task 
as spawn_system_task, + wait_readable as wait_readable, + wait_writable as wait_writable, + notify_closing as notify_closing, + start_thread_soon as start_thread_soon, + start_guest_run as start_guest_run, ) -from ._subprocess import open_process +from ._subprocess import open_process as open_process if sys.platform == "win32": # Windows symbols from ._core import ( - current_iocp, - register_with_iocp, - wait_overlapped, - monitor_completion_key, - readinto_overlapped, - write_overlapped, + current_iocp as current_iocp, + register_with_iocp as register_with_iocp, + wait_overlapped as wait_overlapped, + monitor_completion_key as monitor_completion_key, + readinto_overlapped as readinto_overlapped, + write_overlapped as write_overlapped, ) - from ._wait_for_object import WaitForSingleObject + from ._wait_for_object import WaitForSingleObject as WaitForSingleObject else: # Unix symbols - from ._unix_pipes import FdStream + from ._unix_pipes import FdStream as FdStream # Kqueue-specific symbols if sys.platform != "linux" and (_t.TYPE_CHECKING or not hasattr(_select, "epoll")): from ._core import ( - current_kqueue, - monitor_kevent, - wait_kevent, + current_kqueue as current_kqueue, + monitor_kevent as monitor_kevent, + wait_kevent as wait_kevent, ) del sys diff --git a/trio/socket.py b/trio/socket.py index d4beb159c0..6d3ed366d4 100644 --- a/trio/socket.py +++ b/trio/socket.py @@ -6,136 +6,13 @@ # here. # We still have some underscore names though but only a few. + +# Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) + from . import _socket import sys import typing as _t -# The socket module exports a bunch of platform-specific constants. We want to -# re-export them. Since the exact set of constants varies depending on Python -# version, platform, the libc installed on the system where Python was built, -# etc., we figure out which constants to re-export dynamically at runtime (see -# below). But that confuses static analysis tools like jedi and mypy. So this -# import statement statically lists every constant that *could* be -# exported. It always fails at runtime, since no single Python build exports -# all these constants, but it lets static analysis tools understand what's -# going on. There's a test in test_exports.py to make sure that the list is -# kept up to date. 
-try: - # fmt: off - from socket import ( # type: ignore - CMSG_LEN, CMSG_SPACE, CAPI, AF_UNSPEC, AF_INET, AF_UNIX, AF_IPX, - AF_APPLETALK, AF_INET6, AF_ROUTE, AF_LINK, AF_SNA, PF_SYSTEM, - AF_SYSTEM, SOCK_STREAM, SOCK_DGRAM, SOCK_RAW, SOCK_SEQPACKET, SOCK_RDM, - SO_DEBUG, SO_ACCEPTCONN, SO_REUSEADDR, SO_KEEPALIVE, SO_DONTROUTE, - SO_BROADCAST, SO_USELOOPBACK, SO_LINGER, SO_OOBINLINE, SO_REUSEPORT, - SO_SNDBUF, SO_RCVBUF, SO_SNDLOWAT, SO_RCVLOWAT, SO_SNDTIMEO, - SO_RCVTIMEO, SO_ERROR, SO_TYPE, LOCAL_PEERCRED, SOMAXCONN, SCM_RIGHTS, - SCM_CREDS, MSG_OOB, MSG_PEEK, MSG_DONTROUTE, MSG_DONTWAIT, MSG_EOR, - MSG_TRUNC, MSG_CTRUNC, MSG_WAITALL, MSG_EOF, SOL_SOCKET, SOL_IP, - SOL_TCP, SOL_UDP, IPPROTO_IP, IPPROTO_HOPOPTS, IPPROTO_ICMP, - IPPROTO_IGMP, IPPROTO_GGP, IPPROTO_IPV4, IPPROTO_IPIP, IPPROTO_TCP, - IPPROTO_EGP, IPPROTO_PUP, IPPROTO_UDP, IPPROTO_IDP, IPPROTO_HELLO, - IPPROTO_ND, IPPROTO_TP, IPPROTO_ROUTING, IPPROTO_FRAGMENT, - IPPROTO_RSVP, IPPROTO_GRE, IPPROTO_ESP, IPPROTO_AH, IPPROTO_ICMPV6, - IPPROTO_NONE, IPPROTO_DSTOPTS, IPPROTO_XTP, IPPROTO_EON, IPPROTO_PIM, - IPPROTO_IPCOMP, IPPROTO_SCTP, IPPROTO_RAW, IPPROTO_MAX, IPPROTO_MPTCP, - SYSPROTO_CONTROL, IPPORT_RESERVED, IPPORT_USERRESERVED, INADDR_ANY, - INADDR_BROADCAST, INADDR_LOOPBACK, INADDR_UNSPEC_GROUP, - INADDR_ALLHOSTS_GROUP, INADDR_MAX_LOCAL_GROUP, INADDR_NONE, IP_OPTIONS, - IP_HDRINCL, IP_TOS, IP_TTL, IP_RECVOPTS, IP_RECVRETOPTS, - IP_RECVDSTADDR, IP_RETOPTS, IP_MULTICAST_IF, IP_MULTICAST_TTL, - IP_MULTICAST_LOOP, IP_ADD_MEMBERSHIP, IP_DROP_MEMBERSHIP, - IP_DEFAULT_MULTICAST_TTL, IP_DEFAULT_MULTICAST_LOOP, - IP_MAX_MEMBERSHIPS, IPV6_JOIN_GROUP, IPV6_LEAVE_GROUP, - IPV6_MULTICAST_HOPS, IPV6_MULTICAST_IF, IPV6_MULTICAST_LOOP, - IPV6_UNICAST_HOPS, IPV6_V6ONLY, IPV6_CHECKSUM, IPV6_RECVTCLASS, - IPV6_RTHDR_TYPE_0, IPV6_TCLASS, TCP_NODELAY, TCP_MAXSEG, TCP_KEEPINTVL, - TCP_KEEPCNT, TCP_FASTOPEN, TCP_NOTSENT_LOWAT, EAI_ADDRFAMILY, - EAI_AGAIN, EAI_BADFLAGS, EAI_FAIL, EAI_FAMILY, EAI_MEMORY, EAI_NODATA, - EAI_NONAME, EAI_OVERFLOW, EAI_SERVICE, EAI_SOCKTYPE, EAI_SYSTEM, - EAI_BADHINTS, EAI_PROTOCOL, EAI_MAX, AI_PASSIVE, AI_CANONNAME, - AI_NUMERICHOST, AI_NUMERICSERV, AI_MASK, AI_ALL, AI_V4MAPPED_CFG, - AI_ADDRCONFIG, AI_V4MAPPED, AI_DEFAULT, NI_MAXHOST, NI_MAXSERV, - NI_NOFQDN, NI_NUMERICHOST, NI_NAMEREQD, NI_NUMERICSERV, NI_DGRAM, - SHUT_RD, SHUT_WR, SHUT_RDWR, EBADF, EAGAIN, EWOULDBLOCK, AF_ASH, - AF_ATMPVC, AF_ATMSVC, AF_AX25, AF_BLUETOOTH, AF_BRIDGE, AF_ECONET, - AF_IRDA, AF_KEY, AF_LLC, AF_NETBEUI, AF_NETLINK, AF_NETROM, AF_PACKET, - AF_PPPOX, AF_ROSE, AF_SECURITY, AF_WANPIPE, AF_X25, BDADDR_ANY, - BDADDR_LOCAL, FD_SETSIZE, IPV6_DSTOPTS, IPV6_HOPLIMIT, IPV6_HOPOPTS, - IPV6_NEXTHOP, IPV6_PKTINFO, IPV6_RECVDSTOPTS, IPV6_RECVHOPLIMIT, - IPV6_RECVHOPOPTS, IPV6_RECVPKTINFO, IPV6_RECVRTHDR, IPV6_RTHDR, - IPV6_RTHDRDSTOPTS, MSG_ERRQUEUE, NETLINK_DNRTMSG, NETLINK_FIREWALL, - NETLINK_IP6_FW, NETLINK_NFLOG, NETLINK_ROUTE, NETLINK_USERSOCK, - NETLINK_XFRM, PACKET_BROADCAST, PACKET_FASTROUTE, PACKET_HOST, - PACKET_LOOPBACK, PACKET_MULTICAST, PACKET_OTHERHOST, PACKET_OUTGOING, - POLLERR, POLLHUP, POLLIN, POLLMSG, POLLNVAL, POLLOUT, POLLPRI, - POLLRDBAND, POLLRDNORM, POLLWRNORM, SIOCGIFINDEX, SIOCGIFNAME, - SOCK_CLOEXEC, TCP_CORK, TCP_DEFER_ACCEPT, TCP_INFO, TCP_KEEPIDLE, - TCP_LINGER2, TCP_QUICKACK, TCP_SYNCNT, TCP_WINDOW_CLAMP, AF_ALG, - AF_CAN, AF_RDS, AF_TIPC, AF_VSOCK, ALG_OP_DECRYPT, ALG_OP_ENCRYPT, - ALG_OP_SIGN, ALG_OP_VERIFY, ALG_SET_AEAD_ASSOCLEN, - ALG_SET_AEAD_AUTHSIZE, ALG_SET_IV, ALG_SET_KEY, ALG_SET_OP, - ALG_SET_PUBKEY, CAN_BCM, 
CAN_BCM_RX_CHANGED, CAN_BCM_RX_DELETE, - CAN_BCM_RX_READ, CAN_BCM_RX_SETUP, CAN_BCM_RX_STATUS, - CAN_BCM_RX_TIMEOUT, CAN_BCM_TX_DELETE, CAN_BCM_TX_EXPIRED, - CAN_BCM_TX_READ, CAN_BCM_TX_SEND, CAN_BCM_TX_SETUP, CAN_BCM_TX_STATUS, - CAN_EFF_FLAG, CAN_EFF_MASK, CAN_ERR_FLAG, CAN_ERR_MASK, CAN_ISOTP, - CAN_RAW, CAN_RAW_ERR_FILTER, CAN_RAW_FD_FRAMES, CAN_RAW_FILTER, - CAN_RAW_LOOPBACK, CAN_RAW_RECV_OWN_MSGS, CAN_RTR_FLAG, CAN_SFF_MASK, - IOCTL_VM_SOCKETS_GET_LOCAL_CID, IPV6_DONTFRAG, IPV6_PATHMTU, - IPV6_RECVPATHMTU, IP_TRANSPARENT, MSG_CMSG_CLOEXEC, MSG_CONFIRM, - MSG_FASTOPEN, MSG_MORE, MSG_NOSIGNAL, NETLINK_CRYPTO, PF_CAN, - PF_PACKET, PF_RDS, SCM_CREDENTIALS, SOCK_NONBLOCK, SOL_ALG, - SOL_CAN_BASE, SOL_CAN_RAW, SOL_TIPC, SO_BINDTODEVICE, SO_DOMAIN, - SO_MARK, SO_PASSCRED, SO_PASSSEC, SO_PEERCRED, SO_PEERSEC, SO_PRIORITY, - SO_PROTOCOL, SO_VM_SOCKETS_BUFFER_MAX_SIZE, - SO_VM_SOCKETS_BUFFER_MIN_SIZE, SO_VM_SOCKETS_BUFFER_SIZE, - TCP_CONGESTION, TCP_USER_TIMEOUT, TIPC_ADDR_ID, TIPC_ADDR_NAME, - TIPC_ADDR_NAMESEQ, TIPC_CFG_SRV, TIPC_CLUSTER_SCOPE, TIPC_CONN_TIMEOUT, - TIPC_CRITICAL_IMPORTANCE, TIPC_DEST_DROPPABLE, TIPC_HIGH_IMPORTANCE, - TIPC_IMPORTANCE, TIPC_LOW_IMPORTANCE, TIPC_MEDIUM_IMPORTANCE, - TIPC_NODE_SCOPE, TIPC_PUBLISHED, TIPC_SRC_DROPPABLE, - TIPC_SUBSCR_TIMEOUT, TIPC_SUB_CANCEL, TIPC_SUB_PORTS, TIPC_SUB_SERVICE, - TIPC_TOP_SRV, TIPC_WAIT_FOREVER, TIPC_WITHDRAWN, TIPC_ZONE_SCOPE, - VMADDR_CID_ANY, VMADDR_CID_HOST, VMADDR_PORT_ANY, - VM_SOCKETS_INVALID_VERSION, MSG_BCAST, MSG_MCAST, RCVALL_MAX, - RCVALL_OFF, RCVALL_ON, RCVALL_SOCKETLEVELONLY, SIO_KEEPALIVE_VALS, - SIO_LOOPBACK_FAST_PATH, SIO_RCVALL, SO_EXCLUSIVEADDRUSE, HCI_FILTER, - BTPROTO_SCO, BTPROTO_HCI, HCI_TIME_STAMP, SOL_RDS, BTPROTO_L2CAP, - BTPROTO_RFCOMM, HCI_DATA_DIR, SOL_HCI, CAN_BCM_RX_ANNOUNCE_RESUME, - CAN_BCM_RX_CHECK_DLC, CAN_BCM_RX_FILTER_ID, CAN_BCM_RX_NO_AUTOTIMER, - CAN_BCM_RX_RTR_FRAME, CAN_BCM_SETTIMER, CAN_BCM_STARTTIMER, - CAN_BCM_TX_ANNOUNCE, CAN_BCM_TX_COUNTEVT, CAN_BCM_TX_CP_CAN_ID, - CAN_BCM_TX_RESET_MULTI_IDX, IPPROTO_CBT, IPPROTO_ICLFXBM, IPPROTO_IGP, - IPPROTO_L2TP, IPPROTO_PGM, IPPROTO_RDP, IPPROTO_ST, AF_QIPCRTR, - CAN_BCM_CAN_FD_FRAME, IPPROTO_MOBILE, IPV6_USE_MIN_MTU, - MSG_NOTIFICATION, SO_SETFIB, CAN_J1939, CAN_RAW_JOIN_FILTERS, - IPPROTO_UDPLITE, J1939_EE_INFO_NONE, J1939_EE_INFO_TX_ABORT, - J1939_FILTER_MAX, J1939_IDLE_ADDR, J1939_MAX_UNICAST_ADDR, - J1939_NLA_BYTES_ACKED, J1939_NLA_PAD, J1939_NO_ADDR, J1939_NO_NAME, - J1939_NO_PGN, J1939_PGN_ADDRESS_CLAIMED, J1939_PGN_ADDRESS_COMMANDED, - J1939_PGN_MAX, J1939_PGN_PDU1_MAX, J1939_PGN_REQUEST, - SCM_J1939_DEST_ADDR, SCM_J1939_DEST_NAME, SCM_J1939_ERRQUEUE, - SCM_J1939_PRIO, SO_J1939_ERRQUEUE, SO_J1939_FILTER, SO_J1939_PROMISC, - SO_J1939_SEND_PRIO, UDPLITE_RECV_CSCOV, UDPLITE_SEND_CSCOV, IP_RECVTOS, - TCP_KEEPALIVE, SO_INCOMING_CPU, FD_ACCEPT, FD_CLOSE, FD_CLOSE_BIT, - FD_CONNECT, FD_CONNECT_BIT, FD_READ, FD_WRITE, INFINITE, - WSA_FLAG_OVERLAPPED, WSA_INVALID_HANDLE, WSA_INVALID_PARAMETER, - WSA_IO_INCOMPLETE, WSA_IO_PENDING, WSA_NOT_ENOUGH_MEMORY, - WSA_OPERATION_ABORTED, WSA_WAIT_FAILED, WSA_WAIT_TIMEOUT, - ETHERTYPE_ARP, ETHERTYPE_IP, ETHERTYPE_IPV6, ETHERTYPE_VLAN, ETH_P_ALL, - IP_ADD_SOURCE_MEMBERSHIP, IP_BLOCK_SOURCE, IP_DROP_SOURCE_MEMBERSHIP, - IP_PKTINFO, IP_UNBLOCK_SOURCE, TCP_CC_INFO, TCP_FASTOPEN_CONNECT, - TCP_FASTOPEN_KEY, TCP_FASTOPEN_NO_COOKIE, TCP_INQ, TCP_MD5SIG, - TCP_MD5SIG_EXT, TCP_QUEUE_SEQ, TCP_REPAIR, TCP_REPAIR_OPTIONS, - TCP_REPAIR_QUEUE, TCP_REPAIR_WINDOW, TCP_SAVED_SYN, TCP_SAVE_SYN, - TCP_THIN_DUPACK, 
TCP_THIN_LINEAR_TIMEOUTS, TCP_TIMESTAMP, TCP_TX_DELAY, - TCP_ULP, TCP_ZEROCOPY_RECEIVE, - ) - # fmt: on -except ImportError: - pass - # Dynamically re-export whatever constants this particular Python happens to # have: import socket as _stdlib_socket @@ -157,48 +34,53 @@ # import the overwrites from ._socket import ( - fromfd, - from_stdlib_socket, - getprotobyname, - socketpair, - getnameinfo, - socket, - getaddrinfo, - set_custom_hostname_resolver, - set_custom_socket_factory, - SocketType, + fromfd as fromfd, + from_stdlib_socket as from_stdlib_socket, + getprotobyname as getprotobyname, + socketpair as socketpair, + getnameinfo as getnameinfo, + socket as socket, + getaddrinfo as getaddrinfo, + set_custom_hostname_resolver as set_custom_hostname_resolver, + set_custom_socket_factory as set_custom_socket_factory, + SocketType as SocketType, ) # not always available so expose only if if sys.platform == "win32" or not _t.TYPE_CHECKING: try: - from ._socket import fromshare + from ._socket import fromshare as fromshare except ImportError: pass # expose these functions to trio.socket from socket import ( - gaierror, - herror, - gethostname, - ntohs, - htonl, - htons, - inet_aton, - inet_ntoa, - inet_pton, - inet_ntop, + gaierror as gaierror, + herror as herror, + gethostname as gethostname, + ntohs as ntohs, + htonl as htonl, + htons as htons, + inet_aton as inet_aton, + inet_ntoa as inet_ntoa, + inet_pton as inet_pton, + inet_ntop as inet_ntop, ) # not always available so expose only if if sys.platform != "win32" or not _t.TYPE_CHECKING: try: - from socket import sethostname, if_nameindex, if_nametoindex, if_indextoname + from socket import ( + sethostname as sethostname, + if_nameindex as if_nameindex, + if_nametoindex as if_nametoindex, + if_indextoname as if_indextoname, + ) except ImportError: pass # get names used by Trio that we define on our own -from ._socket import IPPROTO_IPV6 +from ._socket import IPPROTO_IPV6 as IPPROTO_IPV6 if _t.TYPE_CHECKING: IP_BIND_ADDRESS_NO_PORT: int @@ -210,3 +92,477 @@ IP_BIND_ADDRESS_NO_PORT = 24 del sys + + +# The socket module exports a bunch of platform-specific constants. We want to +# re-export them. Since the exact set of constants varies depending on Python +# version, platform, the libc installed on the system where Python was built, +# etc., we figure out which constants to re-export dynamically at runtime (see +# below). But that confuses static analysis tools like jedi and mypy. So this +# import statement statically lists every constant that *could* be +# exported. There's a test in test_exports.py to make sure that the list is +# kept up to date. 
+if _t.TYPE_CHECKING: + from socket import ( # type: ignore[attr-defined] + CMSG_LEN as CMSG_LEN, + CMSG_SPACE as CMSG_SPACE, + CAPI as CAPI, + AF_UNSPEC as AF_UNSPEC, + AF_INET as AF_INET, + AF_UNIX as AF_UNIX, + AF_IPX as AF_IPX, + AF_APPLETALK as AF_APPLETALK, + AF_INET6 as AF_INET6, + AF_ROUTE as AF_ROUTE, + AF_LINK as AF_LINK, + AF_SNA as AF_SNA, + PF_SYSTEM as PF_SYSTEM, + AF_SYSTEM as AF_SYSTEM, + SOCK_STREAM as SOCK_STREAM, + SOCK_DGRAM as SOCK_DGRAM, + SOCK_RAW as SOCK_RAW, + SOCK_SEQPACKET as SOCK_SEQPACKET, + SOCK_RDM as SOCK_RDM, + SO_DEBUG as SO_DEBUG, + SO_ACCEPTCONN as SO_ACCEPTCONN, + SO_REUSEADDR as SO_REUSEADDR, + SO_KEEPALIVE as SO_KEEPALIVE, + SO_DONTROUTE as SO_DONTROUTE, + SO_BROADCAST as SO_BROADCAST, + SO_USELOOPBACK as SO_USELOOPBACK, + SO_LINGER as SO_LINGER, + SO_OOBINLINE as SO_OOBINLINE, + SO_REUSEPORT as SO_REUSEPORT, + SO_SNDBUF as SO_SNDBUF, + SO_RCVBUF as SO_RCVBUF, + SO_SNDLOWAT as SO_SNDLOWAT, + SO_RCVLOWAT as SO_RCVLOWAT, + SO_SNDTIMEO as SO_SNDTIMEO, + SO_RCVTIMEO as SO_RCVTIMEO, + SO_ERROR as SO_ERROR, + SO_TYPE as SO_TYPE, + LOCAL_PEERCRED as LOCAL_PEERCRED, + SOMAXCONN as SOMAXCONN, + SCM_RIGHTS as SCM_RIGHTS, + SCM_CREDS as SCM_CREDS, + MSG_OOB as MSG_OOB, + MSG_PEEK as MSG_PEEK, + MSG_DONTROUTE as MSG_DONTROUTE, + MSG_DONTWAIT as MSG_DONTWAIT, + MSG_EOR as MSG_EOR, + MSG_TRUNC as MSG_TRUNC, + MSG_CTRUNC as MSG_CTRUNC, + MSG_WAITALL as MSG_WAITALL, + MSG_EOF as MSG_EOF, + SOL_SOCKET as SOL_SOCKET, + SOL_IP as SOL_IP, + SOL_TCP as SOL_TCP, + SOL_UDP as SOL_UDP, + IPPROTO_IP as IPPROTO_IP, + IPPROTO_HOPOPTS as IPPROTO_HOPOPTS, + IPPROTO_ICMP as IPPROTO_ICMP, + IPPROTO_IGMP as IPPROTO_IGMP, + IPPROTO_GGP as IPPROTO_GGP, + IPPROTO_IPV4 as IPPROTO_IPV4, + IPPROTO_IPIP as IPPROTO_IPIP, + IPPROTO_TCP as IPPROTO_TCP, + IPPROTO_EGP as IPPROTO_EGP, + IPPROTO_PUP as IPPROTO_PUP, + IPPROTO_UDP as IPPROTO_UDP, + IPPROTO_IDP as IPPROTO_IDP, + IPPROTO_HELLO as IPPROTO_HELLO, + IPPROTO_ND as IPPROTO_ND, + IPPROTO_TP as IPPROTO_TP, + IPPROTO_ROUTING as IPPROTO_ROUTING, + IPPROTO_FRAGMENT as IPPROTO_FRAGMENT, + IPPROTO_RSVP as IPPROTO_RSVP, + IPPROTO_GRE as IPPROTO_GRE, + IPPROTO_ESP as IPPROTO_ESP, + IPPROTO_AH as IPPROTO_AH, + IPPROTO_ICMPV6 as IPPROTO_ICMPV6, + IPPROTO_NONE as IPPROTO_NONE, + IPPROTO_DSTOPTS as IPPROTO_DSTOPTS, + IPPROTO_XTP as IPPROTO_XTP, + IPPROTO_EON as IPPROTO_EON, + IPPROTO_PIM as IPPROTO_PIM, + IPPROTO_IPCOMP as IPPROTO_IPCOMP, + IPPROTO_SCTP as IPPROTO_SCTP, + IPPROTO_RAW as IPPROTO_RAW, + IPPROTO_MAX as IPPROTO_MAX, + IPPROTO_MPTCP as IPPROTO_MPTCP, + SYSPROTO_CONTROL as SYSPROTO_CONTROL, + IPPORT_RESERVED as IPPORT_RESERVED, + IPPORT_USERRESERVED as IPPORT_USERRESERVED, + INADDR_ANY as INADDR_ANY, + INADDR_BROADCAST as INADDR_BROADCAST, + INADDR_LOOPBACK as INADDR_LOOPBACK, + INADDR_UNSPEC_GROUP as INADDR_UNSPEC_GROUP, + INADDR_ALLHOSTS_GROUP as INADDR_ALLHOSTS_GROUP, + INADDR_MAX_LOCAL_GROUP as INADDR_MAX_LOCAL_GROUP, + INADDR_NONE as INADDR_NONE, + IP_OPTIONS as IP_OPTIONS, + IP_HDRINCL as IP_HDRINCL, + IP_TOS as IP_TOS, + IP_TTL as IP_TTL, + IP_RECVOPTS as IP_RECVOPTS, + IP_RECVRETOPTS as IP_RECVRETOPTS, + IP_RECVDSTADDR as IP_RECVDSTADDR, + IP_RETOPTS as IP_RETOPTS, + IP_MULTICAST_IF as IP_MULTICAST_IF, + IP_MULTICAST_TTL as IP_MULTICAST_TTL, + IP_MULTICAST_LOOP as IP_MULTICAST_LOOP, + IP_ADD_MEMBERSHIP as IP_ADD_MEMBERSHIP, + IP_DROP_MEMBERSHIP as IP_DROP_MEMBERSHIP, + IP_DEFAULT_MULTICAST_TTL as IP_DEFAULT_MULTICAST_TTL, + IP_DEFAULT_MULTICAST_LOOP as IP_DEFAULT_MULTICAST_LOOP, + IP_MAX_MEMBERSHIPS as IP_MAX_MEMBERSHIPS, + 
IPV6_JOIN_GROUP as IPV6_JOIN_GROUP, + IPV6_LEAVE_GROUP as IPV6_LEAVE_GROUP, + IPV6_MULTICAST_HOPS as IPV6_MULTICAST_HOPS, + IPV6_MULTICAST_IF as IPV6_MULTICAST_IF, + IPV6_MULTICAST_LOOP as IPV6_MULTICAST_LOOP, + IPV6_UNICAST_HOPS as IPV6_UNICAST_HOPS, + IPV6_V6ONLY as IPV6_V6ONLY, + IPV6_CHECKSUM as IPV6_CHECKSUM, + IPV6_RECVTCLASS as IPV6_RECVTCLASS, + IPV6_RTHDR_TYPE_0 as IPV6_RTHDR_TYPE_0, + IPV6_TCLASS as IPV6_TCLASS, + TCP_NODELAY as TCP_NODELAY, + TCP_MAXSEG as TCP_MAXSEG, + TCP_KEEPINTVL as TCP_KEEPINTVL, + TCP_KEEPCNT as TCP_KEEPCNT, + TCP_FASTOPEN as TCP_FASTOPEN, + TCP_NOTSENT_LOWAT as TCP_NOTSENT_LOWAT, + EAI_ADDRFAMILY as EAI_ADDRFAMILY, + EAI_AGAIN as EAI_AGAIN, + EAI_BADFLAGS as EAI_BADFLAGS, + EAI_FAIL as EAI_FAIL, + EAI_FAMILY as EAI_FAMILY, + EAI_MEMORY as EAI_MEMORY, + EAI_NODATA as EAI_NODATA, + EAI_NONAME as EAI_NONAME, + EAI_OVERFLOW as EAI_OVERFLOW, + EAI_SERVICE as EAI_SERVICE, + EAI_SOCKTYPE as EAI_SOCKTYPE, + EAI_SYSTEM as EAI_SYSTEM, + EAI_BADHINTS as EAI_BADHINTS, + EAI_PROTOCOL as EAI_PROTOCOL, + EAI_MAX as EAI_MAX, + AI_PASSIVE as AI_PASSIVE, + AI_CANONNAME as AI_CANONNAME, + AI_NUMERICHOST as AI_NUMERICHOST, + AI_NUMERICSERV as AI_NUMERICSERV, + AI_MASK as AI_MASK, + AI_ALL as AI_ALL, + AI_V4MAPPED_CFG as AI_V4MAPPED_CFG, + AI_ADDRCONFIG as AI_ADDRCONFIG, + AI_V4MAPPED as AI_V4MAPPED, + AI_DEFAULT as AI_DEFAULT, + NI_MAXHOST as NI_MAXHOST, + NI_MAXSERV as NI_MAXSERV, + NI_NOFQDN as NI_NOFQDN, + NI_NUMERICHOST as NI_NUMERICHOST, + NI_NAMEREQD as NI_NAMEREQD, + NI_NUMERICSERV as NI_NUMERICSERV, + NI_DGRAM as NI_DGRAM, + SHUT_RD as SHUT_RD, + SHUT_WR as SHUT_WR, + SHUT_RDWR as SHUT_RDWR, + EBADF as EBADF, + EAGAIN as EAGAIN, + EWOULDBLOCK as EWOULDBLOCK, + AF_ASH as AF_ASH, + AF_ATMPVC as AF_ATMPVC, + AF_ATMSVC as AF_ATMSVC, + AF_AX25 as AF_AX25, + AF_BLUETOOTH as AF_BLUETOOTH, + AF_BRIDGE as AF_BRIDGE, + AF_ECONET as AF_ECONET, + AF_IRDA as AF_IRDA, + AF_KEY as AF_KEY, + AF_LLC as AF_LLC, + AF_NETBEUI as AF_NETBEUI, + AF_NETLINK as AF_NETLINK, + AF_NETROM as AF_NETROM, + AF_PACKET as AF_PACKET, + AF_PPPOX as AF_PPPOX, + AF_ROSE as AF_ROSE, + AF_SECURITY as AF_SECURITY, + AF_WANPIPE as AF_WANPIPE, + AF_X25 as AF_X25, + BDADDR_ANY as BDADDR_ANY, + BDADDR_LOCAL as BDADDR_LOCAL, + FD_SETSIZE as FD_SETSIZE, + IPV6_DSTOPTS as IPV6_DSTOPTS, + IPV6_HOPLIMIT as IPV6_HOPLIMIT, + IPV6_HOPOPTS as IPV6_HOPOPTS, + IPV6_NEXTHOP as IPV6_NEXTHOP, + IPV6_PKTINFO as IPV6_PKTINFO, + IPV6_RECVDSTOPTS as IPV6_RECVDSTOPTS, + IPV6_RECVHOPLIMIT as IPV6_RECVHOPLIMIT, + IPV6_RECVHOPOPTS as IPV6_RECVHOPOPTS, + IPV6_RECVPKTINFO as IPV6_RECVPKTINFO, + IPV6_RECVRTHDR as IPV6_RECVRTHDR, + IPV6_RTHDR as IPV6_RTHDR, + IPV6_RTHDRDSTOPTS as IPV6_RTHDRDSTOPTS, + MSG_ERRQUEUE as MSG_ERRQUEUE, + NETLINK_DNRTMSG as NETLINK_DNRTMSG, + NETLINK_FIREWALL as NETLINK_FIREWALL, + NETLINK_IP6_FW as NETLINK_IP6_FW, + NETLINK_NFLOG as NETLINK_NFLOG, + NETLINK_ROUTE as NETLINK_ROUTE, + NETLINK_USERSOCK as NETLINK_USERSOCK, + NETLINK_XFRM as NETLINK_XFRM, + PACKET_BROADCAST as PACKET_BROADCAST, + PACKET_FASTROUTE as PACKET_FASTROUTE, + PACKET_HOST as PACKET_HOST, + PACKET_LOOPBACK as PACKET_LOOPBACK, + PACKET_MULTICAST as PACKET_MULTICAST, + PACKET_OTHERHOST as PACKET_OTHERHOST, + PACKET_OUTGOING as PACKET_OUTGOING, + POLLERR as POLLERR, + POLLHUP as POLLHUP, + POLLIN as POLLIN, + POLLMSG as POLLMSG, + POLLNVAL as POLLNVAL, + POLLOUT as POLLOUT, + POLLPRI as POLLPRI, + POLLRDBAND as POLLRDBAND, + POLLRDNORM as POLLRDNORM, + POLLWRNORM as POLLWRNORM, + SIOCGIFINDEX as SIOCGIFINDEX, + SIOCGIFNAME as SIOCGIFNAME, + 
SOCK_CLOEXEC as SOCK_CLOEXEC, + TCP_CORK as TCP_CORK, + TCP_DEFER_ACCEPT as TCP_DEFER_ACCEPT, + TCP_INFO as TCP_INFO, + TCP_KEEPIDLE as TCP_KEEPIDLE, + TCP_LINGER2 as TCP_LINGER2, + TCP_QUICKACK as TCP_QUICKACK, + TCP_SYNCNT as TCP_SYNCNT, + TCP_WINDOW_CLAMP as TCP_WINDOW_CLAMP, + AF_ALG as AF_ALG, + AF_CAN as AF_CAN, + AF_RDS as AF_RDS, + AF_TIPC as AF_TIPC, + AF_VSOCK as AF_VSOCK, + ALG_OP_DECRYPT as ALG_OP_DECRYPT, + ALG_OP_ENCRYPT as ALG_OP_ENCRYPT, + ALG_OP_SIGN as ALG_OP_SIGN, + ALG_OP_VERIFY as ALG_OP_VERIFY, + ALG_SET_AEAD_ASSOCLEN as ALG_SET_AEAD_ASSOCLEN, + ALG_SET_AEAD_AUTHSIZE as ALG_SET_AEAD_AUTHSIZE, + ALG_SET_IV as ALG_SET_IV, + ALG_SET_KEY as ALG_SET_KEY, + ALG_SET_OP as ALG_SET_OP, + ALG_SET_PUBKEY as ALG_SET_PUBKEY, + CAN_BCM as CAN_BCM, + CAN_BCM_RX_CHANGED as CAN_BCM_RX_CHANGED, + CAN_BCM_RX_DELETE as CAN_BCM_RX_DELETE, + CAN_BCM_RX_READ as CAN_BCM_RX_READ, + CAN_BCM_RX_SETUP as CAN_BCM_RX_SETUP, + CAN_BCM_RX_STATUS as CAN_BCM_RX_STATUS, + CAN_BCM_RX_TIMEOUT as CAN_BCM_RX_TIMEOUT, + CAN_BCM_TX_DELETE as CAN_BCM_TX_DELETE, + CAN_BCM_TX_EXPIRED as CAN_BCM_TX_EXPIRED, + CAN_BCM_TX_READ as CAN_BCM_TX_READ, + CAN_BCM_TX_SEND as CAN_BCM_TX_SEND, + CAN_BCM_TX_SETUP as CAN_BCM_TX_SETUP, + CAN_BCM_TX_STATUS as CAN_BCM_TX_STATUS, + CAN_EFF_FLAG as CAN_EFF_FLAG, + CAN_EFF_MASK as CAN_EFF_MASK, + CAN_ERR_FLAG as CAN_ERR_FLAG, + CAN_ERR_MASK as CAN_ERR_MASK, + CAN_ISOTP as CAN_ISOTP, + CAN_RAW as CAN_RAW, + CAN_RAW_ERR_FILTER as CAN_RAW_ERR_FILTER, + CAN_RAW_FD_FRAMES as CAN_RAW_FD_FRAMES, + CAN_RAW_FILTER as CAN_RAW_FILTER, + CAN_RAW_LOOPBACK as CAN_RAW_LOOPBACK, + CAN_RAW_RECV_OWN_MSGS as CAN_RAW_RECV_OWN_MSGS, + CAN_RTR_FLAG as CAN_RTR_FLAG, + CAN_SFF_MASK as CAN_SFF_MASK, + IOCTL_VM_SOCKETS_GET_LOCAL_CID as IOCTL_VM_SOCKETS_GET_LOCAL_CID, + IPV6_DONTFRAG as IPV6_DONTFRAG, + IPV6_PATHMTU as IPV6_PATHMTU, + IPV6_RECVPATHMTU as IPV6_RECVPATHMTU, + IP_TRANSPARENT as IP_TRANSPARENT, + MSG_CMSG_CLOEXEC as MSG_CMSG_CLOEXEC, + MSG_CONFIRM as MSG_CONFIRM, + MSG_FASTOPEN as MSG_FASTOPEN, + MSG_MORE as MSG_MORE, + MSG_NOSIGNAL as MSG_NOSIGNAL, + NETLINK_CRYPTO as NETLINK_CRYPTO, + PF_CAN as PF_CAN, + PF_PACKET as PF_PACKET, + PF_RDS as PF_RDS, + SCM_CREDENTIALS as SCM_CREDENTIALS, + SOCK_NONBLOCK as SOCK_NONBLOCK, + SOL_ALG as SOL_ALG, + SOL_CAN_BASE as SOL_CAN_BASE, + SOL_CAN_RAW as SOL_CAN_RAW, + SOL_TIPC as SOL_TIPC, + SO_BINDTODEVICE as SO_BINDTODEVICE, + SO_DOMAIN as SO_DOMAIN, + SO_MARK as SO_MARK, + SO_PASSCRED as SO_PASSCRED, + SO_PASSSEC as SO_PASSSEC, + SO_PEERCRED as SO_PEERCRED, + SO_PEERSEC as SO_PEERSEC, + SO_PRIORITY as SO_PRIORITY, + SO_PROTOCOL as SO_PROTOCOL, + SO_VM_SOCKETS_BUFFER_MAX_SIZE as SO_VM_SOCKETS_BUFFER_MAX_SIZE, + SO_VM_SOCKETS_BUFFER_MIN_SIZE as SO_VM_SOCKETS_BUFFER_MIN_SIZE, + SO_VM_SOCKETS_BUFFER_SIZE as SO_VM_SOCKETS_BUFFER_SIZE, + TCP_CONGESTION as TCP_CONGESTION, + TCP_USER_TIMEOUT as TCP_USER_TIMEOUT, + TIPC_ADDR_ID as TIPC_ADDR_ID, + TIPC_ADDR_NAME as TIPC_ADDR_NAME, + TIPC_ADDR_NAMESEQ as TIPC_ADDR_NAMESEQ, + TIPC_CFG_SRV as TIPC_CFG_SRV, + TIPC_CLUSTER_SCOPE as TIPC_CLUSTER_SCOPE, + TIPC_CONN_TIMEOUT as TIPC_CONN_TIMEOUT, + TIPC_CRITICAL_IMPORTANCE as TIPC_CRITICAL_IMPORTANCE, + TIPC_DEST_DROPPABLE as TIPC_DEST_DROPPABLE, + TIPC_HIGH_IMPORTANCE as TIPC_HIGH_IMPORTANCE, + TIPC_IMPORTANCE as TIPC_IMPORTANCE, + TIPC_LOW_IMPORTANCE as TIPC_LOW_IMPORTANCE, + TIPC_MEDIUM_IMPORTANCE as TIPC_MEDIUM_IMPORTANCE, + TIPC_NODE_SCOPE as TIPC_NODE_SCOPE, + TIPC_PUBLISHED as TIPC_PUBLISHED, + TIPC_SRC_DROPPABLE as TIPC_SRC_DROPPABLE, + TIPC_SUBSCR_TIMEOUT as 
TIPC_SUBSCR_TIMEOUT, + TIPC_SUB_CANCEL as TIPC_SUB_CANCEL, + TIPC_SUB_PORTS as TIPC_SUB_PORTS, + TIPC_SUB_SERVICE as TIPC_SUB_SERVICE, + TIPC_TOP_SRV as TIPC_TOP_SRV, + TIPC_WAIT_FOREVER as TIPC_WAIT_FOREVER, + TIPC_WITHDRAWN as TIPC_WITHDRAWN, + TIPC_ZONE_SCOPE as TIPC_ZONE_SCOPE, + VMADDR_CID_ANY as VMADDR_CID_ANY, + VMADDR_CID_HOST as VMADDR_CID_HOST, + VMADDR_PORT_ANY as VMADDR_PORT_ANY, + VM_SOCKETS_INVALID_VERSION as VM_SOCKETS_INVALID_VERSION, + MSG_BCAST as MSG_BCAST, + MSG_MCAST as MSG_MCAST, + RCVALL_MAX as RCVALL_MAX, + RCVALL_OFF as RCVALL_OFF, + RCVALL_ON as RCVALL_ON, + RCVALL_SOCKETLEVELONLY as RCVALL_SOCKETLEVELONLY, + SIO_KEEPALIVE_VALS as SIO_KEEPALIVE_VALS, + SIO_LOOPBACK_FAST_PATH as SIO_LOOPBACK_FAST_PATH, + SIO_RCVALL as SIO_RCVALL, + SO_EXCLUSIVEADDRUSE as SO_EXCLUSIVEADDRUSE, + HCI_FILTER as HCI_FILTER, + BTPROTO_SCO as BTPROTO_SCO, + BTPROTO_HCI as BTPROTO_HCI, + HCI_TIME_STAMP as HCI_TIME_STAMP, + SOL_RDS as SOL_RDS, + BTPROTO_L2CAP as BTPROTO_L2CAP, + BTPROTO_RFCOMM as BTPROTO_RFCOMM, + HCI_DATA_DIR as HCI_DATA_DIR, + SOL_HCI as SOL_HCI, + CAN_BCM_RX_ANNOUNCE_RESUME as CAN_BCM_RX_ANNOUNCE_RESUME, + CAN_BCM_RX_CHECK_DLC as CAN_BCM_RX_CHECK_DLC, + CAN_BCM_RX_FILTER_ID as CAN_BCM_RX_FILTER_ID, + CAN_BCM_RX_NO_AUTOTIMER as CAN_BCM_RX_NO_AUTOTIMER, + CAN_BCM_RX_RTR_FRAME as CAN_BCM_RX_RTR_FRAME, + CAN_BCM_SETTIMER as CAN_BCM_SETTIMER, + CAN_BCM_STARTTIMER as CAN_BCM_STARTTIMER, + CAN_BCM_TX_ANNOUNCE as CAN_BCM_TX_ANNOUNCE, + CAN_BCM_TX_COUNTEVT as CAN_BCM_TX_COUNTEVT, + CAN_BCM_TX_CP_CAN_ID as CAN_BCM_TX_CP_CAN_ID, + CAN_BCM_TX_RESET_MULTI_IDX as CAN_BCM_TX_RESET_MULTI_IDX, + IPPROTO_CBT as IPPROTO_CBT, + IPPROTO_ICLFXBM as IPPROTO_ICLFXBM, + IPPROTO_IGP as IPPROTO_IGP, + IPPROTO_L2TP as IPPROTO_L2TP, + IPPROTO_PGM as IPPROTO_PGM, + IPPROTO_RDP as IPPROTO_RDP, + IPPROTO_ST as IPPROTO_ST, + AF_QIPCRTR as AF_QIPCRTR, + CAN_BCM_CAN_FD_FRAME as CAN_BCM_CAN_FD_FRAME, + IPPROTO_MOBILE as IPPROTO_MOBILE, + IPV6_USE_MIN_MTU as IPV6_USE_MIN_MTU, + MSG_NOTIFICATION as MSG_NOTIFICATION, + SO_SETFIB as SO_SETFIB, + CAN_J1939 as CAN_J1939, + CAN_RAW_JOIN_FILTERS as CAN_RAW_JOIN_FILTERS, + IPPROTO_UDPLITE as IPPROTO_UDPLITE, + J1939_EE_INFO_NONE as J1939_EE_INFO_NONE, + J1939_EE_INFO_TX_ABORT as J1939_EE_INFO_TX_ABORT, + J1939_FILTER_MAX as J1939_FILTER_MAX, + J1939_IDLE_ADDR as J1939_IDLE_ADDR, + J1939_MAX_UNICAST_ADDR as J1939_MAX_UNICAST_ADDR, + J1939_NLA_BYTES_ACKED as J1939_NLA_BYTES_ACKED, + J1939_NLA_PAD as J1939_NLA_PAD, + J1939_NO_ADDR as J1939_NO_ADDR, + J1939_NO_NAME as J1939_NO_NAME, + J1939_NO_PGN as J1939_NO_PGN, + J1939_PGN_ADDRESS_CLAIMED as J1939_PGN_ADDRESS_CLAIMED, + J1939_PGN_ADDRESS_COMMANDED as J1939_PGN_ADDRESS_COMMANDED, + J1939_PGN_MAX as J1939_PGN_MAX, + J1939_PGN_PDU1_MAX as J1939_PGN_PDU1_MAX, + J1939_PGN_REQUEST as J1939_PGN_REQUEST, + SCM_J1939_DEST_ADDR as SCM_J1939_DEST_ADDR, + SCM_J1939_DEST_NAME as SCM_J1939_DEST_NAME, + SCM_J1939_ERRQUEUE as SCM_J1939_ERRQUEUE, + SCM_J1939_PRIO as SCM_J1939_PRIO, + SO_J1939_ERRQUEUE as SO_J1939_ERRQUEUE, + SO_J1939_FILTER as SO_J1939_FILTER, + SO_J1939_PROMISC as SO_J1939_PROMISC, + SO_J1939_SEND_PRIO as SO_J1939_SEND_PRIO, + UDPLITE_RECV_CSCOV as UDPLITE_RECV_CSCOV, + UDPLITE_SEND_CSCOV as UDPLITE_SEND_CSCOV, + IP_RECVTOS as IP_RECVTOS, + TCP_KEEPALIVE as TCP_KEEPALIVE, + SO_INCOMING_CPU as SO_INCOMING_CPU, + FD_ACCEPT as FD_ACCEPT, + FD_CLOSE as FD_CLOSE, + FD_CLOSE_BIT as FD_CLOSE_BIT, + FD_CONNECT as FD_CONNECT, + FD_CONNECT_BIT as FD_CONNECT_BIT, + FD_READ as FD_READ, + FD_WRITE as FD_WRITE, + INFINITE as 
INFINITE, + WSA_FLAG_OVERLAPPED as WSA_FLAG_OVERLAPPED, + WSA_INVALID_HANDLE as WSA_INVALID_HANDLE, + WSA_INVALID_PARAMETER as WSA_INVALID_PARAMETER, + WSA_IO_INCOMPLETE as WSA_IO_INCOMPLETE, + WSA_IO_PENDING as WSA_IO_PENDING, + WSA_NOT_ENOUGH_MEMORY as WSA_NOT_ENOUGH_MEMORY, + WSA_OPERATION_ABORTED as WSA_OPERATION_ABORTED, + WSA_WAIT_FAILED as WSA_WAIT_FAILED, + WSA_WAIT_TIMEOUT as WSA_WAIT_TIMEOUT, + # python 3.12 + ETHERTYPE_ARP as ETHERTYPE_ARP, + ETHERTYPE_IP as ETHERTYPE_IP, + ETHERTYPE_IPV6 as ETHERTYPE_IPV6, + ETHERTYPE_VLAN as ETHERTYPE_VLAN, + ETH_P_ALL as ETH_P_ALL, + IP_ADD_SOURCE_MEMBERSHIP as IP_ADD_SOURCE_MEMBERSHIP, + IP_BLOCK_SOURCE as IP_BLOCK_SOURCE, + IP_DROP_SOURCE_MEMBERSHIP as IP_DROP_SOURCE_MEMBERSHIP, + IP_PKTINFO as IP_PKTINFO, + IP_UNBLOCK_SOURCE as IP_UNBLOCK_SOURCE, + TCP_CC_INFO as TCP_CC_INFO, + TCP_FASTOPEN_CONNECT as TCP_FASTOPEN_CONNECT, + TCP_FASTOPEN_KEY as TCP_FASTOPEN_KEY, + TCP_FASTOPEN_NO_COOKIE as TCP_FASTOPEN_NO_COOKIE, + TCP_INQ as TCP_INQ, + TCP_MD5SIG as TCP_MD5SIG, + TCP_MD5SIG_EXT as TCP_MD5SIG_EXT, + TCP_QUEUE_SEQ as TCP_QUEUE_SEQ, + TCP_REPAIR as TCP_REPAIR, + TCP_REPAIR_OPTIONS as TCP_REPAIR_OPTIONS, + TCP_REPAIR_QUEUE as TCP_REPAIR_QUEUE, + TCP_REPAIR_WINDOW as TCP_REPAIR_WINDOW, + TCP_SAVED_SYN as TCP_SAVED_SYN, + TCP_SAVE_SYN as TCP_SAVE_SYN, + TCP_THIN_DUPACK as TCP_THIN_DUPACK, + TCP_THIN_LINEAR_TIMEOUTS as TCP_THIN_LINEAR_TIMEOUTS, + TCP_TIMESTAMP as TCP_TIMESTAMP, + TCP_TX_DELAY as TCP_TX_DELAY, + TCP_ULP as TCP_ULP, + TCP_ZEROCOPY_RECEIVE as TCP_ZEROCOPY_RECEIVE, + ) diff --git a/trio/testing/__init__.py b/trio/testing/__init__.py index aa15c4743e..202c501483 100644 --- a/trio/testing/__init__.py +++ b/trio/testing/__init__.py @@ -1,28 +1,36 @@ -from .._core import wait_all_tasks_blocked, MockClock +# Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) -from ._trio_test import trio_test +from .._core import ( + wait_all_tasks_blocked as wait_all_tasks_blocked, + MockClock as MockClock, +) + +from ._trio_test import trio_test as trio_test -from ._checkpoints import assert_checkpoints, assert_no_checkpoints +from ._checkpoints import ( + assert_checkpoints as assert_checkpoints, + assert_no_checkpoints as assert_no_checkpoints, +) -from ._sequencer import Sequencer +from ._sequencer import Sequencer as Sequencer from ._check_streams import ( - check_one_way_stream, - check_two_way_stream, - check_half_closeable_stream, + check_one_way_stream as check_one_way_stream, + check_two_way_stream as check_two_way_stream, + check_half_closeable_stream as check_half_closeable_stream, ) from ._memory_streams import ( - MemorySendStream, - MemoryReceiveStream, - memory_stream_pump, - memory_stream_one_way_pair, - memory_stream_pair, - lockstep_stream_one_way_pair, - lockstep_stream_pair, + MemorySendStream as MemorySendStream, + MemoryReceiveStream as MemoryReceiveStream, + memory_stream_pump as memory_stream_pump, + memory_stream_one_way_pair as memory_stream_one_way_pair, + memory_stream_pair as memory_stream_pair, + lockstep_stream_one_way_pair as lockstep_stream_one_way_pair, + lockstep_stream_pair as lockstep_stream_pair, ) -from ._network import open_stream_to_socket_listener +from ._network import open_stream_to_socket_listener as open_stream_to_socket_listener ################################################################ diff --git a/trio/to_thread.py b/trio/to_thread.py index 6eec7b36c7..f2b5ec659e 100644 --- a/trio/to_thread.py +++ b/trio/to_thread.py @@ -1,2 +1,5 @@ from ._threads import 
to_thread_run_sync as run_sync from ._threads import current_default_thread_limiter + +# need to use __all__ for pyright --verifytypes to see re-exports when renaming them +__all__ = ["current_default_thread_limiter", "run_sync"] From 00c058b829acb844f23b3f23a533ec035790d9fa Mon Sep 17 00:00:00 2001 From: A5rocks Date: Wed, 21 Jun 2023 05:25:59 +0900 Subject: [PATCH 065/162] Make the mypy-based tests faster! (mostly) --- trio/_tests/test_exports.py | 289 +++++++++++++++++++++--------------- 1 file changed, 173 insertions(+), 116 deletions(-) diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index 842b1313cf..fd69a9d0d5 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -1,11 +1,14 @@ import enum +import functools import importlib import inspect +import json import socket as stdlib_socket import sys from pathlib import Path from types import ModuleType +import attrs import pytest import trio @@ -106,28 +109,35 @@ def no_underscores(symbols): if sys.implementation.name != "cpython": pytest.skip("mypy not installed in tests on pypy") - # mypy behaves strangely when passed a huge semicolon-separated line with `-c` - # so we use a tmpfile - tmpfile = tmpdir / "check_mypy.py" - tmpfile.write_text( - f"import {modname}\n" - + "".join(f"{modname}.{name}\n" for name in runtime_names), - encoding="utf8", - ) + cache = Path(tmpdir / "cache") + cache.mkdir() from mypy.api import run - mypy_res = run(["--config-file=", "--follow-imports=silent", str(tmpfile)]) - - # check that there were no errors (exit code 0), otherwise print the errors - assert mypy_res[2] == 0, mypy_res[0] + # pollute CWD with `.mypy_cache`? TODO think about it + run(["--config-file=", f"--cache-dir={cache}", "-c", f"import {modname}"]) + + trio_cache = next(cache.glob("*/trio")) + _, modname = (modname + ".").split(".", 1) + modname = modname[:-1] + mod_cache = trio_cache / modname if modname else trio_cache + if mod_cache.is_dir(): + mod_cache = mod_cache / "__init__.data.json" + else: + mod_cache = trio_cache / (modname + ".data.json") + + assert mod_cache.exists() and mod_cache.is_file() + with mod_cache.open() as cache_file: + cache_json = json.loads(cache_file.read()) + static_names = no_underscores( + key + for key, value in cache_json["names"].items() + if not key.startswith(".") and value["kind"] == "Gdef" + ) elif tool == "pyright_verifytypes": if not RUN_SLOW: # pragma: no cover pytest.skip("use --run-slow to check against mypy") import subprocess - import json - # uses `--verbose` to also get symbols without errors - # `--verbose` and `--outputjson` are incompatible, so we do string parsing res = subprocess.run( ["pyright", f"--verifytypes={modname}", "--outputjson"], capture_output=True, @@ -140,7 +150,7 @@ def no_underscores(symbols): if x["name"].startswith(modname) } - # pytest ignores the symbol defined behind `if False` + # pyright ignores the symbol defined behind `if False` if modname == "trio": static_names.add("testing") @@ -179,7 +189,7 @@ def no_underscores(symbols): @slow # see comment on test_static_tool_sees_all_symbols @pytest.mark.redistributors_should_skip -# pylint/jedi often have trouble with alpha releases, where Python's internals +# jedi/mypy often have trouble with alpha releases, where Python's internals # are in flux, grammar may not have settled down, etc. 
@pytest.mark.skipif( sys.version_info.releaselevel == "alpha", @@ -209,6 +219,48 @@ def no_hidden(symbols): if not py_typed_exists: # pragma: no branch py_typed_path.write_text("") + cache = Path(tmpdir / "cache") + cache.mkdir() + from mypy.api import run + + # pollute CWD with `.mypy_cache`? TODO think about it + run(["--config-file=", f"--cache-dir={cache}", "-c", f"import {module_name}"]) + + trio_cache = next(cache.glob("*/trio")) + modname = module_name + _, modname = (modname + ".").split(".", 1) + modname = modname[:-1] + mod_cache = trio_cache / modname if modname else trio_cache + if mod_cache.is_dir(): + mod_cache = mod_cache / "__init__.data.json" + else: + mod_cache = trio_cache / (modname + ".data.json") + + assert mod_cache.exists() and mod_cache.is_file() + with mod_cache.open() as cache_file: + cache_json = json.loads(cache_file.read()) + + # skip a bunch of file-system activity (probably can un-memoize?) + @functools.lru_cache() + def lookup_symbol(symbol): + topname, *modname, name = symbol.split(".") + version = next(cache.glob("*.*/")) + mod_cache = version / topname + if not mod_cache.is_dir(): + mod_cache = version / (topname + ".data.json") + + if modname: + for piece in modname[:-1]: + mod_cache /= piece + next_cache = mod_cache / modname[-1] + if next_cache.is_dir(): + mod_cache = next_cache / "__init__.data.json" + else: + mod_cache = mod_cache / (modname[-1] + ".data.json") + + with mod_cache.open() as f: + return json.loads(f.read())["names"][name] + errors: dict[str, object] = {} for class_name, class_ in module.__dict__.items(): if not isinstance(class_, type): @@ -264,111 +316,116 @@ def no_hidden(symbols): completions = script.complete() static_names = no_hidden(c.name for c in completions) - ignore_names - missing = runtime_names - static_names - extra = static_names - runtime_names - - # using .remove() instead of .delete() to get an error in case they start not - # being missing - - if BaseException in class_.__mro__ and sys.version_info > (3, 11): - missing.remove("add_note") - - # TODO: why is this? Is it a problem? 
- # see https://github.com/python-trio/trio/pull/2631#discussion_r1185615916 - if class_ == trio.StapledStream: - extra.remove("receive_stream") - extra.remove("send_stream") - - # I have not researched why these are missing, should maybe create an issue - # upstream with jedi - if sys.version_info >= (3, 12): - if class_ in ( - trio.DTLSChannel, - trio.MemoryReceiveChannel, - trio.MemorySendChannel, - trio.SSLListener, - trio.SocketListener, - ): - missing.remove("__aenter__") - missing.remove("__aexit__") - if class_ in (trio.DTLSChannel, trio.MemoryReceiveChannel): - missing.remove("__aiter__") - missing.remove("__anext__") - - # intentionally hidden behind type guard - if class_ == trio.Path: - missing.remove("__getattr__") - - if missing or extra: # pragma: no cover - errors[f"{module_name}.{class_name}"] = { - "missing": missing, - "extra": extra, - } elif tool == "mypy": - tmpfile = tmpdir / "check_mypy.py" - sorted_runtime_names = sorted(runtime_names) - content = f"from {module_name} import {class_name}\n" + "".join( - f"{class_name}.{name}\n" for name in sorted_runtime_names - ) - tmpfile.write_text(content, encoding="utf8") - from mypy.api import run - - res = run( - [ - "--config-file=", - "--follow-imports=silent", - "--disable-error-code=operator", - "--soft-error-limit=-1", - "--no-error-summary", - str(tmpfile), - ] - ) - # no errors - if res[2] == 0: - continue - - # get each line of output, containing an error for a symbol, - # stripping of trailing newline - it = iter(res[0].split("\n")[:-1]) - for output_line in it: - # split out the three last fields to not have problems with windows - # drives or other paths with any `:` - _, line, error_type, message = output_line.rsplit(":", 3) - - # -2 due to lines being 1-indexed and to skip the import line - symbol = ( - f"{module_name}.{class_name}." + sorted_runtime_names[int(line) - 2] + import itertools + + # load the cached type information + cached_type_info = cache_json["names"][class_name] + if "node" not in cached_type_info: + cached_type_info = lookup_symbol(cached_type_info["cross_ref"]) + + assert "node" in cached_type_info + node = cached_type_info["node"] + static_names = no_hidden(k for k in node["names"] if not k.startswith(".")) + for symbol in node["mro"][1:]: + node = lookup_symbol(symbol)["node"] + static_names |= no_hidden( + k for k in node["names"] if not k.startswith(".") ) - - # The POSIX-only attributes get listed in `dir(trio.Path)` since - # they're in `dir(pathlib.Path)` on win32 cpython. This should *maybe* - # be fixed in the future, but for now we ignore it. - if ( - symbol - in ("trio.Path.group", "trio.Path.owner", "trio.Path.is_mount") - and sys.platform == "win32" - and sys.implementation.name == "cpython" - ): - continue - - # intentionally hidden from type checkers, lest they accept any attribute - if symbol == "trio.Path.__getattr__": - continue - - # a bunch of symbols have this error, e.g. trio.lowlevel.Task.context - # It's not a problem: it's just complaining we're accessing - # instance-only attributes on a class! - # See this test for a minimized version that causes this error: - # https://github.com/python/mypy/blob/c517b86b9ba7487e7758f187cf31478e7aeaad47/test-data/unit/check-slots.test#L515-L523. 
- - if "conflicts with class variable access" in message: - continue - - errors[symbol] = error_type + ":" + message # pragma: no cover + static_names -= ignore_names else: # pragma: no cover assert False, "unknown tool" + missing = runtime_names - static_names + extra = static_names - runtime_names + + # using .remove() instead of .delete() to get an error in case they start not + # being missing + + if ( + tool == "jedi" + and BaseException in class_.__mro__ + and sys.version_info >= (3, 11) + ): + missing.remove("add_note") + + if ( + tool == "mypy" + and BaseException in class_.__mro__ + and sys.version_info >= (3, 11) + ): + extra.remove("__notes__") + + if tool == "mypy" and attrs.has(class_): + # e.g. __trio__core__run_CancelScope_AttrsAttributes__ + before = len(extra) + extra = {e for e in extra if not e.endswith("AttrsAttributes__")} + assert len(extra) == before - 1 + + # TODO: this *should* be visible via `dir`!! + if tool == "mypy" and class_ == trio.Nursery: + extra.remove("cancel_scope") + + # TODO: I'm not so sure about these, but should still be looked at. + EXTRAS = { + trio.DTLSChannel: {"peer_address", "endpoint"}, + trio.DTLSEndpoint: {"socket", "incoming_packets_buffer"}, + trio.Process: {"args", "pid", "stderr", "stdin", "stdio", "stdout"}, + trio.SSLListener: {"transport_listener"}, + trio.SSLStream: {"transport_stream"}, + trio.SocketListener: {"socket"}, + trio.SocketStream: {"socket"}, + trio.testing.MemoryReceiveStream: {"close_hook", "receive_some_hook"}, + trio.testing.MemorySendStream: { + "close_hook", + "send_all_hook", + "wait_send_all_might_not_block_hook", + }, + } + if tool == "mypy" and class_ in EXTRAS: + before = len(extra) + extra -= EXTRAS[class_] + assert len(extra) == before - len(EXTRAS[class_]) + + # probably an issue with mypy.... + if tool == "mypy" and class_ == trio.Path and sys.platform == "win32": + before = len(missing) + missing -= {"owner", "group", "is_mount"} + assert len(missing) == before - 3 + + # TODO: why is this? Is it a problem? 
+ # see https://github.com/python-trio/trio/pull/2631#discussion_r1185615916 + if class_ == trio.StapledStream: + extra.remove("receive_stream") + extra.remove("send_stream") + + # I have not researched why these are missing, should maybe create an issue + # upstream with jedi + if tool == "jedi" and sys.version_info >= (3, 11): + if class_ in ( + trio.DTLSChannel, + trio.MemoryReceiveChannel, + trio.MemorySendChannel, + trio.SSLListener, + trio.SocketListener, + ): + missing.remove("__aenter__") + missing.remove("__aexit__") + if class_ in (trio.DTLSChannel, trio.MemoryReceiveChannel): + missing.remove("__aiter__") + missing.remove("__anext__") + + # intentionally hidden behind type guard + if class_ == trio.Path: + missing.remove("__getattr__") + + if missing or extra: # pragma: no cover + errors[f"{module_name}.{class_name}"] = { + "missing": missing, + "extra": extra, + } + # clean up created py.typed file if tool == "mypy" and not py_typed_exists: py_typed_path.unlink() From 7be61ff05192d91aee905788505e7f8fc31519fe Mon Sep 17 00:00:00 2001 From: A5rocks Date: Wed, 21 Jun 2023 08:56:10 +0900 Subject: [PATCH 066/162] Pollute `empty` directory for test speedup --- ci.sh | 6 +++++ trio/_tests/test_exports.py | 48 +++++++++++++++++++++++++++---------- 2 files changed, 41 insertions(+), 13 deletions(-) diff --git a/ci.sh b/ci.sh index 9188bac622..d306212f70 100755 --- a/ci.sh +++ b/ci.sh @@ -103,6 +103,12 @@ else INSTALLDIR=$(python -c "import os, trio; print(os.path.dirname(trio.__file__))") cp ../pyproject.toml $INSTALLDIR + # TODO: remove this once we have a py.typed file + touch "$INSTALLDIR/py.typed" + + # get mypy tests a nice cache + MYPYPATH=".." mypy --config-file= --cache-dir=./.mypy_cache -c "import trio" >/dev/null 2>/dev/null || true + if pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --cov="$INSTALLDIR" --cov-report=xml --cov-config=../.coveragerc --verbose; then PASSED=true else diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index fd69a9d0d5..db79434071 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -18,6 +18,8 @@ from .._core._tests.tutil import slow from .pytest_plugin import RUN_SLOW +mypy_cache_updated = False + def test_core_is_properly_reexported(): # Each export from _core should be re-exported by exactly one of these @@ -59,8 +61,8 @@ def public_modules(module): # won't be reflected in trio.socket, and this shouldn't cause downstream test # runs to start failing. @pytest.mark.redistributors_should_skip -# pylint/jedi often have trouble with alpha releases, where Python's internals -# are in flux, grammar may not have settled down, etc. +# Static analysis tools often have trouble with alpha releases, where Python's +# internals are in flux, grammar may not have settled down, etc. @pytest.mark.skipif( sys.version_info.releaselevel == "alpha", reason="skip static introspection tools on Python dev/alpha releases", @@ -72,6 +74,7 @@ def public_modules(module): "ignore:module 'sre_constants' is deprecated:DeprecationWarning", ) def test_static_tool_sees_all_symbols(tool, modname, tmpdir): + global mypy_cache_updated module = importlib.import_module(modname) def no_underscores(symbols): @@ -109,12 +112,20 @@ def no_underscores(symbols): if sys.implementation.name != "cpython": pytest.skip("mypy not installed in tests on pypy") - cache = Path(tmpdir / "cache") - cache.mkdir() + cache = Path.cwd() / ".mypy_cache" from mypy.api import run - # pollute CWD with `.mypy_cache`? 
TODO think about it - run(["--config-file=", f"--cache-dir={cache}", "-c", f"import {modname}"]) + # This pollutes the `empty` dir. Should this be changed? + if not mypy_cache_updated: + run( + [ + "--config-file=", + "--cache-dir=./.mypy_cache", + "-c", + f"import {modname}", + ] + ) + mypy_cache_updated = True trio_cache = next(cache.glob("*/trio")) _, modname = (modname + ".").split(".", 1) @@ -189,8 +200,8 @@ def no_underscores(symbols): @slow # see comment on test_static_tool_sees_all_symbols @pytest.mark.redistributors_should_skip -# jedi/mypy often have trouble with alpha releases, where Python's internals -# are in flux, grammar may not have settled down, etc. +# Static analysis tools often have trouble with alpha releases, where Python's +# internals are in flux, grammar may not have settled down, etc. @pytest.mark.skipif( sys.version_info.releaselevel == "alpha", reason="skip static introspection tools on Python dev/alpha releases", @@ -198,6 +209,7 @@ def no_underscores(symbols): @pytest.mark.parametrize("module_name", PUBLIC_MODULE_NAMES) @pytest.mark.parametrize("tool", ["jedi", "mypy"]) def test_static_tool_sees_class_members(tool, module_name, tmpdir) -> None: + global mypy_cache_updated module = PUBLIC_MODULES[PUBLIC_MODULE_NAMES.index(module_name)] # ignore hidden, but not dunder, symbols @@ -219,12 +231,22 @@ def no_hidden(symbols): if not py_typed_exists: # pragma: no branch py_typed_path.write_text("") - cache = Path(tmpdir / "cache") - cache.mkdir() + cache = Path.cwd() / ".mypy_cache" from mypy.api import run - # pollute CWD with `.mypy_cache`? TODO think about it - run(["--config-file=", f"--cache-dir={cache}", "-c", f"import {module_name}"]) + # This pollutes the `empty` dir. Should this be changed? + if not mypy_cache_updated: # pragma: no cover + # mypy cache was *probably* already updated by the other tests, + # but `pytest -k ...` might run just this test on its own + run( + [ + "--config-file=", + "--cache-dir=./.mypy_cache", + "-c", + f"import {module_name}", + ] + ) + mypy_cache_updated = True trio_cache = next(cache.glob("*/trio")) modname = module_name @@ -244,7 +266,7 @@ def no_hidden(symbols): @functools.lru_cache() def lookup_symbol(symbol): topname, *modname, name = symbol.split(".") - version = next(cache.glob("*.*/")) + version = next(cache.glob("3.*/")) mod_cache = version / topname if not mod_cache.is_dir(): mod_cache = version / (topname + ".data.json") From e917f7cdd17eecee5353c4059f068801cc45368d Mon Sep 17 00:00:00 2001 From: jakkdl Date: Fri, 23 Jun 2023 12:39:21 +0200 Subject: [PATCH 067/162] mark test_nursery_cancel_doesnt_create_cyclic_garbage as xfail on 3.12, waiting for upstream issue to get resolved. 
--- trio/_core/_tests/test_run.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/trio/_core/_tests/test_run.py b/trio/_core/_tests/test_run.py index 4d2cf204fe..082a1f8f5c 100644 --- a/trio/_core/_tests/test_run.py +++ b/trio/_core/_tests/test_run.py @@ -2279,6 +2279,10 @@ async def crasher(): gc.garbage.clear() +@pytest.mark.xfail( + sys.version_info >= (3, 12), + reason="Waiting on https://github.com/python/cpython/issues/100964", +) @pytest.mark.skipif( sys.implementation.name != "cpython", reason="Only makes sense with refcounting GC" ) From d1cbbd3bf0164344532e1c62bff6d76cc5171f8a Mon Sep 17 00:00:00 2001 From: jakkdl Date: Tue, 6 Jun 2023 11:28:08 +0200 Subject: [PATCH 068/162] timeout functions now raise ValueError on NaN inputs --- newsfragments/2493.breaking.rst | 1 + trio/_tests/test_timeouts.py | 36 ++++++++++++++++++++++++--------- trio/_timeouts.py | 23 +++++++++++++++++---- 3 files changed, 46 insertions(+), 14 deletions(-) create mode 100644 newsfragments/2493.breaking.rst diff --git a/newsfragments/2493.breaking.rst b/newsfragments/2493.breaking.rst new file mode 100644 index 0000000000..51c2985d1a --- /dev/null +++ b/newsfragments/2493.breaking.rst @@ -0,0 +1 @@ +Timeout functions now raise `ValueError` if passed `math.nan`. This includes `trio.sleep`, `trio.sleep_until`, `trio.move_on_at`, `trio.move_on_after`, `trio.fail_at` and `trio.fail_after`. diff --git a/trio/_tests/test_timeouts.py b/trio/_tests/test_timeouts.py index c817c49588..f55e697d6f 100644 --- a/trio/_tests/test_timeouts.py +++ b/trio/_tests/test_timeouts.py @@ -53,9 +53,6 @@ async def sleep_2(): await check_takes_about(sleep_2, TARGET) - with pytest.raises(ValueError): - await sleep(-1) - with assert_checkpoints(): await sleep(0) # This also serves as a test of the trivial move_on_at @@ -66,10 +63,6 @@ async def sleep_2(): @slow async def test_move_on_after(): - with pytest.raises(ValueError): - with move_on_after(-1): - pass # pragma: no cover - async def sleep_3(): with move_on_after(TARGET): await sleep(100) @@ -99,6 +92,29 @@ async def sleep_5(): with fail_after(100): await sleep(0) - with pytest.raises(ValueError): - with fail_after(-1): - pass # pragma: no cover + +async def test_timeouts_raise_value_error(): + # deadlines are allowed to be negative, but not delays. + # neither delays nor deadlines are allowed to be NaN + + nan = float("nan") + + for fun, val in ( + (sleep, -1), + (sleep, nan), + (sleep_until, nan), + ): + with pytest.raises(ValueError): + await fun(val) + + for cm, val in ( + (fail_after, -1), + (fail_after, nan), + (fail_at, nan), + (move_on_after, -1), + (move_on_after, nan), + (move_on_at, nan), + ): + with pytest.raises(ValueError): + with cm(val): + pass # pragma: no cover diff --git a/trio/_timeouts.py b/trio/_timeouts.py index 1f7878f89e..ad31e78404 100644 --- a/trio/_timeouts.py +++ b/trio/_timeouts.py @@ -1,3 +1,4 @@ +import math from contextlib import contextmanager import trio @@ -10,7 +11,12 @@ def move_on_at(deadline): Args: deadline (float): The deadline. + Raises: + ValueError: if deadline is NaN. + """ + if math.isnan(deadline): + raise ValueError("deadline must not be NaN") return trio.CancelScope(deadline=deadline) @@ -22,10 +28,9 @@ def move_on_after(seconds): seconds (float): The timeout. Raises: - ValueError: if timeout is less than zero. + ValueError: if timeout is less than zero or NaN. 
""" - if seconds < 0: raise ValueError("timeout must be non-negative") return move_on_at(trio.current_time() + seconds) @@ -52,6 +57,9 @@ async def sleep_until(deadline): the past, in which case this function executes a checkpoint but does not block. + Raises: + ValueError: if deadline is NaN. + """ with move_on_at(deadline): await sleep_forever() @@ -65,7 +73,7 @@ async def sleep(seconds): insert a checkpoint without actually blocking. Raises: - ValueError: if *seconds* is negative. + ValueError: if *seconds* is negative or NaN. """ if seconds < 0: @@ -96,9 +104,13 @@ def fail_at(deadline): :func:`fail_at`, then it's caught and :exc:`TooSlowError` is raised in its place. + Args: + deadline (float): The deadline. + Raises: TooSlowError: if a :exc:`Cancelled` exception is raised in this scope and caught by the context manager. + ValueError: if deadline is NaN. """ @@ -119,10 +131,13 @@ def fail_after(seconds): it's caught and discarded. When it reaches :func:`fail_after`, then it's caught and :exc:`TooSlowError` is raised in its place. + Args: + seconds (float): The timeout. + Raises: TooSlowError: if a :exc:`Cancelled` exception is raised in this scope and caught by the context manager. - ValueError: if *seconds* is less than zero. + ValueError: if *seconds* is less than zero or NaN. """ if seconds < 0: From 30235b8aa364b42dda9a2c16a35cf6194be1fb3e Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Mon, 26 Jun 2023 23:22:19 +0900 Subject: [PATCH 069/162] Fix tokenless uploads by hardcoding token... --- .github/workflows/ci.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 42e872be34..480398a1f0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -81,7 +81,8 @@ jobs: uses: codecov/codecov-action@v3 with: directory: empty - name: 'Windows (${{ matrix.python }}, ${{ matrix.arch }}${{ matrix.extra_name }})' + token: 87cefb17-c44b-4f2f-8b30-1fff5769ce46 + name: Windows (${{ matrix.python }}, ${{ matrix.arch }}${{ matrix.extra_name }}) flags: Windows,${{ matrix.python }} Ubuntu: @@ -131,7 +132,8 @@ jobs: uses: codecov/codecov-action@v3 with: directory: empty - name: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' + token: 87cefb17-c44b-4f2f-8b30-1fff5769ce46 + name: Ubuntu (${{ matrix.python }}${{ matrix.extra_name }}) flags: Ubuntu,${{ matrix.python }} macOS: @@ -166,7 +168,8 @@ jobs: uses: codecov/codecov-action@v3 with: directory: empty - name: 'macOS (${{ matrix.python }})' + token: 87cefb17-c44b-4f2f-8b30-1fff5769ce46 + name: macOS (${{ matrix.python }}) flags: macOS,${{ matrix.python }} # https://github.com/marketplace/actions/alls-green#why From afd51d1617ada51e59a9e2b1258f94982d817794 Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Wed, 28 Jun 2023 22:31:25 +0900 Subject: [PATCH 070/162] Try out coverage instead of pytest-cov (#2665) * Use coverage instead of pytest-cov --------- Co-authored-by: John Litborn <11260241+jakkdl@users.noreply.github.com> --- .coveragerc | 8 ++++---- ci.sh | 9 ++++++++- test-requirements.in | 2 +- test-requirements.txt | 9 ++------- trio/_core/_tests/test_multierror.py | 6 +++++- .../_tests/test_multierror_scripts/apport_excepthook.py | 2 ++ 6 files changed, 22 insertions(+), 14 deletions(-) diff --git a/.coveragerc b/.coveragerc index 5d3f57aa66..98f923bd8e 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,14 +1,14 @@ [run] branch=True source=trio -# For some reason coverage recording doesn't work for ipython_custom_exc.py, -# so leave it out of 
reports omit= setup.py - */ipython_custom_exc.py +# These are run in subprocesses, but still don't work. We follow +# coverage's documentation to no avail. + */trio/_core/_tests/test_multierror_scripts/* # Omit the generated files in trio/_core starting with _generated_ */trio/_core/_generated_* -# script used to check type completeness that isn't run in tests +# Script used to check type completeness that isn't run in tests */trio/_tests/check_type_completeness.py # The test suite spawns subprocesses to test some stuff, so make sure # this doesn't corrupt the coverage files diff --git a/ci.sh b/ci.sh index d306212f70..ed97ff738b 100755 --- a/ci.sh +++ b/ci.sh @@ -109,12 +109,19 @@ else # get mypy tests a nice cache MYPYPATH=".." mypy --config-file= --cache-dir=./.mypy_cache -c "import trio" >/dev/null 2>/dev/null || true - if pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --cov="$INSTALLDIR" --cov-report=xml --cov-config=../.coveragerc --verbose; then + # support subprocess spawning with coverage.py + echo "import coverage; coverage.process_startup()" | tee -a "$INSTALLDIR/../sitecustomize.py" + + if COVERAGE_PROCESS_START=$(pwd)/../.coveragerc coverage run --rcfile=../.coveragerc -m pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --verbose; then PASSED=true else PASSED=false fi + coverage combine --rcfile ../.coveragerc + coverage report -m --rcfile ../.coveragerc + coverage xml --rcfile ../.coveragerc + # Remove the LSP again; again we want to do this ASAP to avoid # accidentally breaking other stuff. if [ "$LSP" != "" ]; then diff --git a/test-requirements.in b/test-requirements.in index 0227daef6a..fc94889d91 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -1,6 +1,6 @@ # For tests pytest >= 5.0 # for faulthandler in core -pytest-cov >= 2.6.0 +coverage >= 7.2.5 async_generator >= 1.9 pyright # ipython 7.x is the last major version supporting Python 3.7 diff --git a/test-requirements.txt b/test-requirements.txt index 719617a34b..f96eb2f32a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -26,8 +26,8 @@ click==8.1.3 # via # black # pip-tools -coverage[toml]==7.2.7 - # via pytest-cov +coverage==7.2.7 + # via -r test-requirements.in cryptography==39.0.2 # via # -r test-requirements.in @@ -121,10 +121,6 @@ pyproject-hooks==1.0.0 pyright==1.1.310 # via -r test-requirements.in pytest==7.3.1 - # via - # -r test-requirements.in - # pytest-cov -pytest-cov==4.1.0 # via -r test-requirements.in sniffio==1.3.0 # via -r test-requirements.in @@ -134,7 +130,6 @@ tomli==2.0.1 # via # black # build - # coverage # mypy # pylint # pytest diff --git a/trio/_core/_tests/test_multierror.py b/trio/_core/_tests/test_multierror.py index 7d31b1e889..fc4a5a6637 100644 --- a/trio/_core/_tests/test_multierror.py +++ b/trio/_core/_tests/test_multierror.py @@ -451,7 +451,11 @@ def run_script(name, use_ipython=False): print("subprocess PYTHONPATH:", env.get("PYTHONPATH")) if use_ipython: - lines = [script_path.read_text(), "exit()"] + lines = [ + "import runpy", + f"runpy.run_path(r'{script_path}', run_name='trio.fake')", + "exit()", + ] cmd = [ sys.executable, diff --git a/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py b/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py index 12e7fb0851..b5b2e16c8e 100644 --- a/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py +++ b/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py @@ -1,3 +1,5 @@ 
+import _common + # The apport_python_hook package is only installed as part of Ubuntu's system # python, and not available in venvs. So before we can import it we have to # make sure it's on sys.path. From 746389e9a888dc92bb66b587df64a25c373a5106 Mon Sep 17 00:00:00 2001 From: Kar Petrosyan <92274156+karosis88@users.noreply.github.com> Date: Wed, 28 Jun 2023 17:08:47 -0400 Subject: [PATCH 071/162] Fix typo (#2673) * Fix typo in error message in _socket.py --------- Co-authored-by: John Litborn <11260241+jakkdl@users.noreply.github.com> --- trio/_socket.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/trio/_socket.py b/trio/_socket.py index 980b5ba860..2889f48113 100644 --- a/trio/_socket.py +++ b/trio/_socket.py @@ -434,9 +434,7 @@ def __init__(self, sock): # For example, ssl.SSLSocket subclasses socket.socket, but we # certainly don't want to blindly wrap one of those. raise TypeError( - "expected object of type 'socket.socket', not '{}".format( - type(sock).__name__ - ) + f"expected object of type 'socket.socket', not '{type(sock).__name__}'" ) self._sock = sock self._sock.setblocking(False) From ae200e60840081ac37e55cc512ac7cc965f00caf Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Thu, 29 Jun 2023 00:04:58 +0900 Subject: [PATCH 072/162] Render long description as ReST --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index a8e1154dc6..c13d1eb78a 100644 --- a/setup.py +++ b/setup.py @@ -73,6 +73,7 @@ version=__version__, description="A friendly Python library for async concurrency and I/O", long_description=LONG_DESC, + long_description_content_type="text/x-rst", author="Nathaniel J. Smith", author_email="njs@pobox.com", url="https://github.com/python-trio/trio", From c758fbc27bc88ef8cba83972b6586732492120d8 Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Sun, 2 Jul 2023 10:40:02 +0900 Subject: [PATCH 073/162] Prevent typing_extensions 4.7.0 (#2676) * Prevent typing_extensions 4.7.0 * xfail instead * Oops, sys.implementation is an object * Reformat * Update requirements files again --- test-requirements.in | 2 +- test-requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test-requirements.in b/test-requirements.in index fc94889d91..d95e9dc843 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -23,7 +23,7 @@ pip-tools >= 6.13.0 # typed_ast is deprecated as of 3.8, and straight up doesn't compile on 3.10-dev as of 2021-12-13 typed_ast; implementation_name == "cpython" and python_version < "3.8" mypy-extensions; implementation_name == "cpython" -typing-extensions; implementation_name == "cpython" +typing-extensions < 4.7.0 # Trio's own dependencies cffi; os_name == "nt" diff --git a/test-requirements.txt b/test-requirements.txt index f96eb2f32a..5cc5cc87f8 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -143,7 +143,7 @@ trustme==1.0.0 # via -r test-requirements.in types-pyopenssl==23.1.0.3 ; implementation_name == "cpython" # via -r test-requirements.in -typing-extensions==4.6.2 ; implementation_name == "cpython" +typing-extensions==4.6.2 # via # -r test-requirements.in # astroid From d01158bd721e6a600fdc2ef6c85d8d23e284e4d4 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Sun, 2 Jul 2023 04:16:15 +0200 Subject: [PATCH 074/162] Enable F401 in flake8, configure per-file disables. (#2669) * Enable F401 in flake8, configure per-file disables. 
* remove asyncio import * use flake8-pyproject, add comments to some noqa's, remove test_util.py * pretty-format-toml pyproject.toml, update comment in check.sh, fix CI * fix CI, add diff print to check.sh --------- Co-authored-by: EXPLOSION --- check.sh | 7 +- pyproject.toml | 64 +++++++++++-------- test-requirements.in | 1 + test-requirements.txt | 5 ++ trio/_channel.py | 5 +- trio/_core/_exceptions.py | 2 - trio/_core/_instrumentation.py | 3 +- trio/_core/_multierror.py | 1 - trio/_core/_tests/test_asyncgen.py | 1 - trio/_core/_tests/test_io.py | 4 +- trio/_core/_tests/test_ki.py | 6 +- trio/_core/_tests/test_multierror.py | 3 +- trio/_core/_tests/test_run.py | 8 +-- trio/_core/_tests/test_thread_cache.py | 3 +- trio/_core/_tests/test_util.py | 1 - trio/_core/_tests/test_windows.py | 2 +- trio/_subprocess_platform/__init__.py | 3 +- trio/_tests/test_abc.py | 1 - trio/_tests/test_exports.py | 2 - .../test_highlevel_open_tcp_listeners.py | 2 +- trio/_tests/test_highlevel_ssl_helpers.py | 4 +- trio/_tests/test_socket.py | 1 - trio/_tests/test_ssl.py | 1 - trio/_tests/test_sync.py | 3 +- trio/_tests/test_unix_pipes.py | 3 +- trio/_tests/test_windows_pipes.py | 7 +- trio/_tests/tools/test_gen_exports.py | 4 -- trio/_wait_for_object.py | 1 - trio/_windows_pipes.py | 2 +- trio/testing/_fake_net.py | 5 +- 30 files changed, 66 insertions(+), 89 deletions(-) delete mode 100644 trio/_core/_tests/test_util.py diff --git a/check.sh b/check.sh index 3c46cf844f..a9b2eb1e5e 100755 --- a/check.sh +++ b/check.sh @@ -18,10 +18,8 @@ if ! black --check setup.py trio; then black --diff setup.py trio fi -# Run flake8 without pycodestyle and import-related errors -flake8 trio/ \ - --ignore=D,E,W,F401,F403,F405,F821,F822\ - || EXIT_STATUS=$? +# Run flake8, configured in pyproject.toml +flake8 trio/ || EXIT_STATUS=$? # Run mypy on all supported platforms mypy -m trio -m trio.testing --platform linux || EXIT_STATUS=$? @@ -34,6 +32,7 @@ pip-compile docs-requirements.in if git status --porcelain | grep -q "requirements.txt"; then git status --porcelain + git diff *requirements.txt EXIT_STATUS=1 fi diff --git a/pyproject.toml b/pyproject.toml index 96396faddc..0cce1106b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,19 +1,51 @@ [tool.black] target-version = ['py37'] +[tool.flake8] +extend-ignore = ['D', 'E', 'W', 'F403', 'F405', 'F821', 'F822'] +per-file-ignores = [ + 'trio/__init__.py: F401', + 'trio/_core/__init__.py: F401', + 'trio/_core/_generated*.py: F401', + 'trio/_core/_tests/test_multierror_scripts/*: F401', + 'trio/abc.py: F401', + 'trio/lowlevel.py: F401', + 'trio/socket.py: F401', + 'trio/testing/__init__.py: F401' +] + +[tool.pytest.ini_options] +addopts = ["--strict-markers", "--strict-config"] +faulthandler_timeout = 60 +filterwarnings = [ + "error", + # https://gitter.im/python-trio/general?at=63bb8d0740557a3d5c688d67 + 'ignore:You are using cryptography on a 32-bit Python on a 64-bit Windows Operating System. 
Cryptography will be significantly faster if you switch to using a 64-bit Python.:UserWarning', + # this should remain until https://github.com/pytest-dev/pytest/pull/10894 is merged + 'ignore:ast.Str is deprecated:DeprecationWarning', + 'ignore:Attribute s is deprecated and will be removed:DeprecationWarning', + 'ignore:ast.NameConstant is deprecated:DeprecationWarning', + 'ignore:ast.Num is deprecated:DeprecationWarning', + # https://github.com/python/mypy/issues/15330 + 'ignore:ast.Ellipsis is deprecated:DeprecationWarning', + 'ignore:ast.Bytes is deprecated:DeprecationWarning' +] +junit_family = "xunit2" +markers = ["redistributors_should_skip: tests that should be skipped by downstream redistributors"] +xfail_strict = true [tool.towncrier] +directory = "newsfragments" +filename = "docs/source/history.rst" +issue_format = "`#{issue} `__" # Usage: # - PRs should drop a file like "issuenumber.feature" in newsfragments -# (or "bugfix", "doc", "removal", "misc"; misc gets no text, we can -# customize this) +# (or "bugfix", "doc", "removal", "misc"; misc gets no text, we can +# customize this) # - At release time after bumping version number, run: towncrier -# (or towncrier --draft) +# (or towncrier --draft) package = "trio" -filename = "docs/source/history.rst" -directory = "newsfragments" underlines = ["-", "~", "^"] -issue_format = "`#{issue} `__" [[tool.towncrier.type]] directory = "headline" @@ -49,23 +81,3 @@ showcontent = true directory = "misc" name = "Miscellaneous internal changes" showcontent = true - -[tool.pytest.ini_options] -addopts = ["--strict-markers", "--strict-config"] -xfail_strict = true -faulthandler_timeout = 60 -markers = ["redistributors_should_skip: tests that should be skipped by downstream redistributors"] -junit_family = "xunit2" -filterwarnings = [ - "error", - # https://gitter.im/python-trio/general?at=63bb8d0740557a3d5c688d67 - 'ignore:You are using cryptography on a 32-bit Python on a 64-bit Windows Operating System. 
Cryptography will be significantly faster if you switch to using a 64-bit Python.:UserWarning', - # this should remain until https://github.com/pytest-dev/pytest/pull/10894 is merged - 'ignore:ast.Str is deprecated:DeprecationWarning', - 'ignore:Attribute s is deprecated and will be removed:DeprecationWarning', - 'ignore:ast.NameConstant is deprecated:DeprecationWarning', - 'ignore:ast.Num is deprecated:DeprecationWarning', - # https://github.com/python/mypy/issues/15330 - 'ignore:ast.Ellipsis is deprecated:DeprecationWarning', - 'ignore:ast.Bytes is deprecated:DeprecationWarning', -] diff --git a/test-requirements.in b/test-requirements.in index d95e9dc843..1e1e23c2b8 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -16,6 +16,7 @@ black; implementation_name == "cpython" mypy; implementation_name == "cpython" types-pyOpenSSL; implementation_name == "cpython" flake8 < 6.0.0 # 6.0.0 drops python 3.7 +flake8-pyproject astor # code generation pip-tools >= 6.13.0 diff --git a/test-requirements.txt b/test-requirements.txt index 5cc5cc87f8..727a1d97f7 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -43,6 +43,10 @@ exceptiongroup==1.1.1 ; python_version < "3.11" # -r test-requirements.in # pytest flake8==5.0.4 + # via + # -r test-requirements.in + # flake8-pyproject +flake8-pyproject==1.2.3 # via -r test-requirements.in idna==3.4 # via @@ -130,6 +134,7 @@ tomli==2.0.1 # via # black # build + # flake8-pyproject # mypy # pylint # pytest diff --git a/trio/_channel.py b/trio/_channel.py index 2059b1fb4b..3ad08b7109 100644 --- a/trio/_channel.py +++ b/trio/_channel.py @@ -1,14 +1,11 @@ from __future__ import annotations from collections import deque, OrderedDict -from collections.abc import Callable from math import inf from types import TracebackType from typing import ( - Any, Generic, - NoReturn, TypeVar, TYPE_CHECKING, Tuple, # only needed for typechecking on <3.9 @@ -17,7 +14,7 @@ import attr from outcome import Error, Value -from ._abc import SendChannel, ReceiveChannel, Channel, ReceiveType, SendType, T +from ._abc import SendChannel, ReceiveChannel, ReceiveType, SendType, T from ._util import generic_function, NoPublicConstructor import trio diff --git a/trio/_core/_exceptions.py b/trio/_core/_exceptions.py index 6189c484b4..8c26162ee9 100644 --- a/trio/_core/_exceptions.py +++ b/trio/_core/_exceptions.py @@ -1,5 +1,3 @@ -import attr - from trio._util import NoPublicConstructor diff --git a/trio/_core/_instrumentation.py b/trio/_core/_instrumentation.py index b133d47406..a0757a5b83 100644 --- a/trio/_core/_instrumentation.py +++ b/trio/_core/_instrumentation.py @@ -1,7 +1,6 @@ import logging import types -import attr -from typing import Any, Callable, Dict, List, Sequence, Iterator, TypeVar +from typing import Any, Callable, Dict, Sequence, TypeVar from .._abc import Instrument diff --git a/trio/_core/_multierror.py b/trio/_core/_multierror.py index c9c355408a..9e69928162 100644 --- a/trio/_core/_multierror.py +++ b/trio/_core/_multierror.py @@ -1,6 +1,5 @@ import sys import warnings -from typing import Sequence import attr diff --git a/trio/_core/_tests/test_asyncgen.py b/trio/_core/_tests/test_asyncgen.py index 65bde5857f..92a267540f 100644 --- a/trio/_core/_tests/test_asyncgen.py +++ b/trio/_core/_tests/test_asyncgen.py @@ -3,7 +3,6 @@ import pytest import contextlib from math import inf -from functools import partial from ... 
import _core from .tutil import gc_collect_harder, buggy_pypy_asyncgens, restore_unraisablehook diff --git a/trio/_core/_tests/test_io.py b/trio/_core/_tests/test_io.py index 916ba6cd6f..106dae4047 100644 --- a/trio/_core/_tests/test_io.py +++ b/trio/_core/_tests/test_io.py @@ -1,13 +1,11 @@ import pytest import socket as stdlib_socket -import select import random -import errno from contextlib import suppress from ... import _core -from ...testing import wait_all_tasks_blocked, Sequencer, assert_checkpoints +from ...testing import wait_all_tasks_blocked, assert_checkpoints import trio # Cross-platform tests for IO handling diff --git a/trio/_core/_tests/test_ki.py b/trio/_core/_tests/test_ki.py index 101e21441d..c8b549c0a8 100644 --- a/trio/_core/_tests/test_ki.py +++ b/trio/_core/_tests/test_ki.py @@ -1,11 +1,8 @@ import outcome import pytest -import sys -import os import signal import threading import contextlib -import time import inspect try: @@ -15,9 +12,8 @@ from ... import _core from ...testing import wait_all_tasks_blocked -from ..._util import signal_raise, is_main_thread +from ..._util import signal_raise from ..._timeouts import sleep -from .tutil import slow def ki_self(): diff --git a/trio/_core/_tests/test_multierror.py b/trio/_core/_tests/test_multierror.py index fc4a5a6637..354f6e01df 100644 --- a/trio/_core/_tests/test_multierror.py +++ b/trio/_core/_tests/test_multierror.py @@ -11,7 +11,6 @@ extract_tb, print_exception, ) -from traceback import _cause_message # type: ignore import sys import re @@ -496,7 +495,7 @@ def check_simple_excepthook(completed, uses_ipython): try: - import IPython + import IPython # noqa: F401 except ImportError: # pragma: no cover have_ipython = False else: diff --git a/trio/_core/_tests/test_run.py b/trio/_core/_tests/test_run.py index 082a1f8f5c..249637e5fe 100644 --- a/trio/_core/_tests/test_run.py +++ b/trio/_core/_tests/test_run.py @@ -1,18 +1,14 @@ import contextvars import functools -import platform import sys import threading import time import types -import warnings import weakref -from contextlib import contextmanager, ExitStack +from contextlib import ExitStack from math import inf -from textwrap import dedent import gc -import attr import outcome import sniffio import pytest @@ -1598,8 +1594,6 @@ async def test_asyncio_function_inside_nursery_does_not_explode(): # Regression test for https://github.com/python-trio/trio/issues/552 with pytest.raises(TypeError) as excinfo: async with _core.open_nursery() as nursery: - import asyncio - nursery.start_soon(sleep_forever) await create_asyncio_future_in_new_loop() assert "asyncio" in str(excinfo.value) diff --git a/trio/_core/_tests/test_thread_cache.py b/trio/_core/_tests/test_thread_cache.py index 5f19a5ac64..d7b50cfc51 100644 --- a/trio/_core/_tests/test_thread_cache.py +++ b/trio/_core/_tests/test_thread_cache.py @@ -2,10 +2,9 @@ import threading from queue import Queue import time -import sys from contextlib import contextmanager -from .tutil import slow, gc_collect_harder, disable_threading_excepthook +from .tutil import slow, gc_collect_harder from .. 
import _thread_cache from .._thread_cache import start_thread_soon, ThreadCache diff --git a/trio/_core/_tests/test_util.py b/trio/_core/_tests/test_util.py deleted file mode 100644 index 5871ed8eef..0000000000 --- a/trio/_core/_tests/test_util.py +++ /dev/null @@ -1 +0,0 @@ -import pytest diff --git a/trio/_core/_tests/test_windows.py b/trio/_core/_tests/test_windows.py index eb94b82b87..c335076ca4 100644 --- a/trio/_core/_tests/test_windows.py +++ b/trio/_core/_tests/test_windows.py @@ -9,7 +9,7 @@ pytestmark = pytest.mark.skipif(not on_windows, reason="windows only") from .tutil import slow, gc_collect_harder, restore_unraisablehook -from ... import _core, sleep, move_on_after +from ... import _core, sleep from ...testing import wait_all_tasks_blocked if on_windows: diff --git a/trio/_subprocess_platform/__init__.py b/trio/_subprocess_platform/__init__.py index 7a131e090c..17444b8473 100644 --- a/trio/_subprocess_platform/__init__.py +++ b/trio/_subprocess_platform/__init__.py @@ -74,7 +74,8 @@ def create_pipe_from_child_output() -> Tuple["ClosableReceiveStream", int]: elif sys.platform != "linux" and (TYPE_CHECKING or hasattr(_core, "wait_kevent")): from .kqueue import wait_child_exiting # noqa: F811 else: - from .waitid import wait_child_exiting # noqa: F811 + # noqa'd as it's an exported symbol + from .waitid import wait_child_exiting # noqa: F811, F401 except ImportError as ex: # pragma: no cover _wait_child_exiting_error = ex diff --git a/trio/_tests/test_abc.py b/trio/_tests/test_abc.py index c445c97103..9db53b891a 100644 --- a/trio/_tests/test_abc.py +++ b/trio/_tests/test_abc.py @@ -2,7 +2,6 @@ import attr -from ..testing import assert_checkpoints from .. import abc as tabc diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index db79434071..9006f8a8ef 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -339,8 +339,6 @@ def lookup_symbol(symbol): static_names = no_hidden(c.name for c in completions) - ignore_names elif tool == "mypy": - import itertools - # load the cached type information cached_type_info = cache_json["names"][class_name] if "node" not in cached_type_info: diff --git a/trio/_tests/test_highlevel_open_tcp_listeners.py b/trio/_tests/test_highlevel_open_tcp_listeners.py index 4942f94d38..9d0a3aeedc 100644 --- a/trio/_tests/test_highlevel_open_tcp_listeners.py +++ b/trio/_tests/test_highlevel_open_tcp_listeners.py @@ -11,7 +11,7 @@ from trio import open_tcp_listeners, serve_tcp, SocketListener, open_tcp_stream from trio.testing import open_stream_to_socket_listener from .. import socket as tsocket -from .._core._tests.tutil import slow, creates_ipv6, binds_ipv6 +from .._core._tests.tutil import binds_ipv6 if sys.version_info < (3, 11): from exceptiongroup import BaseExceptionGroup diff --git a/trio/_tests/test_highlevel_ssl_helpers.py b/trio/_tests/test_highlevel_ssl_helpers.py index c00f5dc464..d8a2bb74d4 100644 --- a/trio/_tests/test_highlevel_ssl_helpers.py +++ b/trio/_tests/test_highlevel_ssl_helpers.py @@ -7,7 +7,9 @@ import trio from trio.socket import AF_INET, SOCK_STREAM, IPPROTO_TCP import trio.testing -from .test_ssl import client_ctx, SERVER_CTX + +# noqa is needed because flake8 doesn't understand how pytest fixtures work. 
+from .test_ssl import client_ctx, SERVER_CTX # noqa: F401 from .._highlevel_ssl_helpers import ( open_ssl_over_tcp_stream, diff --git a/trio/_tests/test_socket.py b/trio/_tests/test_socket.py index 44d285452f..1ed612924d 100644 --- a/trio/_tests/test_socket.py +++ b/trio/_tests/test_socket.py @@ -10,7 +10,6 @@ import sys as _sys from .._core._tests.tutil import creates_ipv6, binds_ipv6 from .. import _core -from .. import _socket as _tsocket from .. import socket as tsocket from .._socket import _NUMERIC_ONLY, _try_sync from ..testing import assert_checkpoints, wait_all_tasks_blocked diff --git a/trio/_tests/test_ssl.py b/trio/_tests/test_ssl.py index 7c63da56e8..234290e185 100644 --- a/trio/_tests/test_ssl.py +++ b/trio/_tests/test_ssl.py @@ -1,7 +1,6 @@ from __future__ import annotations import os -import re import sys from typing import TYPE_CHECKING diff --git a/trio/_tests/test_sync.py b/trio/_tests/test_sync.py index 33f79c4df2..fa903f3492 100644 --- a/trio/_tests/test_sync.py +++ b/trio/_tests/test_sync.py @@ -5,8 +5,7 @@ from ..testing import wait_all_tasks_blocked, assert_checkpoints from .. import _core -from .. import _timeouts -from .._timeouts import sleep_forever, move_on_after +from .._timeouts import sleep_forever from .._sync import * diff --git a/trio/_tests/test_unix_pipes.py b/trio/_tests/test_unix_pipes.py index 2109ab8dd2..ce6a4f80a0 100644 --- a/trio/_tests/test_unix_pipes.py +++ b/trio/_tests/test_unix_pipes.py @@ -1,13 +1,12 @@ import errno import select import os -import tempfile import sys import pytest from .._core._tests.tutil import gc_collect_harder, skip_if_fbsd_pipes_broken -from .. import _core, move_on_after +from .. import _core from ..testing import wait_all_tasks_blocked, check_one_way_stream posix = os.name == "posix" diff --git a/trio/_tests/test_windows_pipes.py b/trio/_tests/test_windows_pipes.py index 4443cc0a2f..4837d24ba9 100644 --- a/trio/_tests/test_windows_pipes.py +++ b/trio/_tests/test_windows_pipes.py @@ -1,15 +1,10 @@ -import errno -import select - -import os import sys from typing import Any from typing import Tuple import pytest -from .._core._tests.tutil import gc_collect_harder -from .. import _core, move_on_after +from .. import _core from ..testing import wait_all_tasks_blocked, check_one_way_stream if sys.platform == "win32": diff --git a/trio/_tests/tools/test_gen_exports.py b/trio/_tests/tools/test_gen_exports.py index 73eacc098a..55495c920d 100644 --- a/trio/_tests/tools/test_gen_exports.py +++ b/trio/_tests/tools/test_gen_exports.py @@ -1,10 +1,6 @@ import ast -import astor import pytest -import os -import sys -from shutil import copyfile from trio._tools.gen_exports import ( get_public_methods, create_passthrough_args, diff --git a/trio/_wait_for_object.py b/trio/_wait_for_object.py index 2e24682444..9c763f7363 100644 --- a/trio/_wait_for_object.py +++ b/trio/_wait_for_object.py @@ -1,5 +1,4 @@ import math -from . import _timeouts import trio from ._core._windows_cffi import ( ffi, diff --git a/trio/_windows_pipes.py b/trio/_windows_pipes.py index 693792ba0e..bd5c34eee2 100644 --- a/trio/_windows_pipes.py +++ b/trio/_windows_pipes.py @@ -3,7 +3,7 @@ from . 
import _core from ._abc import SendStream, ReceiveStream from ._util import ConflictDetector, Final -from ._core._windows_cffi import _handle, raise_winerror, kernel32, ffi +from ._core._windows_cffi import _handle, raise_winerror, kernel32 assert sys.platform == "win32" or not TYPE_CHECKING diff --git a/trio/testing/_fake_net.py b/trio/testing/_fake_net.py index f0ea927734..9df5ab5b6c 100644 --- a/trio/testing/_fake_net.py +++ b/trio/testing/_fake_net.py @@ -9,12 +9,9 @@ import trio import attr import ipaddress -from collections import deque import errno import os -from typing import Union, List, Optional -import enum -from contextlib import contextmanager +from typing import Union, Optional from trio._util import Final, NoPublicConstructor From e8786acc081a87b11f9c5cfbaa60aad34e0fee36 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 2 Jul 2023 11:16:37 +0900 Subject: [PATCH 075/162] Dependency updates (#2679) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- docs-requirements.txt | 19 ++++++++++--------- test-requirements.txt | 19 ++++++++++--------- 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 93d9195e08..06136fd765 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -38,8 +38,10 @@ imagesize==1.4.1 # via sphinx immutables==0.19 # via -r docs-requirements.in -importlib-metadata==6.6.0 +importlib-metadata==6.7.0 # via sphinx +importlib-resources==5.12.0 + # via towncrier incremental==22.10.0 # via towncrier jinja2==3.0.3 @@ -47,7 +49,7 @@ jinja2==3.0.3 # -r docs-requirements.in # sphinx # towncrier -markupsafe==2.1.2 +markupsafe==2.1.3 # via jinja2 outcome==1.2.0 # via -r docs-requirements.in @@ -71,7 +73,7 @@ sphinx==6.1.3 # sphinx-rtd-theme # sphinxcontrib-jquery # sphinxcontrib-trio -sphinx-rtd-theme==1.2.1 +sphinx-rtd-theme==1.2.2 # via -r docs-requirements.in sphinxcontrib-applehelp==1.0.4 # via sphinx @@ -91,12 +93,11 @@ sphinxcontrib-trio==1.1.2 # via -r docs-requirements.in tomli==2.0.1 # via towncrier -towncrier==22.12.0 +towncrier==23.6.0 # via -r docs-requirements.in -urllib3==2.0.2 +urllib3==2.0.3 # via requests zipp==3.15.0 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -# setuptools + # via + # importlib-metadata + # importlib-resources diff --git a/test-requirements.txt b/test-requirements.txt index 727a1d97f7..6babde688e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -72,14 +72,14 @@ mccabe==0.7.0 # via # flake8 # pylint -mypy==1.3.0 ; implementation_name == "cpython" +mypy==1.4.1 ; implementation_name == "cpython" # via -r test-requirements.in mypy-extensions==1.0.0 ; implementation_name == "cpython" # via # -r test-requirements.in # black # mypy -nodeenv==1.7.0 +nodeenv==1.8.0 # via pyright outcome==1.2.0 # via -r test-requirements.in @@ -96,13 +96,13 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.13.0 +pip-tools==6.14.0 # via -r test-requirements.in -platformdirs==3.5.1 +platformdirs==3.8.0 # via # black # pylint -pluggy==1.0.0 +pluggy==1.2.0 # via pytest prompt-toolkit==3.0.38 # via ipython @@ -122,9 +122,9 @@ pyopenssl==23.2.0 # via -r test-requirements.in pyproject-hooks==1.0.0 # via build -pyright==1.1.310 +pyright==1.1.316 # via -r test-requirements.in -pytest==7.3.1 +pytest==7.4.0 # via -r test-requirements.in sniffio==1.3.0 # via -r test-requirements.in 
@@ -136,6 +136,7 @@ tomli==2.0.1 # build # flake8-pyproject # mypy + # pip-tools # pylint # pytest tomlkit==0.11.8 @@ -146,9 +147,9 @@ traitlets==5.9.0 # matplotlib-inline trustme==1.0.0 # via -r test-requirements.in -types-pyopenssl==23.1.0.3 ; implementation_name == "cpython" +types-pyopenssl==23.2.0.1 ; implementation_name == "cpython" # via -r test-requirements.in -typing-extensions==4.6.2 +typing-extensions==4.6.3 # via # -r test-requirements.in # astroid From b2d72ac726ecce99865f201c7000f20e8c5d7ded Mon Sep 17 00:00:00 2001 From: A5rocks Date: Sun, 2 Jul 2023 11:21:10 +0900 Subject: [PATCH 076/162] Update a few incorrect deprecations --- trio/tests.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/trio/tests.py b/trio/tests.py index f1940a6929..573a076da8 100644 --- a/trio/tests.py +++ b/trio/tests.py @@ -7,7 +7,7 @@ warn_deprecated( "trio.tests", - "0.24.0", + "0.22.1", instead="trio._tests", issue="https://github.com/python-trio/trio/issues/274", ) @@ -21,7 +21,7 @@ class TestsDeprecationWrapper: def __getattr__(self, attr: str) -> Any: warn_deprecated( f"trio.tests.{attr}", - "0.24.0", + "0.22.1", instead=f"trio._tests.{attr}", issue="https://github.com/python-trio/trio/issues/274", ) From c7e897d5d569f4edc5602edd515a7fb9ccdcabe9 Mon Sep 17 00:00:00 2001 From: A5rocks Date: Sun, 2 Jul 2023 11:27:55 +0900 Subject: [PATCH 077/162] Release 0.22.1 --- docs/source/history.rst | 30 ++++++++++++++++++++++++++++++ newsfragments/1148.feature.rst | 1 - newsfragments/1810.feature.rst | 1 - newsfragments/2333.bugfix.rst | 1 - newsfragments/2462.bugfix.rst | 1 - newsfragments/2493.breaking.rst | 1 - newsfragments/970.doc.rst | 1 - trio/_version.py | 2 +- 8 files changed, 31 insertions(+), 7 deletions(-) delete mode 100644 newsfragments/1148.feature.rst delete mode 100644 newsfragments/1810.feature.rst delete mode 100644 newsfragments/2333.bugfix.rst delete mode 100644 newsfragments/2462.bugfix.rst delete mode 100644 newsfragments/2493.breaking.rst delete mode 100644 newsfragments/970.doc.rst diff --git a/docs/source/history.rst b/docs/source/history.rst index bcf90c79ab..8e1d0209d6 100644 --- a/docs/source/history.rst +++ b/docs/source/history.rst @@ -5,6 +5,36 @@ Release history .. towncrier release notes start +Trio 0.22.1 (2023-07-02) +------------------------ + +Breaking changes +~~~~~~~~~~~~~~~~ + +- Timeout functions now raise `ValueError` if passed `math.nan`. This includes `trio.sleep`, `trio.sleep_until`, `trio.move_on_at`, `trio.move_on_after`, `trio.fail_at` and `trio.fail_after`. (`#2493 `__) + + +Features +~~~~~~~~ + +- Added support for naming threads created with `trio.to_thread.run_sync`, requires pthreads so is only available on POSIX platforms with glibc installed. (`#1148 `__) +- `trio.socket.socket` now prints the address it tried to connect to upon failure. (`#1810 `__) + + +Bugfixes +~~~~~~~~ + +- Fixed a crash that can occur when running Trio within an embedded Python interpreter, by handling the `TypeError` that is raised when trying to (re-)install a C signal handler. (`#2333 `__) +- Fix :func:`sniffio.current_async_library` when Trio tasks are spawned from a non-Trio context (such as when using trio-asyncio). Previously, a regular Trio task would inherit the non-Trio library name, and spawning a system task would cause the non-Trio caller to start thinking it was Trio. (`#2462 `__) +- Issued a new release as in the git tag for 0.22.0, ``trio.__version__`` is incorrectly set to 0.21.0+dev. 
(`#2485 `__) + + +Improved documentation +~~~~~~~~~~~~~~~~~~~~~~ + +- Documented that :obj:`Nursery.start_soon` does not guarantee task ordering. (`#970 `__) + + Trio 0.22.0 (2022-09-28) ------------------------ diff --git a/newsfragments/1148.feature.rst b/newsfragments/1148.feature.rst deleted file mode 100644 index 51f2b792c3..0000000000 --- a/newsfragments/1148.feature.rst +++ /dev/null @@ -1 +0,0 @@ -Added support for naming threads created with `trio.to_thread.run_sync`, requires pthreads so is only available on POSIX platforms with glibc installed. diff --git a/newsfragments/1810.feature.rst b/newsfragments/1810.feature.rst deleted file mode 100644 index a2599d32b0..0000000000 --- a/newsfragments/1810.feature.rst +++ /dev/null @@ -1 +0,0 @@ -`trio.socket.socket` now prints the address it tried to connect to upon failure. diff --git a/newsfragments/2333.bugfix.rst b/newsfragments/2333.bugfix.rst deleted file mode 100644 index a0f4c9fd37..0000000000 --- a/newsfragments/2333.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed a crash that can occur when running Trio within an embedded Python interpreter, by handling the `TypeError` that is raised when trying to (re-)install a C signal handler. diff --git a/newsfragments/2462.bugfix.rst b/newsfragments/2462.bugfix.rst deleted file mode 100644 index 9208289ed9..0000000000 --- a/newsfragments/2462.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix :func:`sniffio.current_async_library` when Trio tasks are spawned from a non-Trio context (such as when using trio-asyncio). Previously, a regular Trio task would inherit the non-Trio library name, and spawning a system task would cause the non-Trio caller to start thinking it was Trio. diff --git a/newsfragments/2493.breaking.rst b/newsfragments/2493.breaking.rst deleted file mode 100644 index 51c2985d1a..0000000000 --- a/newsfragments/2493.breaking.rst +++ /dev/null @@ -1 +0,0 @@ -Timeout functions now raise `ValueError` if passed `math.nan`. This includes `trio.sleep`, `trio.sleep_until`, `trio.move_on_at`, `trio.move_on_after`, `trio.fail_at` and `trio.fail_after`. diff --git a/newsfragments/970.doc.rst b/newsfragments/970.doc.rst deleted file mode 100644 index 6e114abf5b..0000000000 --- a/newsfragments/970.doc.rst +++ /dev/null @@ -1 +0,0 @@ -Documented that :obj:`Nursery.start_soon` does not guarantee task ordering. 
diff --git a/trio/_version.py b/trio/_version.py index 7111a4849d..78de322caf 100644 --- a/trio/_version.py +++ b/trio/_version.py @@ -1,3 +1,3 @@ # This file is imported from __init__.py and exec'd from setup.py -__version__ = "0.22.0+dev" +__version__ = "0.22.1" From 87d8d381c4506a5401daeeb98bb75da34fa3e617 Mon Sep 17 00:00:00 2001 From: A5rocks Date: Sun, 2 Jul 2023 11:51:51 +0900 Subject: [PATCH 078/162] Released 0.22.1, start new cycle --- trio/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trio/_version.py b/trio/_version.py index 78de322caf..48fcaf534e 100644 --- a/trio/_version.py +++ b/trio/_version.py @@ -1,3 +1,3 @@ # This file is imported from __init__.py and exec'd from setup.py -__version__ = "0.22.1" +__version__ = "0.22.1+dev" From 4816f0e47a17b996ca3d44319b593fa35320e55f Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Mon, 3 Jul 2023 14:01:41 +0200 Subject: [PATCH 079/162] Type completeness improvement (timeouts, CancelScope, and more) (#2671) * Add --full-diagnostics-file to check_type_completeness.py for use when developing * _timeouts.py, trio/_core, trio/_tests, open_nursery, move_on_at, move_on_after and CancelScope now type complete --- trio/_core/_exceptions.py | 2 +- trio/_core/_run.py | 44 ++++++++++++++++---------- trio/_tests/check_type_completeness.py | 14 ++++++++ trio/_tests/verify_types.json | 43 ++++--------------------- trio/_timeouts.py | 27 ++++++++++------ trio/_util.py | 9 ++++-- 6 files changed, 71 insertions(+), 68 deletions(-) diff --git a/trio/_core/_exceptions.py b/trio/_core/_exceptions.py index 8c26162ee9..bdc7b31c21 100644 --- a/trio/_core/_exceptions.py +++ b/trio/_core/_exceptions.py @@ -59,7 +59,7 @@ class Cancelled(BaseException, metaclass=NoPublicConstructor): """ - def __str__(self): + def __str__(self) -> str: return "Cancelled" diff --git a/trio/_core/_run.py b/trio/_core/_run.py index c07e29ab97..2727fe1e89 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -17,6 +17,9 @@ from math import inf from time import perf_counter from typing import TYPE_CHECKING, Any, NoReturn, TypeVar +from types import TracebackType +from collections.abc import Iterator +from contextlib import AbstractAsyncContextManager import attr from outcome import Error, Outcome, Value, capture @@ -475,15 +478,15 @@ class CancelScope(metaclass=Final): has been entered yet, and changes take immediate effect. 
""" - _cancel_status = attr.ib(default=None, init=False) - _has_been_entered = attr.ib(default=False, init=False) - _registered_deadline = attr.ib(default=inf, init=False) - _cancel_called = attr.ib(default=False, init=False) - cancelled_caught = attr.ib(default=False, init=False) + _cancel_status: CancelStatus | None = attr.ib(default=None, init=False) + _has_been_entered: bool = attr.ib(default=False, init=False) + _registered_deadline: float = attr.ib(default=inf, init=False) + _cancel_called: bool = attr.ib(default=False, init=False) + cancelled_caught: bool = attr.ib(default=False, init=False) # Constructor arguments: - _deadline = attr.ib(default=inf, kw_only=True) - _shield = attr.ib(default=False, kw_only=True) + _deadline: float = attr.ib(default=inf, kw_only=True) + _shield: bool = attr.ib(default=False, kw_only=True) @enable_ki_protection def __enter__(self): @@ -573,7 +576,12 @@ def _close(self, exc): self._cancel_status = None return exc - def __exit__(self, etype, exc, tb): + def __exit__( + self, + etype: type[BaseException] | None, + exc: BaseException | None, + tb: TracebackType | None, + ) -> bool: # NB: NurseryManager calls _close() directly rather than __exit__(), # so __exit__() must be just _close() plus this logic for adapting # the exception-filtering result to the context manager API. @@ -607,7 +615,7 @@ def __exit__(self, etype, exc, tb): # TODO: check if PEP558 changes the need for this call # https://github.com/python/cpython/pull/3640 - def __repr__(self): + def __repr__(self) -> str: if self._cancel_status is not None: binding = "active" elif self._has_been_entered: @@ -634,7 +642,7 @@ def __repr__(self): @contextmanager @enable_ki_protection - def _might_change_registered_deadline(self): + def _might_change_registered_deadline(self) -> Iterator[None]: try: yield finally: @@ -658,7 +666,7 @@ def _might_change_registered_deadline(self): runner.force_guest_tick_asap() @property - def deadline(self): + def deadline(self) -> float: """Read-write, :class:`float`. An absolute time on the current run's clock at which this scope will automatically become cancelled. You can adjust the deadline by modifying this @@ -684,12 +692,12 @@ def deadline(self): return self._deadline @deadline.setter - def deadline(self, new_deadline): + def deadline(self, new_deadline: float) -> None: with self._might_change_registered_deadline(): self._deadline = float(new_deadline) @property - def shield(self): + def shield(self) -> bool: """Read-write, :class:`bool`, default :data:`False`. So long as this is set to :data:`True`, then the code inside this scope will not receive :exc:`~trio.Cancelled` exceptions from scopes @@ -714,7 +722,7 @@ def shield(self): @shield.setter @enable_ki_protection - def shield(self, new_value): + def shield(self, new_value: bool) -> None: if not isinstance(new_value, bool): raise TypeError("shield must be a bool") self._shield = new_value @@ -722,7 +730,7 @@ def shield(self, new_value): self._cancel_status.recalculate() @enable_ki_protection - def cancel(self): + def cancel(self) -> None: """Cancels this scope immediately. This method is idempotent, i.e., if the scope was already @@ -736,7 +744,7 @@ def cancel(self): self._cancel_status.recalculate() @property - def cancel_called(self): + def cancel_called(self) -> bool: """Readonly :class:`bool`. Records whether cancellation has been requested for this scope, either by an explicit call to :meth:`cancel` or by the deadline expiring. 
@@ -890,7 +898,9 @@ def __exit__(self): # pragma: no cover assert False, """Never called, but should be defined""" -def open_nursery(strict_exception_groups=None): +def open_nursery( + strict_exception_groups: bool | None = None, +) -> AbstractAsyncContextManager[Nursery]: """Returns an async context manager which must be used to create a new `Nursery`. diff --git a/trio/_tests/check_type_completeness.py b/trio/_tests/check_type_completeness.py index d67d11958e..15b2da2d94 100755 --- a/trio/_tests/check_type_completeness.py +++ b/trio/_tests/check_type_completeness.py @@ -75,6 +75,19 @@ def main(args: argparse.Namespace) -> int: if res.stderr: print(res.stderr) + if args.full_diagnostics_file is not None: + with open(args.full_diagnostics_file, "w") as file: + json.dump( + [ + sym + for sym in current_result["typeCompleteness"]["symbols"] + if sym["diagnostics"] + ], + file, + sort_keys=True, + indent=2, + ) + last_result = json.loads(RESULT_FILE.read_text()) for key in "errorCount", "warningCount", "informationCount": @@ -153,6 +166,7 @@ def main(args: argparse.Namespace) -> int: parser = argparse.ArgumentParser() parser.add_argument("--overwrite-file", action="store_true", default=False) +parser.add_argument("--full-diagnostics-file", type=Path, default=None) args = parser.parse_args() assert __name__ == "__main__", "This script should be run standalone" diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 7b0c39d20d..e54af12444 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.8155339805825242, + "completenessScore": 0.8317152103559871, "exportedSymbolCounts": { "withAmbiguousType": 1, - "withKnownType": 504, - "withUnknownType": 113 + "withKnownType": 514, + "withUnknownType": 103 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -45,54 +45,32 @@ } ], "otherSymbolCounts": { - "withAmbiguousType": 15, - "withKnownType": 231, - "withUnknownType": 236 + "withAmbiguousType": 14, + "withKnownType": 244, + "withUnknownType": 224 }, "packageName": "trio", "symbols": [ - "trio._core._exceptions.Cancelled", - "trio._core._exceptions.Cancelled.__str__", - "trio._util.NoPublicConstructor", - "trio._util.NoPublicConstructor.__call__", - "trio._util.Final.__new__", "trio.run", - "trio.open_nursery", - "trio._core._run.CancelScope", - "trio._core._run.CancelScope.cancelled_caught", - "trio._core._run.CancelScope.__exit__", - "trio._core._run.CancelScope.__repr__", - "trio._core._run.CancelScope.deadline", - "trio._core._run.CancelScope.cancel_called", "trio.current_effective_deadline", "trio._core._run._TaskStatusIgnored.__repr__", "trio._core._run._TaskStatusIgnored.started", "trio.current_time", - "trio._core._run.Nursery", "trio._core._run.Nursery.__init__", "trio._core._run.Nursery.child_tasks", "trio._core._run.Nursery.parent_task", "trio._core._run.Nursery.start_soon", "trio._core._run.Nursery.start", "trio._core._run.Nursery.__del__", - "trio.move_on_at", - "trio.move_on_after", - "trio.sleep_forever", - "trio.sleep_until", - "trio.sleep", - "trio.fail_after", - "trio._sync.Event", "trio._sync.Event.is_set", "trio._sync.Event.wait", "trio._sync.Event.statistics", - "trio._sync.CapacityLimiter", "trio._sync.CapacityLimiter.__init__", "trio._sync.CapacityLimiter.__repr__", "trio._sync.CapacityLimiter.total_tokens", "trio._sync.CapacityLimiter.borrowed_tokens", "trio._sync.CapacityLimiter.available_tokens", 
"trio._sync.CapacityLimiter.statistics", - "trio._sync.Semaphore", "trio._sync.Semaphore.__init__", "trio._sync.Semaphore.__repr__", "trio._sync.Semaphore.value", @@ -103,7 +81,6 @@ "trio._sync._LockImpl.locked", "trio._sync._LockImpl.statistics", "trio._sync.StrictFIFOLock", - "trio._sync.Condition", "trio._sync.Condition.__init__", "trio._sync.Condition.locked", "trio._sync.Condition.acquire_nowait", @@ -165,7 +142,6 @@ "trio._path.Path.__bytes__", "trio._path.Path.__truediv__", "trio._path.Path.__rtruediv__", - "trio._path.AsyncAutoWrapperType", "trio._path.AsyncAutoWrapperType.__init__", "trio._path.AsyncAutoWrapperType.generate_forwards", "trio._path.AsyncAutoWrapperType.generate_wraps", @@ -203,7 +179,6 @@ "trio._ssl.SSLListener.__init__", "trio._ssl.SSLListener.accept", "trio._ssl.SSLListener.aclose", - "trio._dtls.DTLSEndpoint", "trio._dtls.DTLSEndpoint.__init__", "trio._dtls.DTLSEndpoint.__del__", "trio._dtls.DTLSEndpoint.close", @@ -254,7 +229,6 @@ "trio.from_thread.run_sync", "trio.lowlevel.cancel_shielded_checkpoint", "trio.lowlevel.currently_ki_protected", - "trio._core._run.Task", "trio._core._run.Task.coro", "trio._core._run.Task.name", "trio._core._run.Task.context", @@ -266,13 +240,11 @@ "trio._core._run.Task.iter_await_frames", "trio.lowlevel.checkpoint", "trio.lowlevel.current_task", - "trio._core._parking_lot.ParkingLot", "trio._core._parking_lot.ParkingLot.__len__", "trio._core._parking_lot.ParkingLot.__bool__", "trio._core._parking_lot.ParkingLot.unpark_all", "trio._core._parking_lot.ParkingLot.repark_all", "trio._core._parking_lot.ParkingLot.statistics", - "trio._core._unbounded_queue.UnboundedQueue", "trio._core._unbounded_queue.UnboundedQueue.__repr__", "trio._core._unbounded_queue.UnboundedQueue.qsize", "trio._core._unbounded_queue.UnboundedQueue.empty", @@ -281,12 +253,10 @@ "trio._core._unbounded_queue.UnboundedQueue.statistics", "trio._core._unbounded_queue.UnboundedQueue.__aiter__", "trio._core._unbounded_queue.UnboundedQueue.__anext__", - "trio._core._local.RunVar", "trio._core._local.RunVar.get", "trio._core._local.RunVar.set", "trio._core._local.RunVar.reset", "trio._core._local.RunVar.__repr__", - "trio._core._entry_queue.TrioToken", "trio._core._entry_queue.TrioToken.run_sync_soon", "trio.lowlevel.current_trio_token", "trio.lowlevel.temporarily_detach_coroutine_object", @@ -329,7 +299,6 @@ "trio.testing.trio_test", "trio.testing.assert_checkpoints", "trio.testing.assert_no_checkpoints", - "trio.testing._sequencer.Sequencer", "trio.testing.check_one_way_stream", "trio.testing.check_two_way_stream", "trio.testing.check_half_closeable_stream", diff --git a/trio/_timeouts.py b/trio/_timeouts.py index ad31e78404..1d03b2f2e3 100644 --- a/trio/_timeouts.py +++ b/trio/_timeouts.py @@ -1,10 +1,13 @@ +from __future__ import annotations + import math -from contextlib import contextmanager +from contextlib import AbstractContextManager, contextmanager +from typing import TYPE_CHECKING import trio -def move_on_at(deadline): +def move_on_at(deadline: float) -> trio.CancelScope: """Use as a context manager to create a cancel scope with the given absolute deadline. @@ -20,7 +23,7 @@ def move_on_at(deadline): return trio.CancelScope(deadline=deadline) -def move_on_after(seconds): +def move_on_after(seconds: float) -> trio.CancelScope: """Use as a context manager to create a cancel scope whose deadline is set to now + *seconds*. 
@@ -36,7 +39,7 @@ def move_on_after(seconds): return move_on_at(trio.current_time() + seconds) -async def sleep_forever(): +async def sleep_forever() -> None: """Pause execution of the current task forever (or until cancelled). Equivalent to calling ``await sleep(math.inf)``. @@ -45,7 +48,7 @@ async def sleep_forever(): await trio.lowlevel.wait_task_rescheduled(lambda _: trio.lowlevel.Abort.SUCCEEDED) -async def sleep_until(deadline): +async def sleep_until(deadline: float) -> None: """Pause execution of the current task until the given time. The difference between :func:`sleep` and :func:`sleep_until` is that the @@ -65,7 +68,7 @@ async def sleep_until(deadline): await sleep_forever() -async def sleep(seconds): +async def sleep(seconds: float) -> None: """Pause execution of the current task for the given number of seconds. Args: @@ -91,8 +94,9 @@ class TooSlowError(Exception): """ -@contextmanager -def fail_at(deadline): +# workaround for PyCharm not being able to infer return type from @contextmanager +# see https://youtrack.jetbrains.com/issue/PY-36444/PyCharm-doesnt-infer-types-when-using-contextlib.contextmanager-decorator +def fail_at(deadline: float) -> AbstractContextManager[trio.CancelScope]: # type: ignore[misc] """Creates a cancel scope with the given deadline, and raises an error if it is actually cancelled. @@ -113,14 +117,17 @@ def fail_at(deadline): ValueError: if deadline is NaN. """ - with move_on_at(deadline) as scope: yield scope if scope.cancelled_caught: raise TooSlowError -def fail_after(seconds): +if not TYPE_CHECKING: + fail_at = contextmanager(fail_at) + + +def fail_after(seconds: float) -> AbstractContextManager[trio.CancelScope]: """Creates a cancel scope with the given timeout, and raises an error if it is actually cancelled. diff --git a/trio/_util.py b/trio/_util.py index b60e0104e8..89a2dea7de 100644 --- a/trio/_util.py +++ b/trio/_util.py @@ -1,4 +1,5 @@ # Little utilities we use internally +from __future__ import annotations from abc import ABCMeta import os @@ -280,7 +281,9 @@ class SomeClass(metaclass=Final): - TypeError if a subclass is created """ - def __new__(cls, name, bases, cls_namespace): + def __new__( + cls, name: str, bases: tuple[type, ...], cls_namespace: dict[str, object] + ) -> Final: for base in bases: if isinstance(base, Final): raise TypeError( @@ -312,12 +315,12 @@ class SomeClass(metaclass=NoPublicConstructor): - TypeError if a subclass or an instance is created. 
""" - def __call__(cls, *args, **kwargs): + def __call__(cls, *args: object, **kwargs: object) -> None: raise TypeError( f"{cls.__module__}.{cls.__qualname__} has no public constructor" ) - def _create(cls: t.Type[T], *args: t.Any, **kwargs: t.Any) -> T: + def _create(cls: t.Type[T], *args: object, **kwargs: object) -> T: return super().__call__(*args, **kwargs) # type: ignore From 042f035035bb2af381ba39402d4c2e06f0ed7efd Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Thu, 6 Jul 2023 14:08:35 +0200 Subject: [PATCH 080/162] don't open git diff in pager, mypy export tests now error on any mypy output (#2685) * don't open git diff in pager and always print in color, mypy export tests now error out if there's any output --- check.sh | 4 +-- trio/_tests/test_exports.py | 56 +++++++++++++++++-------------------- 2 files changed, 28 insertions(+), 32 deletions(-) diff --git a/check.sh b/check.sh index a9b2eb1e5e..77c55838ba 100755 --- a/check.sh +++ b/check.sh @@ -32,14 +32,14 @@ pip-compile docs-requirements.in if git status --porcelain | grep -q "requirements.txt"; then git status --porcelain - git diff *requirements.txt + git --no-pager diff --color *requirements.txt EXIT_STATUS=1 fi python trio/_tests/check_type_completeness.py --overwrite-file || EXIT_STATUS=$? if git status --porcelain trio/_tests/verify_types.json | grep -q "M"; then echo "Type completeness changed, please update!" - git diff trio/_tests/verify_types.json + git --no-pager diff --color trio/_tests/verify_types.json EXIT_STATUS=1 fi diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index 9006f8a8ef..77b390f22f 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -21,6 +21,28 @@ mypy_cache_updated = False +def _ensure_mypy_cache_updated(): + # This pollutes the `empty` dir. Should this be changed? + from mypy.api import run + + global mypy_cache_updated + if not mypy_cache_updated: + # mypy cache was *probably* already updated by the other tests, + # but `pytest -k ...` might run just this test on its own + result = run( + [ + "--config-file=", + "--cache-dir=./.mypy_cache", + "--no-error-summary", + "-c", + "import trio", + ] + ) + assert not result[1] # stderr + assert not result[0] # stdout + mypy_cache_updated = True + + def test_core_is_properly_reexported(): # Each export from _core should be re-exported by exactly one of these # three modules: @@ -74,7 +96,6 @@ def public_modules(module): "ignore:module 'sre_constants' is deprecated:DeprecationWarning", ) def test_static_tool_sees_all_symbols(tool, modname, tmpdir): - global mypy_cache_updated module = importlib.import_module(modname) def no_underscores(symbols): @@ -113,19 +134,8 @@ def no_underscores(symbols): pytest.skip("mypy not installed in tests on pypy") cache = Path.cwd() / ".mypy_cache" - from mypy.api import run - - # This pollutes the `empty` dir. Should this be changed? 
- if not mypy_cache_updated: - run( - [ - "--config-file=", - "--cache-dir=./.mypy_cache", - "-c", - f"import {modname}", - ] - ) - mypy_cache_updated = True + + _ensure_mypy_cache_updated() trio_cache = next(cache.glob("*/trio")) _, modname = (modname + ".").split(".", 1) @@ -209,7 +219,6 @@ def no_underscores(symbols): @pytest.mark.parametrize("module_name", PUBLIC_MODULE_NAMES) @pytest.mark.parametrize("tool", ["jedi", "mypy"]) def test_static_tool_sees_class_members(tool, module_name, tmpdir) -> None: - global mypy_cache_updated module = PUBLIC_MODULES[PUBLIC_MODULE_NAMES.index(module_name)] # ignore hidden, but not dunder, symbols @@ -232,21 +241,8 @@ def no_hidden(symbols): py_typed_path.write_text("") cache = Path.cwd() / ".mypy_cache" - from mypy.api import run - - # This pollutes the `empty` dir. Should this be changed? - if not mypy_cache_updated: # pragma: no cover - # mypy cache was *probably* already updated by the other tests, - # but `pytest -k ...` might run just this test on its own - run( - [ - "--config-file=", - "--cache-dir=./.mypy_cache", - "-c", - f"import {module_name}", - ] - ) - mypy_cache_updated = True + + _ensure_mypy_cache_updated() trio_cache = next(cache.glob("*/trio")) modname = module_name From 54e7be87db0164343ef395d41c87587a0fc9785d Mon Sep 17 00:00:00 2001 From: jakkdl Date: Fri, 7 Jul 2023 11:34:45 +0200 Subject: [PATCH 081/162] add isort --- check.sh | 6 ++++++ pyproject.toml | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/check.sh b/check.sh index 77c55838ba..8912f709e7 100755 --- a/check.sh +++ b/check.sh @@ -18,6 +18,11 @@ if ! black --check setup.py trio; then black --diff setup.py trio fi +if ! isort --check setup.py trio; then + EXIT_STATUS=1 + isort --diff setup.py trio +fi + # Run flake8, configured in pyproject.toml flake8 trio/ || EXIT_STATUS=$? @@ -54,6 +59,7 @@ To fix formatting and see remaining errors, run pip install -r test-requirements.txt black setup.py trio + isort setup.py trio ./check.sh in your local checkout. 
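
The new isort check mirrors what the black check already does: run in --check mode, show a diff on failure. Roughly the same operation through isort's Python API (a sketch assuming isort 5.x; the `messy` snippet is made up for illustration), driven by the same profile that the pyproject.toml hunk below configures:

    import isort

    messy = "from trio import sleep, CancelScope\nimport attr\nimport trio\n"
    # Returns the snippet with imports regrouped and sorted under the black profile.
    print(isort.code(messy, profile="black", combine_as_imports=True))
    # isort.check_code(...) with the same arguments is the --check equivalent.
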
diff --git a/pyproject.toml b/pyproject.toml index 0cce1106b7..0f95a0cbc1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,11 @@ per-file-ignores = [ 'trio/testing/__init__.py: F401' ] +[tool.isort] +combine_as_imports = true +profile = "black" +skip_gitignore = true + [tool.pytest.ini_options] addopts = ["--strict-markers", "--strict-config"] faulthandler_timeout = 60 From 933f77b96f0092e1baab4474a9208fc2e379aa32 Mon Sep 17 00:00:00 2001 From: jakkdl Date: Fri, 7 Jul 2023 11:34:54 +0200 Subject: [PATCH 082/162] run isort on all files --- setup.py | 2 +- trio/__init__.py | 141 ++-- trio/_abc.py | 1 + trio/_channel.py | 18 +- trio/_core/__init__.py | 71 +- trio/_core/_asyncgens.py | 5 +- trio/_core/_entry_queue.py | 2 +- trio/_core/_generated_instrumentation.py | 5 +- trio/_core/_generated_io_epoll.py | 5 +- trio/_core/_generated_io_kqueue.py | 5 +- trio/_core/_generated_io_windows.py | 5 +- trio/_core/_generated_run.py | 5 +- trio/_core/_io_common.py | 2 + trio/_core/_io_epoll.py | 7 +- trio/_core/_io_kqueue.py | 6 +- trio/_core/_io_windows.py | 25 +- trio/_core/_ki.py | 2 +- trio/_core/_local.py | 3 +- trio/_core/_mock_clock.py | 2 +- trio/_core/_parking_lot.py | 3 +- trio/_core/_run.py | 8 +- trio/_core/_tests/test_asyncgen.py | 7 +- trio/_core/_tests/test_guest_mode.py | 14 +- trio/_core/_tests/test_instrumentation.py | 3 +- trio/_core/_tests/test_io.py | 11 +- trio/_core/_tests/test_ki.py | 15 +- trio/_core/_tests/test_mock_clock.py | 3 +- trio/_core/_tests/test_multierror.py | 18 +- .../apport_excepthook.py | 4 +- .../ipython_custom_exc.py | 4 +- .../simple_excepthook_IPython.py | 1 - trio/_core/_tests/test_run.py | 27 +- trio/_core/_tests/test_thread_cache.py | 9 +- trio/_core/_tests/test_windows.py | 12 +- trio/_core/_tests/tutil.py | 11 +- trio/_core/_thread_cache.py | 12 +- trio/_core/_traps.py | 5 +- trio/_core/_wakeup_socketpair.py | 2 +- trio/_core/_windows_cffi.py | 5 +- trio/_deprecate.py | 2 +- trio/_dtls.py | 12 +- trio/_file_io.py | 8 +- trio/_highlevel_generic.py | 4 +- trio/_highlevel_open_tcp_listeners.py | 1 + trio/_highlevel_open_tcp_stream.py | 2 +- trio/_highlevel_open_unix_stream.py | 2 +- trio/_highlevel_socket.py | 1 + trio/_highlevel_ssl_helpers.py | 3 +- trio/_signals.py | 5 +- trio/_socket.py | 5 +- trio/_ssl.py | 4 +- trio/_subprocess.py | 18 +- trio/_subprocess_platform/__init__.py | 11 +- trio/_subprocess_platform/kqueue.py | 3 +- trio/_sync.py | 2 +- trio/_tests/check_type_completeness.py | 6 +- trio/_tests/pytest_plugin.py | 5 +- trio/_tests/test_abc.py | 3 +- trio/_tests/test_channel.py | 5 +- trio/_tests/test_deprecate.py | 7 +- trio/_tests/test_dtls.py | 23 +- trio/_tests/test_exports.py | 8 + trio/_tests/test_file_io.py | 6 +- trio/_tests/test_highlevel_generic.py | 5 +- .../test_highlevel_open_tcp_listeners.py | 11 +- trio/_tests/test_highlevel_open_tcp_stream.py | 10 +- .../_tests/test_highlevel_open_unix_stream.py | 2 +- trio/_tests/test_highlevel_serve_listeners.py | 5 +- trio/_tests/test_highlevel_socket.py | 15 +- trio/_tests/test_highlevel_ssl_helpers.py | 13 +- trio/_tests/test_path.py | 2 +- trio/_tests/test_signals.py | 3 +- trio/_tests/test_socket.py | 17 +- trio/_tests/test_ssl.py | 32 +- trio/_tests/test_sync.py | 9 +- trio/_tests/test_testing.py | 6 +- trio/_tests/test_timeouts.py | 7 +- trio/_tests/test_unix_pipes.py | 6 +- trio/_tests/test_util.py | 11 +- trio/_tests/test_wait_for_object.py | 11 +- trio/_tests/test_windows_pipes.py | 10 +- trio/_tests/tools/test_gen_exports.py | 7 +- trio/_tests/verify_types.json | 282 
+++---- trio/_tools/gen_exports.py | 11 +- trio/_unix_pipes.py | 6 +- trio/_util.py | 10 +- trio/_wait_for_object.py | 10 +- trio/_windows_pipes.py | 5 +- trio/abc.py | 16 +- trio/from_thread.py | 3 +- trio/lowlevel.py | 54 +- trio/socket.py | 731 +++++++++--------- trio/testing/__init__.py | 32 +- trio/testing/_check_streams.py | 4 +- trio/testing/_fake_net.py | 9 +- trio/testing/_memory_streams.py | 5 +- trio/testing/_sequencer.py | 4 +- trio/testing/_trio_test.py | 2 +- trio/to_thread.py | 3 +- 99 files changed, 978 insertions(+), 1023 deletions(-) diff --git a/setup.py b/setup.py index c13d1eb78a..3f420f6c63 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -from setuptools import setup, find_packages +from setuptools import find_packages, setup exec(open("trio/_version.py", encoding="utf-8").read()) diff --git a/trio/__init__.py b/trio/__init__.py index 40aa3c430d..42b57e69c0 100644 --- a/trio/__init__.py +++ b/trio/__init__.py @@ -15,108 +15,89 @@ # # Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) -# pyright explicitly does not care about `__version__` -# see https://github.com/microsoft/pyright/blob/main/docs/typed-libraries.md#type-completeness -from ._version import __version__ +# must be imported early to avoid circular import +from ._core import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED # isort: skip +# Submodules imported by default +from . import abc, from_thread, lowlevel, socket, to_thread +from ._channel import ( + MemoryReceiveChannel as MemoryReceiveChannel, + MemorySendChannel as MemorySendChannel, + open_memory_channel as open_memory_channel, +) from ._core import ( - TrioInternalError as TrioInternalError, - RunFinishedError as RunFinishedError, - WouldBlock as WouldBlock, - Cancelled as Cancelled, + BrokenResourceError as BrokenResourceError, BusyResourceError as BusyResourceError, - ClosedResourceError as ClosedResourceError, - run as run, - open_nursery as open_nursery, + Cancelled as Cancelled, CancelScope as CancelScope, - current_effective_deadline as current_effective_deadline, - TASK_STATUS_IGNORED as TASK_STATUS_IGNORED, - current_time as current_time, - BrokenResourceError as BrokenResourceError, + ClosedResourceError as ClosedResourceError, EndOfChannel as EndOfChannel, Nursery as Nursery, + RunFinishedError as RunFinishedError, + TrioInternalError as TrioInternalError, + WouldBlock as WouldBlock, + current_effective_deadline as current_effective_deadline, + current_time as current_time, + open_nursery as open_nursery, + run as run, ) - -from ._timeouts import ( - move_on_at as move_on_at, - move_on_after as move_on_after, - sleep_forever as sleep_forever, - sleep_until as sleep_until, - sleep as sleep, - fail_at as fail_at, - fail_after as fail_after, - TooSlowError as TooSlowError, -) - -from ._sync import ( - Event as Event, - CapacityLimiter as CapacityLimiter, - Semaphore as Semaphore, - Lock as Lock, - StrictFIFOLock as StrictFIFOLock, - Condition as Condition, +from ._core._multierror import ( + MultiError as _MultiError, + NonBaseMultiError as _NonBaseMultiError, ) - +from ._deprecate import TrioDeprecationWarning as TrioDeprecationWarning +from ._dtls import DTLSChannel as DTLSChannel, DTLSEndpoint as DTLSEndpoint +from ._file_io import open_file as open_file, wrap_file as wrap_file from ._highlevel_generic import ( - aclose_forcefully as aclose_forcefully, StapledStream as StapledStream, + aclose_forcefully as aclose_forcefully, ) - -from ._channel import ( - open_memory_channel as open_memory_channel, - 
MemorySendChannel as MemorySendChannel, - MemoryReceiveChannel as MemoryReceiveChannel, +from ._highlevel_open_tcp_listeners import ( + open_tcp_listeners as open_tcp_listeners, + serve_tcp as serve_tcp, ) - -from ._signals import open_signal_receiver as open_signal_receiver - +from ._highlevel_open_tcp_stream import open_tcp_stream as open_tcp_stream +from ._highlevel_open_unix_stream import open_unix_socket as open_unix_socket +from ._highlevel_serve_listeners import serve_listeners as serve_listeners from ._highlevel_socket import ( - SocketStream as SocketStream, SocketListener as SocketListener, + SocketStream as SocketStream, +) +from ._highlevel_ssl_helpers import ( + open_ssl_over_tcp_listeners as open_ssl_over_tcp_listeners, + open_ssl_over_tcp_stream as open_ssl_over_tcp_stream, + serve_ssl_over_tcp as serve_ssl_over_tcp, ) - -from ._file_io import open_file as open_file, wrap_file as wrap_file - from ._path import Path as Path - -from ._subprocess import Process as Process, run_process as run_process - +from ._signals import open_signal_receiver as open_signal_receiver from ._ssl import ( - SSLStream as SSLStream, - SSLListener as SSLListener, NeedHandshakeError as NeedHandshakeError, + SSLListener as SSLListener, + SSLStream as SSLStream, ) - -from ._dtls import DTLSEndpoint as DTLSEndpoint, DTLSChannel as DTLSChannel - -from ._highlevel_serve_listeners import serve_listeners as serve_listeners - -from ._highlevel_open_tcp_stream import open_tcp_stream as open_tcp_stream - -from ._highlevel_open_tcp_listeners import ( - open_tcp_listeners as open_tcp_listeners, - serve_tcp as serve_tcp, +from ._subprocess import Process as Process, run_process as run_process +from ._sync import ( + CapacityLimiter as CapacityLimiter, + Condition as Condition, + Event as Event, + Lock as Lock, + Semaphore as Semaphore, + StrictFIFOLock as StrictFIFOLock, ) - -from ._highlevel_open_unix_stream import open_unix_socket as open_unix_socket - -from ._highlevel_ssl_helpers import ( - open_ssl_over_tcp_stream as open_ssl_over_tcp_stream, - open_ssl_over_tcp_listeners as open_ssl_over_tcp_listeners, - serve_ssl_over_tcp as serve_ssl_over_tcp, +from ._timeouts import ( + TooSlowError as TooSlowError, + fail_after as fail_after, + fail_at as fail_at, + move_on_after as move_on_after, + move_on_at as move_on_at, + sleep as sleep, + sleep_forever as sleep_forever, + sleep_until as sleep_until, ) -from ._core._multierror import MultiError as _MultiError -from ._core._multierror import NonBaseMultiError as _NonBaseMultiError - -from ._deprecate import TrioDeprecationWarning as TrioDeprecationWarning - -# Submodules imported by default -from . import lowlevel -from . import socket -from . import abc -from . import from_thread -from . import to_thread +# pyright explicitly does not care about `__version__` +# see https://github.com/microsoft/pyright/blob/main/docs/typed-libraries.md#type-completeness +from ._version import __version__ # Not imported by default, but mentioned here so static analysis tools like # pylint will know that it exists. 
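
Note that the reordering above preserves the two conventions this file relies on: the redundant `from x import y as y` aliases that mark intentional re-exports for `pyright --verifytypes`, and the `# isort: skip` pin on the early TASK_STATUS_IGNORED import. A tiny standalone sketch of both (stdlib-only so it runs anywhere; in a real package `__init__.py` the alias distinction is what pyright keys on):

    from math import inf as inf   # isort: skip  (excluded from sorting, stays put)
    from math import isnan        # no redundant alias: not advertised as a re-export

    print(inf, isnan(inf))
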
diff --git a/trio/_abc.py b/trio/_abc.py index c085c82b89..a01812dae8 100644 --- a/trio/_abc.py +++ b/trio/_abc.py @@ -1,5 +1,6 @@ from abc import ABCMeta, abstractmethod from typing import Generic, TypeVar + import trio diff --git a/trio/_channel.py b/trio/_channel.py index 3ad08b7109..2bdec5bd09 100644 --- a/trio/_channel.py +++ b/trio/_channel.py @@ -1,25 +1,19 @@ from __future__ import annotations -from collections import deque, OrderedDict +from collections import OrderedDict, deque from math import inf - from types import TracebackType -from typing import ( - Generic, - TypeVar, - TYPE_CHECKING, - Tuple, # only needed for typechecking on <3.9 -) +from typing import Tuple # only needed for typechecking on <3.9 +from typing import TYPE_CHECKING, Generic, TypeVar import attr from outcome import Error, Value -from ._abc import SendChannel, ReceiveChannel, ReceiveType, SendType, T -from ._util import generic_function, NoPublicConstructor - import trio -from ._core import enable_ki_protection, Task, Abort, RaiseCancelT +from ._abc import ReceiveChannel, ReceiveType, SendChannel, SendType, T +from ._core import Abort, RaiseCancelT, Task, enable_ki_protection +from ._util import NoPublicConstructor, generic_function # Temporary TypeVar needed until mypy release supports Self as a type SelfT = TypeVar("SelfT") diff --git a/trio/_core/__init__.py b/trio/_core/__init__.py index f9919b8323..c2991a4048 100644 --- a/trio/_core/__init__.py +++ b/trio/_core/__init__.py @@ -6,83 +6,72 @@ import sys +from ._entry_queue import TrioToken from ._exceptions import ( - TrioInternalError, - RunFinishedError, - WouldBlock, - Cancelled, + BrokenResourceError, BusyResourceError, + Cancelled, ClosedResourceError, - BrokenResourceError, EndOfChannel, + RunFinishedError, + TrioInternalError, + WouldBlock, ) - -from ._ki import ( - enable_ki_protection, - disable_ki_protection, - currently_ki_protected, -) +from ._ki import currently_ki_protected, disable_ki_protection, enable_ki_protection +from ._local import RunVar +from ._mock_clock import MockClock +from ._parking_lot import ParkingLot # Imports that always exist from ._run import ( - Task, + TASK_STATUS_IGNORED, CancelScope, - run, - open_nursery, + Nursery, + Task, + add_instrument, checkpoint, - current_task, - current_effective_deadline, checkpoint_if_cancelled, - TASK_STATUS_IGNORED, + current_clock, + current_effective_deadline, + current_root_task, current_statistics, + current_task, + current_time, current_trio_token, - reschedule, + notify_closing, + open_nursery, remove_instrument, - add_instrument, - current_clock, - current_root_task, + reschedule, + run, spawn_system_task, - current_time, + start_guest_run, wait_all_tasks_blocked, wait_readable, wait_writable, - notify_closing, - Nursery, - start_guest_run, ) +from ._thread_cache import start_thread_soon # Has to come after _run to resolve a circular import from ._traps import ( - cancel_shielded_checkpoint, Abort, RaiseCancelT, - wait_task_rescheduled, - temporarily_detach_coroutine_object, + cancel_shielded_checkpoint, permanently_detach_coroutine_object, reattach_detached_coroutine_object, + temporarily_detach_coroutine_object, + wait_task_rescheduled, ) - -from ._entry_queue import TrioToken - -from ._parking_lot import ParkingLot - from ._unbounded_queue import UnboundedQueue -from ._local import RunVar - -from ._thread_cache import start_thread_soon - -from ._mock_clock import MockClock - # Windows imports if sys.platform == "win32": from ._run import ( - monitor_completion_key, 
current_iocp, + monitor_completion_key, + readinto_overlapped, register_with_iocp, wait_overlapped, write_overlapped, - readinto_overlapped, ) # Kqueue imports elif sys.platform != "linux" and sys.platform != "win32": diff --git a/trio/_core/_asyncgens.py b/trio/_core/_asyncgens.py index 1eab150488..5f02ebe76d 100644 --- a/trio/_core/_asyncgens.py +++ b/trio/_core/_asyncgens.py @@ -1,12 +1,13 @@ -import attr import logging import sys import warnings import weakref +import attr + +from .. import _core from .._util import name_asyncgen from . import _run -from .. import _core # Used to log exceptions in async generator finalizers ASYNCGEN_LOGGER = logging.getLogger("trio.async_generator_errors") diff --git a/trio/_core/_entry_queue.py b/trio/_core/_entry_queue.py index 9f3301b3d2..878506bb2b 100644 --- a/trio/_core/_entry_queue.py +++ b/trio/_core/_entry_queue.py @@ -1,5 +1,5 @@ -from collections import deque import threading +from collections import deque import attr diff --git a/trio/_core/_generated_instrumentation.py b/trio/_core/_generated_instrumentation.py index 986ab2c7f5..30c2f26b4e 100644 --- a/trio/_core/_generated_instrumentation.py +++ b/trio/_core/_generated_instrumentation.py @@ -1,9 +1,10 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -from ._run import GLOBAL_RUN_CONTEXT, _NO_SEND -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +# isort: skip from ._instrumentation import Instrument +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT # fmt: off diff --git a/trio/_core/_generated_io_epoll.py b/trio/_core/_generated_io_epoll.py index 9ae54e4f68..02fb3bc348 100644 --- a/trio/_core/_generated_io_epoll.py +++ b/trio/_core/_generated_io_epoll.py @@ -1,9 +1,10 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -from ._run import GLOBAL_RUN_CONTEXT, _NO_SEND -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +# isort: skip from ._instrumentation import Instrument +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT # fmt: off diff --git a/trio/_core/_generated_io_kqueue.py b/trio/_core/_generated_io_kqueue.py index 7549899dbe..94e819769c 100644 --- a/trio/_core/_generated_io_kqueue.py +++ b/trio/_core/_generated_io_kqueue.py @@ -1,9 +1,10 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -from ._run import GLOBAL_RUN_CONTEXT, _NO_SEND -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +# isort: skip from ._instrumentation import Instrument +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT # fmt: off diff --git a/trio/_core/_generated_io_windows.py b/trio/_core/_generated_io_windows.py index e6337e94b0..26b4da697d 100644 --- a/trio/_core/_generated_io_windows.py +++ b/trio/_core/_generated_io_windows.py @@ -1,9 +1,10 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! 
ALL EDITS WILL BE LOST ****** # ************************************************************* -from ._run import GLOBAL_RUN_CONTEXT, _NO_SEND -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +# isort: skip from ._instrumentation import Instrument +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT # fmt: off diff --git a/trio/_core/_generated_run.py b/trio/_core/_generated_run.py index d20891c55e..d1e74a93f4 100644 --- a/trio/_core/_generated_run.py +++ b/trio/_core/_generated_run.py @@ -1,9 +1,10 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -from ._run import GLOBAL_RUN_CONTEXT, _NO_SEND -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +# isort: skip from ._instrumentation import Instrument +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT # fmt: off diff --git a/trio/_core/_io_common.py b/trio/_core/_io_common.py index 9891849bc9..b141474fda 100644 --- a/trio/_core/_io_common.py +++ b/trio/_core/_io_common.py @@ -1,5 +1,7 @@ import copy + import outcome + from .. import _core diff --git a/trio/_core/_io_epoll.py b/trio/_core/_io_epoll.py index c1537cf53e..376dd18a4e 100644 --- a/trio/_core/_io_epoll.py +++ b/trio/_core/_io_epoll.py @@ -1,12 +1,13 @@ import select import sys -import attr from collections import defaultdict -from typing import Dict, TYPE_CHECKING +from typing import TYPE_CHECKING, Dict + +import attr from .. import _core -from ._run import _public from ._io_common import wake_all +from ._run import _public from ._wakeup_socketpair import WakeupSocketpair assert not TYPE_CHECKING or sys.platform == "linux" diff --git a/trio/_core/_io_kqueue.py b/trio/_core/_io_kqueue.py index 31940d5694..d1151843e8 100644 --- a/trio/_core/_io_kqueue.py +++ b/trio/_core/_io_kqueue.py @@ -1,11 +1,11 @@ +import errno import select import sys +from contextlib import contextmanager from typing import TYPE_CHECKING -import outcome -from contextlib import contextmanager import attr -import errno +import outcome from .. import _core from ._run import _public diff --git a/trio/_core/_io_windows.py b/trio/_core/_io_windows.py index 9b5ebfc268..4084f72b6e 100644 --- a/trio/_core/_io_windows.py +++ b/trio/_core/_io_windows.py @@ -1,31 +1,30 @@ -import itertools -from contextlib import contextmanager import enum +import itertools import socket import sys +from contextlib import contextmanager from typing import TYPE_CHECKING import attr from outcome import Value from .. 
import _core -from ._run import _public from ._io_common import wake_all - +from ._run import _public from ._windows_cffi import ( - ffi, - kernel32, - ntdll, - ws2_32, INVALID_HANDLE_VALUE, - raise_winerror, - _handle, - ErrorCodes, - FileFlags, AFDPollFlags, - WSAIoctls, CompletionModes, + ErrorCodes, + FileFlags, IoControlCodes, + WSAIoctls, + _handle, + ffi, + kernel32, + ntdll, + raise_winerror, + ws2_32, ) assert not TYPE_CHECKING or sys.platform == "win32" diff --git a/trio/_core/_ki.py b/trio/_core/_ki.py index fec23863f1..cc05ef9177 100644 --- a/trio/_core/_ki.py +++ b/trio/_core/_ki.py @@ -11,7 +11,7 @@ from .._util import is_main_thread if TYPE_CHECKING: - from typing import Any, TypeVar, Callable + from typing import Any, Callable, TypeVar F = TypeVar("F", bound=Callable[..., Any]) diff --git a/trio/_core/_local.py b/trio/_core/_local.py index f898a13cff..a54f424fdf 100644 --- a/trio/_core/_local.py +++ b/trio/_core/_local.py @@ -1,9 +1,8 @@ # Runvar implementations import attr -from . import _run - from .._util import Final +from . import _run @attr.s(eq=False, hash=False, slots=True) diff --git a/trio/_core/_mock_clock.py b/trio/_core/_mock_clock.py index 0e95e4e5c5..0eb76b6356 100644 --- a/trio/_core/_mock_clock.py +++ b/trio/_core/_mock_clock.py @@ -2,9 +2,9 @@ from math import inf from .. import _core -from ._run import GLOBAL_RUN_CONTEXT from .._abc import Clock from .._util import Final +from ._run import GLOBAL_RUN_CONTEXT ################################################################ # The glorious MockClock diff --git a/trio/_core/_parking_lot.py b/trio/_core/_parking_lot.py index f38123540f..69882c787b 100644 --- a/trio/_core/_parking_lot.py +++ b/trio/_core/_parking_lot.py @@ -70,9 +70,10 @@ # # See: https://github.com/python-trio/trio/issues/53 -import attr from collections import OrderedDict +import attr + from .. import _core from .._util import Final diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 2727fe1e89..0b6d326546 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -10,16 +10,14 @@ import threading import warnings from collections import deque -from collections.abc import Callable -from contextlib import contextmanager +from collections.abc import Callable, Iterator +from contextlib import AbstractAsyncContextManager, contextmanager from contextvars import copy_context from heapq import heapify, heappop, heappush from math import inf from time import perf_counter -from typing import TYPE_CHECKING, Any, NoReturn, TypeVar from types import TracebackType -from collections.abc import Iterator -from contextlib import AbstractAsyncContextManager +from typing import TYPE_CHECKING, Any, NoReturn, TypeVar import attr from outcome import Error, Outcome, Value, capture diff --git a/trio/_core/_tests/test_asyncgen.py b/trio/_core/_tests/test_asyncgen.py index 92a267540f..f72d5c6859 100644 --- a/trio/_core/_tests/test_asyncgen.py +++ b/trio/_core/_tests/test_asyncgen.py @@ -1,11 +1,12 @@ +import contextlib import sys import weakref -import pytest -import contextlib from math import inf +import pytest + from ... 
import _core -from .tutil import gc_collect_harder, buggy_pypy_asyncgens, restore_unraisablehook +from .tutil import buggy_pypy_asyncgens, gc_collect_harder, restore_unraisablehook @pytest.mark.skipif(sys.version_info < (3, 10), reason="no aclosing() in stdlib<3.10") diff --git a/trio/_core/_tests/test_guest_mode.py b/trio/_core/_tests/test_guest_mode.py index 9fed232214..7b004cf04d 100644 --- a/trio/_core/_tests/test_guest_mode.py +++ b/trio/_core/_tests/test_guest_mode.py @@ -1,21 +1,23 @@ -import pytest import asyncio import contextvars -import sys -import traceback import queue -from functools import partial -from math import inf import signal import socket +import sys import threading import time +import traceback import warnings +from functools import partial +from math import inf + +import pytest import trio import trio.testing -from .tutil import gc_collect_harder, buggy_pypy_asyncgens, restore_unraisablehook + from ..._util import signal_raise +from .tutil import buggy_pypy_asyncgens, gc_collect_harder, restore_unraisablehook # The simplest possible "host" loop. diff --git a/trio/_core/_tests/test_instrumentation.py b/trio/_core/_tests/test_instrumentation.py index 57d3461d3b..498a3eb272 100644 --- a/trio/_core/_tests/test_instrumentation.py +++ b/trio/_core/_tests/test_instrumentation.py @@ -1,6 +1,7 @@ import attr import pytest -from ... import _core, _abc + +from ... import _abc, _core from .tutil import check_sequence_matches diff --git a/trio/_core/_tests/test_io.py b/trio/_core/_tests/test_io.py index 106dae4047..21a954941c 100644 --- a/trio/_core/_tests/test_io.py +++ b/trio/_core/_tests/test_io.py @@ -1,13 +1,14 @@ -import pytest - -import socket as stdlib_socket import random +import socket as stdlib_socket from contextlib import suppress -from ... import _core -from ...testing import wait_all_tasks_blocked, assert_checkpoints +import pytest + import trio +from ... import _core +from ...testing import assert_checkpoints, wait_all_tasks_blocked + # Cross-platform tests for IO handling diff --git a/trio/_core/_tests/test_ki.py b/trio/_core/_tests/test_ki.py index c8b549c0a8..fdbada4624 100644 --- a/trio/_core/_tests/test_ki.py +++ b/trio/_core/_tests/test_ki.py @@ -1,19 +1,20 @@ -import outcome -import pytest -import signal -import threading import contextlib import inspect +import signal +import threading + +import outcome +import pytest try: - from async_generator import yield_, async_generator + from async_generator import async_generator, yield_ except ImportError: # pragma: no cover async_generator = yield_ = None from ... import _core -from ...testing import wait_all_tasks_blocked -from ..._util import signal_raise from ..._timeouts import sleep +from ..._util import signal_raise +from ...testing import wait_all_tasks_blocked def ki_self(): diff --git a/trio/_core/_tests/test_mock_clock.py b/trio/_core/_tests/test_mock_clock.py index e5b2373ca5..9c74df3334 100644 --- a/trio/_core/_tests/test_mock_clock.py +++ b/trio/_core/_tests/test_mock_clock.py @@ -1,9 +1,10 @@ -from math import inf import time +from math import inf import pytest from trio import sleep + from ... import _core from .. 
import wait_all_tasks_blocked from .._mock_clock import MockClock diff --git a/trio/_core/_tests/test_multierror.py b/trio/_core/_tests/test_multierror.py index 354f6e01df..498f4d435b 100644 --- a/trio/_core/_tests/test_multierror.py +++ b/trio/_core/_tests/test_multierror.py @@ -1,23 +1,19 @@ import gc import os -import subprocess -from pathlib import Path import pickle +import re +import subprocess +import sys import warnings +from pathlib import Path +from traceback import extract_tb, print_exception import pytest -from traceback import ( - extract_tb, - print_exception, -) -import sys -import re - -from .tutil import slow -from .._multierror import MultiError, concat_tb, NonBaseMultiError from ... import TrioDeprecationWarning from ..._core import open_nursery +from .._multierror import MultiError, NonBaseMultiError, concat_tb +from .tutil import slow if sys.version_info < (3, 11): from exceptiongroup import ExceptionGroup diff --git a/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py b/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py index b5b2e16c8e..3e1d23ca8e 100644 --- a/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py +++ b/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py @@ -1,10 +1,10 @@ -import _common - # The apport_python_hook package is only installed as part of Ubuntu's system # python, and not available in venvs. So before we can import it we have to # make sure it's on sys.path. import sys +import _common + sys.path.append("/usr/lib/python3/dist-packages") import apport_python_hook diff --git a/trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py b/trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py index b3fd110e50..80e42b6a2c 100644 --- a/trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py +++ b/trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py @@ -1,10 +1,10 @@ -import _common - # Override the regular excepthook too -- it doesn't change anything either way # because ipython doesn't use it, but we want to make sure Trio doesn't warn # about it. import sys +import _common + def custom_excepthook(*args): print("custom running!") diff --git a/trio/_core/_tests/test_multierror_scripts/simple_excepthook_IPython.py b/trio/_core/_tests/test_multierror_scripts/simple_excepthook_IPython.py index 6aa12493b0..51a88c96ce 100644 --- a/trio/_core/_tests/test_multierror_scripts/simple_excepthook_IPython.py +++ b/trio/_core/_tests/test_multierror_scripts/simple_excepthook_IPython.py @@ -3,5 +3,4 @@ # To tickle the "is IPython loaded?" logic, make sure that Trio tolerates # IPython loaded but not actually in use import IPython - import simple_excepthook diff --git a/trio/_core/_tests/test_run.py b/trio/_core/_tests/test_run.py index 249637e5fe..81c3b73cc4 100644 --- a/trio/_core/_tests/test_run.py +++ b/trio/_core/_tests/test_run.py @@ -1,5 +1,6 @@ import contextvars import functools +import gc import sys import threading import time @@ -7,31 +8,25 @@ import weakref from contextlib import ExitStack from math import inf -import gc import outcome -import sniffio import pytest +import sniffio +from ... 
import _core +from ..._core._multierror import MultiError, NonBaseMultiError +from ..._threads import to_thread_run_sync +from ..._timeouts import fail_after, sleep +from ...testing import Sequencer, assert_checkpoints, wait_all_tasks_blocked +from .._run import DEADLINE_HEAP_MIN_PRUNE_THRESHOLD from .tutil import ( - slow, + buggy_pypy_asyncgens, check_sequence_matches, + create_asyncio_future_in_new_loop, gc_collect_harder, ignore_coroutine_never_awaited_warnings, - buggy_pypy_asyncgens, restore_unraisablehook, - create_asyncio_future_in_new_loop, -) - -from ... import _core -from ..._core._multierror import MultiError, NonBaseMultiError -from .._run import DEADLINE_HEAP_MIN_PRUNE_THRESHOLD -from ..._threads import to_thread_run_sync -from ..._timeouts import sleep, fail_after -from ...testing import ( - wait_all_tasks_blocked, - Sequencer, - assert_checkpoints, + slow, ) if sys.version_info < (3, 11): diff --git a/trio/_core/_tests/test_thread_cache.py b/trio/_core/_tests/test_thread_cache.py index d7b50cfc51..de78443f4e 100644 --- a/trio/_core/_tests/test_thread_cache.py +++ b/trio/_core/_tests/test_thread_cache.py @@ -1,12 +1,13 @@ -import pytest import threading -from queue import Queue import time from contextlib import contextmanager +from queue import Queue + +import pytest -from .tutil import slow, gc_collect_harder from .. import _thread_cache -from .._thread_cache import start_thread_soon, ThreadCache +from .._thread_cache import ThreadCache, start_thread_soon +from .tutil import gc_collect_harder, slow def test_thread_cache_basics(): diff --git a/trio/_core/_tests/test_windows.py b/trio/_core/_tests/test_windows.py index c335076ca4..0dac94543c 100644 --- a/trio/_core/_tests/test_windows.py +++ b/trio/_core/_tests/test_windows.py @@ -8,17 +8,17 @@ # Mark all the tests in this file as being windows-only pytestmark = pytest.mark.skipif(not on_windows, reason="windows only") -from .tutil import slow, gc_collect_harder, restore_unraisablehook from ... import _core, sleep from ...testing import wait_all_tasks_blocked +from .tutil import gc_collect_harder, restore_unraisablehook, slow if on_windows: from .._windows_cffi import ( + INVALID_HANDLE_VALUE, + FileFlags, ffi, kernel32, - INVALID_HANDLE_VALUE, raise_winerror, - FileFlags, ) @@ -99,8 +99,8 @@ async def read_region(start, end): @contextmanager def pipe_with_overlapped_read(): - from asyncio.windows_utils import pipe import msvcrt + from asyncio.windows_utils import pipe read_handle, write_handle = pipe(overlapped=(True, False)) try: @@ -175,8 +175,8 @@ async def test_too_late_to_cancel(): def test_lsp_that_hooks_select_gives_good_error(monkeypatch): - from .._windows_cffi import WSAIoctls, _handle from .. import _io_windows + from .._windows_cffi import WSAIoctls, _handle def patched_get_underlying(sock, *, which=WSAIoctls.SIO_BASE_HANDLE): if hasattr(sock, "fileno"): # pragma: no branch @@ -199,8 +199,8 @@ def test_lsp_that_completely_hides_base_socket_gives_good_error(monkeypatch): # self for SIO_BSP_HANDLE_POLL. No known LSP does this, but we want to # make sure we get an error rather than an infinite loop. - from .._windows_cffi import WSAIoctls, _handle from .. 
import _io_windows + from .._windows_cffi import WSAIoctls, _handle def patched_get_underlying(sock, *, which=WSAIoctls.SIO_BASE_HANDLE): if hasattr(sock, "fileno"): # pragma: no branch diff --git a/trio/_core/_tests/tutil.py b/trio/_core/_tests/tutil.py index dc9a4f486d..f3a21364be 100644 --- a/trio/_core/_tests/tutil.py +++ b/trio/_core/_tests/tutil.py @@ -1,16 +1,15 @@ # Utilities for testing import asyncio -import socket as stdlib_socket -import threading +import gc import os +import socket as stdlib_socket import sys +import threading +import warnings +from contextlib import closing, contextmanager from typing import TYPE_CHECKING import pytest -import warnings -from contextlib import contextmanager, closing - -import gc # See trio/_tests/conftest.py for the other half of this from trio._tests.pytest_plugin import RUN_SLOW diff --git a/trio/_core/_thread_cache.py b/trio/_core/_thread_cache.py index a36181ee36..e570e8dead 100644 --- a/trio/_core/_thread_cache.py +++ b/trio/_core/_thread_cache.py @@ -1,13 +1,13 @@ -import sys -import traceback -from threading import Thread, Lock -import outcome import ctypes import ctypes.util +import sys +import traceback +from functools import partial from itertools import count - +from threading import Lock, Thread from typing import Callable, Optional, Tuple -from functools import partial + +import outcome def _to_os_thread_name(name: str) -> bytes: diff --git a/trio/_core/_traps.py b/trio/_core/_traps.py index aedf839a8d..08a8ceac01 100644 --- a/trio/_core/_traps.py +++ b/trio/_core/_traps.py @@ -1,15 +1,14 @@ # These are the only functions that ever yield back to the task runner. -import types import enum +import types +from typing import Any, Callable, NoReturn import attr import outcome from . import _run -from typing import Callable, NoReturn, Any - # Helper for the bottommost 'yield'. You can't use 'yield' inside an async # function, but you can inside a generator, and if you decorate your generator diff --git a/trio/_core/_wakeup_socketpair.py b/trio/_core/_wakeup_socketpair.py index c084403eaa..51a80ef024 100644 --- a/trio/_core/_wakeup_socketpair.py +++ b/trio/_core/_wakeup_socketpair.py @@ -1,5 +1,5 @@ -import socket import signal +import socket import warnings from .. import _core diff --git a/trio/_core/_windows_cffi.py b/trio/_core/_windows_cffi.py index a1071519e9..639e75b50e 100644 --- a/trio/_core/_windows_cffi.py +++ b/trio/_core/_windows_cffi.py @@ -1,6 +1,7 @@ -import cffi -import re import enum +import re + +import cffi ################################################################ # Functions and types diff --git a/trio/_deprecate.py b/trio/_deprecate.py index 7641baefd3..fe00192583 100644 --- a/trio/_deprecate.py +++ b/trio/_deprecate.py @@ -1,7 +1,7 @@ import sys +import warnings from functools import wraps from types import ModuleType -import warnings import attr diff --git a/trio/_dtls.py b/trio/_dtls.py index 910637455a..f46fc4fda0 100644 --- a/trio/_dtls.py +++ b/trio/_dtls.py @@ -6,19 +6,19 @@ # Hopefully they fix this before implementing DTLS 1.3, because it's a very different # protocol, and it's probably impossible to pull tricks like we do here. 
-import struct -import hmac -import os import enum -from itertools import count -import weakref import errno +import hmac +import os +import struct import warnings +import weakref +from itertools import count import attr import trio -from trio._util import NoPublicConstructor, Final +from trio._util import Final, NoPublicConstructor MAX_UDP_PACKET_SIZE = 65527 diff --git a/trio/_file_io.py b/trio/_file_io.py index 8c8425c775..9f7d81adef 100644 --- a/trio/_file_io.py +++ b/trio/_file_io.py @@ -1,11 +1,11 @@ -from functools import partial import io - -from .abc import AsyncResource -from ._util import async_wraps +from functools import partial import trio +from ._util import async_wraps +from .abc import AsyncResource + # This list is also in the docs, make sure to keep them in sync _FILE_SYNC_ATTRS = { "closed", diff --git a/trio/_highlevel_generic.py b/trio/_highlevel_generic.py index c31b4fdbf3..2ae381c8e2 100644 --- a/trio/_highlevel_generic.py +++ b/trio/_highlevel_generic.py @@ -1,10 +1,10 @@ import attr import trio -from .abc import HalfCloseableStream - from trio._util import Final +from .abc import HalfCloseableStream + async def aclose_forcefully(resource): """Close an async resource or async generator immediately, without diff --git a/trio/_highlevel_open_tcp_listeners.py b/trio/_highlevel_open_tcp_listeners.py index 2028d30766..6211917254 100644 --- a/trio/_highlevel_open_tcp_listeners.py +++ b/trio/_highlevel_open_tcp_listeners.py @@ -3,6 +3,7 @@ from math import inf import trio + from . import socket as tsocket if sys.version_info < (3, 11): diff --git a/trio/_highlevel_open_tcp_stream.py b/trio/_highlevel_open_tcp_stream.py index 0fcffbcb06..a2477104d9 100644 --- a/trio/_highlevel_open_tcp_stream.py +++ b/trio/_highlevel_open_tcp_stream.py @@ -3,7 +3,7 @@ import trio from trio._core._multierror import MultiError -from trio.socket import getaddrinfo, SOCK_STREAM, socket +from trio.socket import SOCK_STREAM, getaddrinfo, socket if sys.version_info < (3, 11): from exceptiongroup import ExceptionGroup diff --git a/trio/_highlevel_open_unix_stream.py b/trio/_highlevel_open_unix_stream.py index e5aba4695f..c2c3a3ca7c 100644 --- a/trio/_highlevel_open_unix_stream.py +++ b/trio/_highlevel_open_unix_stream.py @@ -2,7 +2,7 @@ from contextlib import contextmanager import trio -from trio.socket import socket, SOCK_STREAM +from trio.socket import SOCK_STREAM, socket try: from trio.socket import AF_UNIX diff --git a/trio/_highlevel_socket.py b/trio/_highlevel_socket.py index 1e8dc16ebc..ce23de17d7 100644 --- a/trio/_highlevel_socket.py +++ b/trio/_highlevel_socket.py @@ -4,6 +4,7 @@ from contextlib import contextmanager import trio + from . 
import socket as tsocket from ._util import ConflictDetector, Final from .abc import HalfCloseableStream, Listener diff --git a/trio/_highlevel_ssl_helpers.py b/trio/_highlevel_ssl_helpers.py index 19b1ff8777..ad77a302f0 100644 --- a/trio/_highlevel_ssl_helpers.py +++ b/trio/_highlevel_ssl_helpers.py @@ -1,6 +1,7 @@ -import trio import ssl +import trio + from ._highlevel_open_tcp_stream import DEFAULT_DELAY diff --git a/trio/_signals.py b/trio/_signals.py index cee3b7db53..fe2bde946e 100644 --- a/trio/_signals.py +++ b/trio/_signals.py @@ -1,9 +1,10 @@ import signal -from contextlib import contextmanager from collections import OrderedDict +from contextlib import contextmanager import trio -from ._util import signal_raise, is_main_thread, ConflictDetector + +from ._util import ConflictDetector, is_main_thread, signal_raise # Discussion of signal handling strategies: # diff --git a/trio/_socket.py b/trio/_socket.py index 2889f48113..b4ee4a7199 100644 --- a/trio/_socket.py +++ b/trio/_socket.py @@ -1,13 +1,14 @@ import os -import sys import select import socket as _stdlib_socket +import sys from functools import wraps as _wraps from typing import TYPE_CHECKING import idna as _idna import trio + from . import _core @@ -294,7 +295,7 @@ def _sniff_sockopts_for_fileno(family, type, proto, fileno): # and then we'll throw it away and construct a new one with the correct metadata. if sys.platform != "linux": return family, type, proto - from socket import SO_DOMAIN, SO_PROTOCOL, SOL_SOCKET, SO_TYPE + from socket import SO_DOMAIN, SO_PROTOCOL, SO_TYPE, SOL_SOCKET sockobj = _stdlib_socket.socket(family, type, proto, fileno=fileno) try: diff --git a/trio/_ssl.py b/trio/_ssl.py index 8f005c2c9a..bd8b3b06b6 100644 --- a/trio/_ssl.py +++ b/trio/_ssl.py @@ -155,10 +155,10 @@ import trio -from .abc import Stream, Listener -from ._highlevel_generic import aclose_forcefully from . 
import _sync +from ._highlevel_generic import aclose_forcefully from ._util import ConflictDetector, Final +from .abc import Listener, Stream ################################################################ # SSLStream diff --git a/trio/_subprocess.py b/trio/_subprocess.py index 34eeb22dbb..1f8d0a8253 100644 --- a/trio/_subprocess.py +++ b/trio/_subprocess.py @@ -1,24 +1,24 @@ import os import subprocess import sys +import warnings from contextlib import ExitStack -from typing import Optional from functools import partial -import warnings -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Optional + +import trio -from ._abc import AsyncResource, SendStream, ReceiveStream +from ._abc import AsyncResource, ReceiveStream, SendStream from ._core import ClosedResourceError +from ._deprecate import deprecated from ._highlevel_generic import StapledStream -from ._sync import Lock from ._subprocess_platform import ( - wait_child_exiting, - create_pipe_to_child_stdin, create_pipe_from_child_output, + create_pipe_to_child_stdin, + wait_child_exiting, ) -from ._deprecate import deprecated +from ._sync import Lock from ._util import NoPublicConstructor -import trio # Linux-specific, but has complex lifetime management stuff so we hard-code it # here instead of hiding it behind the _subprocess_platform abstraction diff --git a/trio/_subprocess_platform/__init__.py b/trio/_subprocess_platform/__init__.py index 17444b8473..b6767af8f5 100644 --- a/trio/_subprocess_platform/__init__.py +++ b/trio/_subprocess_platform/__init__.py @@ -2,12 +2,12 @@ import os import sys -from typing import Optional, Tuple, TYPE_CHECKING +from typing import TYPE_CHECKING, Optional, Tuple import trio -from .. import _core, _subprocess -from .._abc import SendStream, ReceiveStream +from .. import _core, _subprocess +from .._abc import ReceiveStream, SendStream _wait_child_exiting_error: Optional[ImportError] = None _create_child_pipe_error: Optional[ImportError] = None @@ -95,7 +95,7 @@ def create_pipe_from_child_output(): # noqa: F811 return trio.lowlevel.FdStream(rfd), wfd elif os.name == "nt": - from .._windows_pipes import PipeSendStream, PipeReceiveStream + import msvcrt # This isn't exported or documented, but it's also not # underscore-prefixed, and seems kosher to use. The asyncio docs @@ -104,7 +104,8 @@ def create_pipe_from_child_output(): # noqa: F811 # when asyncio.windows_utils.socketpair was removed in 3.7, the # removal was mentioned in the release notes. from asyncio.windows_utils import pipe as windows_pipe - import msvcrt + + from .._windows_pipes import PipeReceiveStream, PipeSendStream def create_pipe_to_child_stdin(): # noqa: F811 # for stdin, we want the write end (our end) to use overlapped I/O diff --git a/trio/_subprocess_platform/kqueue.py b/trio/_subprocess_platform/kqueue.py index 412ccf8732..9839fd046b 100644 --- a/trio/_subprocess_platform/kqueue.py +++ b/trio/_subprocess_platform/kqueue.py @@ -1,6 +1,7 @@ -import sys import select +import sys from typing import TYPE_CHECKING + from .. import _core, _subprocess assert (sys.platform != "win32" and sys.platform != "linux") or not TYPE_CHECKING diff --git a/trio/_sync.py b/trio/_sync.py index 8d2fdc0a2d..60d7074d9e 100644 --- a/trio/_sync.py +++ b/trio/_sync.py @@ -5,7 +5,7 @@ import trio from . 
import _core -from ._core import enable_ki_protection, ParkingLot +from ._core import ParkingLot, enable_ki_protection from ._util import Final diff --git a/trio/_tests/check_type_completeness.py b/trio/_tests/check_type_completeness.py index 15b2da2d94..6d0f43b6b0 100755 --- a/trio/_tests/check_type_completeness.py +++ b/trio/_tests/check_type_completeness.py @@ -1,10 +1,10 @@ #!/usr/bin/env python3 # this file is not run as part of the tests, instead it's run standalone from check.sh -import subprocess +import argparse import json -from pathlib import Path +import subprocess import sys -import argparse +from pathlib import Path # the result file is not marked in MANIFEST.in so it's not included in the package RESULT_FILE = Path(__file__).parent / "verify_types.json" diff --git a/trio/_tests/pytest_plugin.py b/trio/_tests/pytest_plugin.py index a893b466cb..c6d73e25ea 100644 --- a/trio/_tests/pytest_plugin.py +++ b/trio/_tests/pytest_plugin.py @@ -1,7 +1,8 @@ -import pytest import inspect -from ..testing import trio_test, MockClock +import pytest + +from ..testing import MockClock, trio_test RUN_SLOW = True diff --git a/trio/_tests/test_abc.py b/trio/_tests/test_abc.py index 9db53b891a..2b0b7088b0 100644 --- a/trio/_tests/test_abc.py +++ b/trio/_tests/test_abc.py @@ -1,6 +1,5 @@ -import pytest - import attr +import pytest from .. import abc as tabc diff --git a/trio/_tests/test_channel.py b/trio/_tests/test_channel.py index aabb368799..4478c523f5 100644 --- a/trio/_tests/test_channel.py +++ b/trio/_tests/test_channel.py @@ -1,8 +1,9 @@ import pytest -from ..testing import wait_all_tasks_blocked, assert_checkpoints import trio -from trio import open_memory_channel, EndOfChannel +from trio import EndOfChannel, open_memory_channel + +from ..testing import assert_checkpoints, wait_all_tasks_blocked async def test_channel(): diff --git a/trio/_tests/test_deprecate.py b/trio/_tests/test_deprecate.py index 856ff20673..33c05ffd25 100644 --- a/trio/_tests/test_deprecate.py +++ b/trio/_tests/test_deprecate.py @@ -1,15 +1,14 @@ -import pytest - import inspect import warnings +import pytest + from .._deprecate import ( TrioDeprecationWarning, - warn_deprecated, deprecated, deprecated_alias, + warn_deprecated, ) - from . 
import module_with_deprecations diff --git a/trio/_tests/test_dtls.py b/trio/_tests/test_dtls.py index 445ea4d1fd..b8c32c6d5f 100644 --- a/trio/_tests/test_dtls.py +++ b/trio/_tests/test_dtls.py @@ -1,17 +1,18 @@ -import pytest -import trio -import trio.testing -from trio import DTLSEndpoint import random -import attr from contextlib import asynccontextmanager from itertools import count +import attr +import pytest import trustme from OpenSSL import SSL +import trio +import trio.testing +from trio import DTLSEndpoint from trio.testing._fake_net import FakeNet -from .._core._tests.tutil import slow, binds_ipv6, gc_collect_harder + +from .._core._tests.tutil import binds_ipv6, gc_collect_harder, slow ca = trustme.CA() server_cert = ca.issue_cert("example.com") @@ -335,13 +336,13 @@ async def test_server_socket_doesnt_crash_on_garbage(autojump_clock): fn.enable() from trio._dtls import ( - Record, - encode_record, - HandshakeFragment, - encode_handshake_fragment, ContentType, + HandshakeFragment, HandshakeType, ProtocolVersion, + Record, + encode_handshake_fragment, + encode_record, ) client_hello = encode_record( @@ -446,7 +447,7 @@ async def test_invalid_cookie_rejected(autojump_clock): fn = FakeNet() fn.enable() - from trio._dtls import decode_client_hello_untrusted, BadPacket + from trio._dtls import BadPacket, decode_client_hello_untrusted with trio.CancelScope() as cscope: # the first 11 bytes of ClientHello aren't protected by the cookie, so only test diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index 77b390f22f..3ab0016386 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -175,6 +175,14 @@ def no_underscores(symbols): if modname == "trio": static_names.add("testing") + # these are hidden behind `if sys.plaftorm != "win32" or not TYPE_CHECKING` + # so presumably pyright is parsing that if statement, in which case we don't + # care about them being missing. 
+ if modname == "trio.socket" and sys.platform == "win32": + ignored_missing_names = {"if_indextoname", "if_nameindex", "if_nametoindex"} + assert static_names.isdisjoint(ignored_missing_names) + static_names.update(ignored_missing_names) + else: # pragma: no cover assert False diff --git a/trio/_tests/test_file_io.py b/trio/_tests/test_file_io.py index dcbd1a63bb..e99788efc5 100644 --- a/trio/_tests/test_file_io.py +++ b/trio/_tests/test_file_io.py @@ -1,13 +1,13 @@ import io import os - -import pytest from unittest import mock from unittest.mock import sentinel +import pytest + import trio from trio import _core -from trio._file_io import AsyncIOWrapper, _FILE_SYNC_ATTRS, _FILE_ASYNC_METHODS +from trio._file_io import _FILE_ASYNC_METHODS, _FILE_SYNC_ATTRS, AsyncIOWrapper @pytest.fixture diff --git a/trio/_tests/test_highlevel_generic.py b/trio/_tests/test_highlevel_generic.py index df2b2cecf7..38bcedee25 100644 --- a/trio/_tests/test_highlevel_generic.py +++ b/trio/_tests/test_highlevel_generic.py @@ -1,9 +1,8 @@ -import pytest - import attr +import pytest -from ..abc import SendStream, ReceiveStream from .._highlevel_generic import StapledStream +from ..abc import ReceiveStream, SendStream @attr.s diff --git a/trio/_tests/test_highlevel_open_tcp_listeners.py b/trio/_tests/test_highlevel_open_tcp_listeners.py index 9d0a3aeedc..e58cbd13cc 100644 --- a/trio/_tests/test_highlevel_open_tcp_listeners.py +++ b/trio/_tests/test_highlevel_open_tcp_listeners.py @@ -1,15 +1,14 @@ -import sys - -import pytest - -import socket as stdlib_socket import errno +import socket as stdlib_socket +import sys import attr +import pytest import trio -from trio import open_tcp_listeners, serve_tcp, SocketListener, open_tcp_stream +from trio import SocketListener, open_tcp_listeners, open_tcp_stream, serve_tcp from trio.testing import open_stream_to_socket_listener + from .. 
import socket as tsocket from .._core._tests.tutil import binds_ipv6 diff --git a/trio/_tests/test_highlevel_open_tcp_stream.py b/trio/_tests/test_highlevel_open_tcp_stream.py index 35ddd3e118..24f82bddd5 100644 --- a/trio/_tests/test_highlevel_open_tcp_stream.py +++ b/trio/_tests/test_highlevel_open_tcp_stream.py @@ -1,17 +1,17 @@ -import pytest -import sys import socket +import sys import attr +import pytest import trio -from trio.socket import AF_INET, AF_INET6, SOCK_STREAM, IPPROTO_TCP from trio._highlevel_open_tcp_stream import ( - reorder_for_rfc_6555_section_5_4, close_all, - open_tcp_stream, format_host_port, + open_tcp_stream, + reorder_for_rfc_6555_section_5_4, ) +from trio.socket import AF_INET, AF_INET6, IPPROTO_TCP, SOCK_STREAM if sys.version_info < (3, 11): from exceptiongroup import BaseExceptionGroup diff --git a/trio/_tests/test_highlevel_open_unix_stream.py b/trio/_tests/test_highlevel_open_unix_stream.py index 211aff3e70..64a15f9e9d 100644 --- a/trio/_tests/test_highlevel_open_unix_stream.py +++ b/trio/_tests/test_highlevel_open_unix_stream.py @@ -4,7 +4,7 @@ import pytest -from trio import open_unix_socket, Path +from trio import Path, open_unix_socket from trio._highlevel_open_unix_stream import close_on_error if not hasattr(socket, "AF_UNIX"): diff --git a/trio/_tests/test_highlevel_serve_listeners.py b/trio/_tests/test_highlevel_serve_listeners.py index b028092eb9..4385263899 100644 --- a/trio/_tests/test_highlevel_serve_listeners.py +++ b/trio/_tests/test_highlevel_serve_listeners.py @@ -1,9 +1,8 @@ -import pytest - -from functools import partial import errno +from functools import partial import attr +import pytest import trio from trio.testing import memory_stream_pair, wait_all_tasks_blocked diff --git a/trio/_tests/test_highlevel_socket.py b/trio/_tests/test_highlevel_socket.py index 9dcb834d2c..14143affe2 100644 --- a/trio/_tests/test_highlevel_socket.py +++ b/trio/_tests/test_highlevel_socket.py @@ -1,17 +1,16 @@ -import pytest - -import sys -import socket as stdlib_socket import errno +import socket as stdlib_socket +import sys -from .. import _core +import pytest + +from .. import _core, socket as tsocket +from .._highlevel_socket import * from ..testing import ( + assert_checkpoints, check_half_closeable_stream, wait_all_tasks_blocked, - assert_checkpoints, ) -from .._highlevel_socket import * -from .. import socket as tsocket async def test_SocketStream_basics(): diff --git a/trio/_tests/test_highlevel_ssl_helpers.py b/trio/_tests/test_highlevel_ssl_helpers.py index d8a2bb74d4..f6eda0b578 100644 --- a/trio/_tests/test_highlevel_ssl_helpers.py +++ b/trio/_tests/test_highlevel_ssl_helpers.py @@ -1,22 +1,21 @@ -import pytest - from functools import partial import attr +import pytest import trio -from trio.socket import AF_INET, SOCK_STREAM, IPPROTO_TCP import trio.testing - -# noqa is needed because flake8 doesn't understand how pytest fixtures work. -from .test_ssl import client_ctx, SERVER_CTX # noqa: F401 +from trio.socket import AF_INET, IPPROTO_TCP, SOCK_STREAM from .._highlevel_ssl_helpers import ( - open_ssl_over_tcp_stream, open_ssl_over_tcp_listeners, + open_ssl_over_tcp_stream, serve_ssl_over_tcp, ) +# noqa is needed because flake8 doesn't understand how pytest fixtures work. 
+from .test_ssl import SERVER_CTX, client_ctx # noqa: F401 + async def echo_handler(stream): async with stream: diff --git a/trio/_tests/test_path.py b/trio/_tests/test_path.py index b4345e4d55..bfef1aaf2c 100644 --- a/trio/_tests/test_path.py +++ b/trio/_tests/test_path.py @@ -4,8 +4,8 @@ import pytest import trio -from trio._path import AsyncAutoWrapperType as Type from trio._file_io import AsyncIOWrapper +from trio._path import AsyncAutoWrapperType as Type @pytest.fixture diff --git a/trio/_tests/test_signals.py b/trio/_tests/test_signals.py index 235772f900..313cce259f 100644 --- a/trio/_tests/test_signals.py +++ b/trio/_tests/test_signals.py @@ -3,9 +3,10 @@ import pytest import trio + from .. import _core +from .._signals import _signal_handler, open_signal_receiver from .._util import signal_raise -from .._signals import open_signal_receiver, _signal_handler async def test_open_signal_receiver(): diff --git a/trio/_tests/test_socket.py b/trio/_tests/test_socket.py index 1ed612924d..e559b98240 100644 --- a/trio/_tests/test_socket.py +++ b/trio/_tests/test_socket.py @@ -1,16 +1,15 @@ import errno - -import pytest -import attr - +import inspect import os import socket as stdlib_socket -import inspect -import tempfile import sys as _sys -from .._core._tests.tutil import creates_ipv6, binds_ipv6 -from .. import _core -from .. import socket as tsocket +import tempfile + +import attr +import pytest + +from .. import _core, socket as tsocket +from .._core._tests.tutil import binds_ipv6, creates_ipv6 from .._socket import _NUMERIC_ONLY, _try_sync from ..testing import assert_checkpoints, wait_all_tasks_blocked diff --git a/trio/_tests/test_ssl.py b/trio/_tests/test_ssl.py index 234290e185..2534c81260 100644 --- a/trio/_tests/test_ssl.py +++ b/trio/_tests/test_ssl.py @@ -1,38 +1,34 @@ from __future__ import annotations import os -import sys -from typing import TYPE_CHECKING - -import pytest - -import threading import socket as stdlib_socket import ssl +import sys +import threading from contextlib import asynccontextmanager, contextmanager from functools import partial +from typing import TYPE_CHECKING -from OpenSSL import SSL +import pytest import trustme +from OpenSSL import SSL import trio -from .. import _core -from .._highlevel_socket import SocketStream, SocketListener + +from .. import _core, socket as tsocket +from .._core import BrokenResourceError, ClosedResourceError +from .._core._tests.tutil import slow from .._highlevel_generic import aclose_forcefully -from .._core import ClosedResourceError, BrokenResourceError from .._highlevel_open_tcp_stream import open_tcp_stream -from .. import socket as tsocket -from .._ssl import SSLStream, SSLListener, NeedHandshakeError, _is_eof +from .._highlevel_socket import SocketListener, SocketStream +from .._ssl import NeedHandshakeError, SSLListener, SSLStream, _is_eof from .._util import ConflictDetector - -from .._core._tests.tutil import slow - from ..testing import ( - assert_checkpoints, Sequencer, - memory_stream_pair, - lockstep_stream_pair, + assert_checkpoints, check_two_way_stream, + lockstep_stream_pair, + memory_stream_pair, ) if TYPE_CHECKING: diff --git a/trio/_tests/test_sync.py b/trio/_tests/test_sync.py index fa903f3492..7de42b86f9 100644 --- a/trio/_tests/test_sync.py +++ b/trio/_tests/test_sync.py @@ -1,12 +1,11 @@ -import pytest - import weakref -from ..testing import wait_all_tasks_blocked, assert_checkpoints +import pytest from .. 
import _core -from .._timeouts import sleep_forever from .._sync import * +from .._timeouts import sleep_forever +from ..testing import assert_checkpoints, wait_all_tasks_blocked async def test_Event(): @@ -400,8 +399,8 @@ async def waiter(i): assert c.locked() -from .._sync import AsyncContextManagerMixin from .._channel import open_memory_channel +from .._sync import AsyncContextManagerMixin # Three ways of implementing a Lock in terms of a channel. Used to let us put # the channel through the generic lock tests. diff --git a/trio/_tests/test_testing.py b/trio/_tests/test_testing.py index a9cc00684d..3b5a57d3ec 100644 --- a/trio/_tests/test_testing.py +++ b/trio/_tests/test_testing.py @@ -4,15 +4,13 @@ import pytest +from .. import _core, sleep, socket as tsocket from .._core._tests.tutil import can_bind_ipv6 -from .. import sleep -from .. import _core from .._highlevel_generic import aclose_forcefully +from .._highlevel_socket import SocketListener from ..testing import * from ..testing._check_streams import _assert_raises from ..testing._memory_streams import _UnboundedByteQueue -from .. import socket as tsocket -from .._highlevel_socket import SocketListener async def test_wait_all_tasks_blocked(): diff --git a/trio/_tests/test_timeouts.py b/trio/_tests/test_timeouts.py index f55e697d6f..9507d88a78 100644 --- a/trio/_tests/test_timeouts.py +++ b/trio/_tests/test_timeouts.py @@ -1,11 +1,12 @@ +import time + import outcome import pytest -import time -from .._core._tests.tutil import slow from .. import _core -from ..testing import assert_checkpoints +from .._core._tests.tutil import slow from .._timeouts import * +from ..testing import assert_checkpoints async def check_takes_about(f, expected_dur): diff --git a/trio/_tests/test_unix_pipes.py b/trio/_tests/test_unix_pipes.py index ce6a4f80a0..acee75aafb 100644 --- a/trio/_tests/test_unix_pipes.py +++ b/trio/_tests/test_unix_pipes.py @@ -1,13 +1,13 @@ import errno -import select import os +import select import sys import pytest -from .._core._tests.tutil import gc_collect_harder, skip_if_fbsd_pipes_broken from .. import _core -from ..testing import wait_all_tasks_blocked, check_one_way_stream +from .._core._tests.tutil import gc_collect_harder, skip_if_fbsd_pipes_broken +from ..testing import check_one_way_stream, wait_all_tasks_blocked posix = os.name == "posix" pytestmark = pytest.mark.skipif(not posix, reason="posix only") diff --git a/trio/_tests/test_util.py b/trio/_tests/test_util.py index 9f89a68efe..a4df6d35b4 100644 --- a/trio/_tests/test_util.py +++ b/trio/_tests/test_util.py @@ -4,19 +4,20 @@ import pytest import trio + from .. import _core from .._core._tests.tutil import ( - ignore_coroutine_never_awaited_warnings, create_asyncio_future_in_new_loop, + ignore_coroutine_never_awaited_warnings, ) from .._util import ( - signal_raise, ConflictDetector, - is_main_thread, - coroutine_or_error, - generic_function, Final, NoPublicConstructor, + coroutine_or_error, + generic_function, + is_main_thread, + signal_raise, ) from ..testing import wait_all_tasks_blocked diff --git a/trio/_tests/test_wait_for_object.py b/trio/_tests/test_wait_for_object.py index 54291444a0..ea16684289 100644 --- a/trio/_tests/test_wait_for_object.py +++ b/trio/_tests/test_wait_for_object.py @@ -6,17 +6,14 @@ # Mark all the tests in this file as being windows-only pytestmark = pytest.mark.skipif(not on_windows, reason="windows only") -from .._core._tests.tutil import slow import trio -from .. import _core -from .. import _timeouts + +from .. 
import _core, _timeouts +from .._core._tests.tutil import slow if on_windows: from .._core._windows_cffi import ffi, kernel32 - from .._wait_for_object import ( - WaitForSingleObject, - WaitForMultipleObjects_sync, - ) + from .._wait_for_object import WaitForMultipleObjects_sync, WaitForSingleObject async def test_WaitForMultipleObjects_sync(): diff --git a/trio/_tests/test_windows_pipes.py b/trio/_tests/test_windows_pipes.py index 4837d24ba9..5c4bae7d25 100644 --- a/trio/_tests/test_windows_pipes.py +++ b/trio/_tests/test_windows_pipes.py @@ -1,16 +1,16 @@ import sys -from typing import Any -from typing import Tuple +from typing import Any, Tuple import pytest from .. import _core -from ..testing import wait_all_tasks_blocked, check_one_way_stream +from ..testing import check_one_way_stream, wait_all_tasks_blocked if sys.platform == "win32": - from .._windows_pipes import PipeSendStream, PipeReceiveStream - from .._core._windows_cffi import _handle, kernel32 from asyncio.windows_utils import pipe + + from .._core._windows_cffi import _handle, kernel32 + from .._windows_pipes import PipeReceiveStream, PipeSendStream else: pytestmark = pytest.mark.skip(reason="windows only") pipe: Any = None diff --git a/trio/_tests/tools/test_gen_exports.py b/trio/_tests/tools/test_gen_exports.py index 55495c920d..9436105fa4 100644 --- a/trio/_tests/tools/test_gen_exports.py +++ b/trio/_tests/tools/test_gen_exports.py @@ -1,11 +1,8 @@ import ast + import pytest -from trio._tools.gen_exports import ( - get_public_methods, - create_passthrough_args, - process, -) +from trio._tools.gen_exports import create_passthrough_args, get_public_methods, process SOURCE = '''from _run import _public diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index e54af12444..812aa10ca5 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -51,45 +51,49 @@ }, "packageName": "trio", "symbols": [ - "trio.run", - "trio.current_effective_deadline", "trio._core._run._TaskStatusIgnored.__repr__", "trio._core._run._TaskStatusIgnored.started", - "trio.current_time", + "trio._channel.MemoryReceiveChannel", + "trio._abc.ReceiveChannel", + "trio._abc.ReceiveChannel.__aiter__", + "trio._abc.AsyncResource.aclose", + "trio._abc.AsyncResource.__aenter__", + "trio._abc.AsyncResource.__aexit__", + "trio._channel.MemorySendChannel", + "trio._abc.SendChannel", "trio._core._run.Nursery.__init__", "trio._core._run.Nursery.child_tasks", "trio._core._run.Nursery.parent_task", "trio._core._run.Nursery.start_soon", "trio._core._run.Nursery.start", "trio._core._run.Nursery.__del__", - "trio._sync.Event.is_set", - "trio._sync.Event.wait", - "trio._sync.Event.statistics", - "trio._sync.CapacityLimiter.__init__", - "trio._sync.CapacityLimiter.__repr__", - "trio._sync.CapacityLimiter.total_tokens", - "trio._sync.CapacityLimiter.borrowed_tokens", - "trio._sync.CapacityLimiter.available_tokens", - "trio._sync.CapacityLimiter.statistics", - "trio._sync.Semaphore.__init__", - "trio._sync.Semaphore.__repr__", - "trio._sync.Semaphore.value", - "trio._sync.Semaphore.max_value", - "trio._sync.Semaphore.statistics", - "trio._sync.Lock", - "trio._sync._LockImpl.__repr__", - "trio._sync._LockImpl.locked", - "trio._sync._LockImpl.statistics", - "trio._sync.StrictFIFOLock", - "trio._sync.Condition.__init__", - "trio._sync.Condition.locked", - "trio._sync.Condition.acquire_nowait", - "trio._sync.Condition.acquire", - "trio._sync.Condition.release", - "trio._sync.Condition.notify", - "trio._sync.Condition.notify_all", - 
"trio._sync.Condition.statistics", - "trio.aclose_forcefully", + "trio.current_effective_deadline", + "trio.current_time", + "trio.run", + "trio._dtls.DTLSChannel", + "trio._dtls.DTLSChannel.__init__", + "trio._dtls.DTLSChannel.close", + "trio._dtls.DTLSChannel.__enter__", + "trio._dtls.DTLSChannel.__exit__", + "trio._dtls.DTLSChannel.aclose", + "trio._dtls.DTLSChannel.do_handshake", + "trio._dtls.DTLSChannel.send", + "trio._dtls.DTLSChannel.receive", + "trio._dtls.DTLSChannel.set_ciphertext_mtu", + "trio._dtls.DTLSChannel.get_cleartext_mtu", + "trio._dtls.DTLSChannel.statistics", + "trio._abc.Channel", + "trio._dtls.DTLSEndpoint.__init__", + "trio._dtls.DTLSEndpoint.__del__", + "trio._dtls.DTLSEndpoint.close", + "trio._dtls.DTLSEndpoint.__enter__", + "trio._dtls.DTLSEndpoint.__exit__", + "trio._dtls.DTLSEndpoint.serve", + "trio._dtls.DTLSEndpoint.connect", + "trio._dtls.DTLSEndpoint.socket", + "trio._dtls.DTLSEndpoint.incoming_packets_buffer", + "trio.open_file", + "trio.wrap_file", "trio._highlevel_generic.StapledStream", "trio._highlevel_generic.StapledStream.send_stream", "trio._highlevel_generic.StapledStream.receive_stream", @@ -104,18 +108,22 @@ "trio._abc.SendStream", "trio._abc.SendStream.send_all", "trio._abc.SendStream.wait_send_all_might_not_block", - "trio._abc.AsyncResource.aclose", - "trio._abc.AsyncResource.__aenter__", - "trio._abc.AsyncResource.__aexit__", "trio._abc.ReceiveStream", "trio._abc.ReceiveStream.receive_some", "trio._abc.ReceiveStream.__aiter__", "trio._abc.ReceiveStream.__anext__", - "trio._channel.MemorySendChannel", - "trio._abc.SendChannel", - "trio._channel.MemoryReceiveChannel", - "trio._abc.ReceiveChannel", - "trio._abc.ReceiveChannel.__aiter__", + "trio.aclose_forcefully", + "trio.open_tcp_listeners", + "trio.serve_tcp", + "trio.open_tcp_stream", + "trio.open_unix_socket", + "trio.serve_listeners", + "trio._highlevel_socket.SocketListener", + "trio._highlevel_socket.SocketListener.__init__", + "trio._highlevel_socket.SocketListener.accept", + "trio._highlevel_socket.SocketListener.aclose", + "trio._abc.Listener", + "trio._abc.Listener.accept", "trio._highlevel_socket.SocketStream", "trio._highlevel_socket.SocketStream.__init__", "trio._highlevel_socket.SocketStream.send_all", @@ -125,14 +133,9 @@ "trio._highlevel_socket.SocketStream.aclose", "trio._highlevel_socket.SocketStream.setsockopt", "trio._highlevel_socket.SocketStream.getsockopt", - "trio._highlevel_socket.SocketListener", - "trio._highlevel_socket.SocketListener.__init__", - "trio._highlevel_socket.SocketListener.accept", - "trio._highlevel_socket.SocketListener.aclose", - "trio._abc.Listener", - "trio._abc.Listener.accept", - "trio.open_file", - "trio.wrap_file", + "trio.open_ssl_over_tcp_listeners", + "trio.open_ssl_over_tcp_stream", + "trio.serve_ssl_over_tcp", "trio._path.Path", "trio._path.Path.__init__", "trio._path.Path.__dir__", @@ -147,6 +150,22 @@ "trio._path.AsyncAutoWrapperType.generate_wraps", "trio._path.AsyncAutoWrapperType.generate_magic", "trio._path.AsyncAutoWrapperType.generate_iter", + "trio._ssl.SSLListener", + "trio._ssl.SSLListener.__init__", + "trio._ssl.SSLListener.accept", + "trio._ssl.SSLListener.aclose", + "trio._ssl.SSLStream", + "trio._ssl.SSLStream.__init__", + "trio._ssl.SSLStream.__getattr__", + "trio._ssl.SSLStream.__setattr__", + "trio._ssl.SSLStream.__dir__", + "trio._ssl.SSLStream.do_handshake", + "trio._ssl.SSLStream.receive_some", + "trio._ssl.SSLStream.send_all", + "trio._ssl.SSLStream.unwrap", + "trio._ssl.SSLStream.aclose", + 
"trio._ssl.SSLStream.wait_send_all_might_not_block", + "trio._ssl.SSLStream.transport_stream", "trio._subprocess.Process", "trio._subprocess.Process.encoding", "trio._subprocess.Process.errors", @@ -163,56 +182,39 @@ "trio._subprocess.Process.args", "trio._subprocess.Process.pid", "trio.run_process", - "trio._ssl.SSLStream", - "trio._ssl.SSLStream.__init__", - "trio._ssl.SSLStream.__getattr__", - "trio._ssl.SSLStream.__setattr__", - "trio._ssl.SSLStream.__dir__", - "trio._ssl.SSLStream.do_handshake", - "trio._ssl.SSLStream.receive_some", - "trio._ssl.SSLStream.send_all", - "trio._ssl.SSLStream.unwrap", - "trio._ssl.SSLStream.aclose", - "trio._ssl.SSLStream.wait_send_all_might_not_block", - "trio._ssl.SSLStream.transport_stream", - "trio._ssl.SSLListener", - "trio._ssl.SSLListener.__init__", - "trio._ssl.SSLListener.accept", - "trio._ssl.SSLListener.aclose", - "trio._dtls.DTLSEndpoint.__init__", - "trio._dtls.DTLSEndpoint.__del__", - "trio._dtls.DTLSEndpoint.close", - "trio._dtls.DTLSEndpoint.__enter__", - "trio._dtls.DTLSEndpoint.__exit__", - "trio._dtls.DTLSEndpoint.serve", - "trio._dtls.DTLSEndpoint.connect", - "trio._dtls.DTLSEndpoint.socket", - "trio._dtls.DTLSEndpoint.incoming_packets_buffer", - "trio._dtls.DTLSChannel", - "trio._dtls.DTLSChannel.__init__", - "trio._dtls.DTLSChannel.close", - "trio._dtls.DTLSChannel.__enter__", - "trio._dtls.DTLSChannel.__exit__", - "trio._dtls.DTLSChannel.aclose", - "trio._dtls.DTLSChannel.do_handshake", - "trio._dtls.DTLSChannel.send", - "trio._dtls.DTLSChannel.receive", - "trio._dtls.DTLSChannel.set_ciphertext_mtu", - "trio._dtls.DTLSChannel.get_cleartext_mtu", - "trio._dtls.DTLSChannel.statistics", - "trio._abc.Channel", - "trio.serve_listeners", - "trio.open_tcp_stream", - "trio.open_tcp_listeners", - "trio.serve_tcp", - "trio.open_unix_socket", - "trio.open_ssl_over_tcp_stream", - "trio.open_ssl_over_tcp_listeners", - "trio.serve_ssl_over_tcp", + "trio._sync.CapacityLimiter.__init__", + "trio._sync.CapacityLimiter.__repr__", + "trio._sync.CapacityLimiter.total_tokens", + "trio._sync.CapacityLimiter.borrowed_tokens", + "trio._sync.CapacityLimiter.available_tokens", + "trio._sync.CapacityLimiter.statistics", + "trio._sync.Condition.__init__", + "trio._sync.Condition.locked", + "trio._sync.Condition.acquire_nowait", + "trio._sync.Condition.acquire", + "trio._sync.Condition.release", + "trio._sync.Condition.notify", + "trio._sync.Condition.notify_all", + "trio._sync.Condition.statistics", + "trio._sync.Event.is_set", + "trio._sync.Event.wait", + "trio._sync.Event.statistics", + "trio._sync.Lock", + "trio._sync._LockImpl.__repr__", + "trio._sync._LockImpl.locked", + "trio._sync._LockImpl.statistics", + "trio._sync.Semaphore.__init__", + "trio._sync.Semaphore.__repr__", + "trio._sync.Semaphore.value", + "trio._sync.Semaphore.max_value", + "trio._sync.Semaphore.statistics", + "trio._sync.StrictFIFOLock", "trio.__deprecated_attributes__", "trio._abc.Clock.start_clock", "trio._abc.Clock.current_time", "trio._abc.Clock.deadline_to_sleep_time", + "trio._abc.HostnameResolver.getaddrinfo", + "trio._abc.HostnameResolver.getnameinfo", "trio._abc.Instrument.before_run", "trio._abc.Instrument.after_run", "trio._abc.Instrument.task_spawned", @@ -223,12 +225,17 @@ "trio._abc.Instrument.before_io_wait", "trio._abc.Instrument.after_io_wait", "trio._abc.SocketFactory.socket", - "trio._abc.HostnameResolver.getaddrinfo", - "trio._abc.HostnameResolver.getnameinfo", "trio.from_thread.run", "trio.from_thread.run_sync", - "trio.lowlevel.cancel_shielded_checkpoint", - 
"trio.lowlevel.currently_ki_protected", + "trio._core._parking_lot.ParkingLot.__len__", + "trio._core._parking_lot.ParkingLot.__bool__", + "trio._core._parking_lot.ParkingLot.unpark_all", + "trio._core._parking_lot.ParkingLot.repark_all", + "trio._core._parking_lot.ParkingLot.statistics", + "trio._core._local.RunVar.get", + "trio._core._local.RunVar.set", + "trio._core._local.RunVar.reset", + "trio._core._local.RunVar.__repr__", "trio._core._run.Task.coro", "trio._core._run.Task.name", "trio._core._run.Task.context", @@ -238,13 +245,7 @@ "trio._core._run.Task.eventual_parent_nursery", "trio._core._run.Task.child_nurseries", "trio._core._run.Task.iter_await_frames", - "trio.lowlevel.checkpoint", - "trio.lowlevel.current_task", - "trio._core._parking_lot.ParkingLot.__len__", - "trio._core._parking_lot.ParkingLot.__bool__", - "trio._core._parking_lot.ParkingLot.unpark_all", - "trio._core._parking_lot.ParkingLot.repark_all", - "trio._core._parking_lot.ParkingLot.statistics", + "trio._core._entry_queue.TrioToken.run_sync_soon", "trio._core._unbounded_queue.UnboundedQueue.__repr__", "trio._core._unbounded_queue.UnboundedQueue.qsize", "trio._core._unbounded_queue.UnboundedQueue.empty", @@ -253,40 +254,38 @@ "trio._core._unbounded_queue.UnboundedQueue.statistics", "trio._core._unbounded_queue.UnboundedQueue.__aiter__", "trio._core._unbounded_queue.UnboundedQueue.__anext__", - "trio._core._local.RunVar.get", - "trio._core._local.RunVar.set", - "trio._core._local.RunVar.reset", - "trio._core._local.RunVar.__repr__", - "trio._core._entry_queue.TrioToken.run_sync_soon", + "trio.lowlevel.add_instrument", + "trio.lowlevel.cancel_shielded_checkpoint", + "trio.lowlevel.checkpoint", + "trio.lowlevel.checkpoint_if_cancelled", + "trio.lowlevel.current_clock", + "trio.lowlevel.current_root_task", + "trio.lowlevel.current_statistics", + "trio.lowlevel.current_task", "trio.lowlevel.current_trio_token", - "trio.lowlevel.temporarily_detach_coroutine_object", + "trio.lowlevel.currently_ki_protected", + "trio.lowlevel.notify_closing", "trio.lowlevel.permanently_detach_coroutine_object", "trio.lowlevel.reattach_detached_coroutine_object", - "trio.lowlevel.current_statistics", - "trio.lowlevel.reschedule", "trio.lowlevel.remove_instrument", - "trio.lowlevel.add_instrument", - "trio.lowlevel.current_clock", - "trio.lowlevel.current_root_task", - "trio.lowlevel.checkpoint_if_cancelled", + "trio.lowlevel.reschedule", "trio.lowlevel.spawn_system_task", + "trio.lowlevel.start_guest_run", + "trio.lowlevel.start_thread_soon", + "trio.lowlevel.temporarily_detach_coroutine_object", "trio.lowlevel.wait_readable", "trio.lowlevel.wait_writable", - "trio.lowlevel.notify_closing", - "trio.lowlevel.start_thread_soon", - "trio.lowlevel.start_guest_run", "trio.lowlevel.open_process", "trio._unix_pipes.FdStream", - "trio.socket.fromfd", "trio.socket.from_stdlib_socket", - "trio.socket.getprotobyname", - "trio.socket.socketpair", - "trio.socket.getnameinfo", - "trio.socket.socket", + "trio.socket.fromfd", "trio.socket.getaddrinfo", + "trio.socket.getnameinfo", + "trio.socket.getprotobyname", "trio.socket.set_custom_hostname_resolver", "trio.socket.set_custom_socket_factory", - "trio.testing.wait_all_tasks_blocked", + "trio.socket.socket", + "trio.socket.socketpair", "trio._core._mock_clock.MockClock", "trio._core._mock_clock.MockClock.__init__", "trio._core._mock_clock.MockClock.__repr__", @@ -296,12 +295,21 @@ "trio._core._mock_clock.MockClock.current_time", "trio._core._mock_clock.MockClock.deadline_to_sleep_time", 
"trio._core._mock_clock.MockClock.jump", - "trio.testing.trio_test", - "trio.testing.assert_checkpoints", - "trio.testing.assert_no_checkpoints", + "trio.testing.wait_all_tasks_blocked", + "trio.testing.check_half_closeable_stream", "trio.testing.check_one_way_stream", "trio.testing.check_two_way_stream", - "trio.testing.check_half_closeable_stream", + "trio.testing.assert_checkpoints", + "trio.testing.assert_no_checkpoints", + "trio.testing._memory_streams.MemoryReceiveStream", + "trio.testing._memory_streams.MemoryReceiveStream.__init__", + "trio.testing._memory_streams.MemoryReceiveStream.receive_some", + "trio.testing._memory_streams.MemoryReceiveStream.close", + "trio.testing._memory_streams.MemoryReceiveStream.aclose", + "trio.testing._memory_streams.MemoryReceiveStream.put_data", + "trio.testing._memory_streams.MemoryReceiveStream.put_eof", + "trio.testing._memory_streams.MemoryReceiveStream.receive_some_hook", + "trio.testing._memory_streams.MemoryReceiveStream.close_hook", "trio.testing._memory_streams.MemorySendStream", "trio.testing._memory_streams.MemorySendStream.__init__", "trio.testing._memory_streams.MemorySendStream.send_all", @@ -313,21 +321,13 @@ "trio.testing._memory_streams.MemorySendStream.send_all_hook", "trio.testing._memory_streams.MemorySendStream.wait_send_all_might_not_block_hook", "trio.testing._memory_streams.MemorySendStream.close_hook", - "trio.testing._memory_streams.MemoryReceiveStream", - "trio.testing._memory_streams.MemoryReceiveStream.__init__", - "trio.testing._memory_streams.MemoryReceiveStream.receive_some", - "trio.testing._memory_streams.MemoryReceiveStream.close", - "trio.testing._memory_streams.MemoryReceiveStream.aclose", - "trio.testing._memory_streams.MemoryReceiveStream.put_data", - "trio.testing._memory_streams.MemoryReceiveStream.put_eof", - "trio.testing._memory_streams.MemoryReceiveStream.receive_some_hook", - "trio.testing._memory_streams.MemoryReceiveStream.close_hook", - "trio.testing.memory_stream_pump", - "trio.testing.memory_stream_one_way_pair", - "trio.testing.memory_stream_pair", "trio.testing.lockstep_stream_one_way_pair", "trio.testing.lockstep_stream_pair", + "trio.testing.memory_stream_one_way_pair", + "trio.testing.memory_stream_pair", + "trio.testing.memory_stream_pump", "trio.testing.open_stream_to_socket_listener", + "trio.testing.trio_test", "trio.tests.TestsDeprecationWrapper", "trio.to_thread.current_default_thread_limiter" ] diff --git a/trio/_tools/gen_exports.py b/trio/_tools/gen_exports.py index 3c18a86298..a5d8529b53 100755 --- a/trio/_tools/gen_exports.py +++ b/trio/_tools/gen_exports.py @@ -5,21 +5,22 @@ """ import argparse import ast -import astor import os -from pathlib import Path import sys - +from pathlib import Path from textwrap import indent +import astor + PREFIX = "_generated" HEADER = """# *********************************************************** # ******* WARNING: AUTOGENERATED! 
ALL EDITS WILL BE LOST ****** # ************************************************************* -from ._run import GLOBAL_RUN_CONTEXT, _NO_SEND -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +# isort: skip from ._instrumentation import Instrument +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT # fmt: off """ diff --git a/trio/_unix_pipes.py b/trio/_unix_pipes.py index fa98e79521..f4158eb27d 100644 --- a/trio/_unix_pipes.py +++ b/trio/_unix_pipes.py @@ -1,14 +1,14 @@ from __future__ import annotations -import os import errno +import os from typing import TYPE_CHECKING +import trio + from ._abc import Stream from ._util import ConflictDetector, Final -import trio - if TYPE_CHECKING: from typing_extensions import Final as FinalType diff --git a/trio/_util.py b/trio/_util.py index 89a2dea7de..c21cefe71e 100644 --- a/trio/_util.py +++ b/trio/_util.py @@ -1,14 +1,14 @@ # Little utilities we use internally from __future__ import annotations -from abc import ABCMeta +import collections +import inspect import os import signal -from functools import update_wrapper -import typing as t import threading -import collections -import inspect +import typing as t +from abc import ABCMeta +from functools import update_wrapper import trio diff --git a/trio/_wait_for_object.py b/trio/_wait_for_object.py index 9c763f7363..32a88e5398 100644 --- a/trio/_wait_for_object.py +++ b/trio/_wait_for_object.py @@ -1,12 +1,8 @@ import math + import trio -from ._core._windows_cffi import ( - ffi, - kernel32, - ErrorCodes, - raise_winerror, - _handle, -) + +from ._core._windows_cffi import ErrorCodes, _handle, ffi, kernel32, raise_winerror async def WaitForSingleObject(obj): diff --git a/trio/_windows_pipes.py b/trio/_windows_pipes.py index bd5c34eee2..c1c357b018 100644 --- a/trio/_windows_pipes.py +++ b/trio/_windows_pipes.py @@ -1,9 +1,10 @@ import sys from typing import TYPE_CHECKING + from . 
import _core -from ._abc import SendStream, ReceiveStream +from ._abc import ReceiveStream, SendStream +from ._core._windows_cffi import _handle, kernel32, raise_winerror from ._util import ConflictDetector, Final -from ._core._windows_cffi import _handle, raise_winerror, kernel32 assert sys.platform == "win32" or not TYPE_CHECKING diff --git a/trio/abc.py b/trio/abc.py index dd4d4fcd08..439995640e 100644 --- a/trio/abc.py +++ b/trio/abc.py @@ -7,17 +7,17 @@ # Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) from ._abc import ( - Clock as Clock, - Instrument as Instrument, AsyncResource as AsyncResource, - SendStream as SendStream, - ReceiveStream as ReceiveStream, - Stream as Stream, + Channel as Channel, + Clock as Clock, HalfCloseableStream as HalfCloseableStream, - SocketFactory as SocketFactory, HostnameResolver as HostnameResolver, + Instrument as Instrument, Listener as Listener, - SendChannel as SendChannel, ReceiveChannel as ReceiveChannel, - Channel as Channel, + ReceiveStream as ReceiveStream, + SendChannel as SendChannel, + SendStream as SendStream, + SocketFactory as SocketFactory, + Stream as Stream, ) diff --git a/trio/from_thread.py b/trio/from_thread.py index 8c2b490705..e6f7b2495e 100644 --- a/trio/from_thread.py +++ b/trio/from_thread.py @@ -4,8 +4,7 @@ """ -from ._threads import from_thread_run as run -from ._threads import from_thread_run_sync as run_sync +from ._threads import from_thread_run as run, from_thread_run_sync as run_sync # need to use __all__ for pyright --verifytypes to see re-exports when renaming them __all__ = ["run", "run_sync"] diff --git a/trio/lowlevel.py b/trio/lowlevel.py index b7f4f3a725..db8d180181 100644 --- a/trio/lowlevel.py +++ b/trio/lowlevel.py @@ -7,56 +7,56 @@ import sys import typing as _t -# This is the union of a subset of trio/_core/ and some things from trio/*.py. -# See comments in trio/__init__.py for details. 
- -# Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) - # Generally available symbols from ._core import ( - cancel_shielded_checkpoint as cancel_shielded_checkpoint, Abort as Abort, + ParkingLot as ParkingLot, RaiseCancelT as RaiseCancelT, - wait_task_rescheduled as wait_task_rescheduled, - enable_ki_protection as enable_ki_protection, - disable_ki_protection as disable_ki_protection, - currently_ki_protected as currently_ki_protected, + RunVar as RunVar, Task as Task, + TrioToken as TrioToken, + UnboundedQueue as UnboundedQueue, + add_instrument as add_instrument, + cancel_shielded_checkpoint as cancel_shielded_checkpoint, checkpoint as checkpoint, + checkpoint_if_cancelled as checkpoint_if_cancelled, + current_clock as current_clock, + current_root_task as current_root_task, + current_statistics as current_statistics, current_task as current_task, - ParkingLot as ParkingLot, - UnboundedQueue as UnboundedQueue, - RunVar as RunVar, - TrioToken as TrioToken, current_trio_token as current_trio_token, - temporarily_detach_coroutine_object as temporarily_detach_coroutine_object, + currently_ki_protected as currently_ki_protected, + disable_ki_protection as disable_ki_protection, + enable_ki_protection as enable_ki_protection, + notify_closing as notify_closing, permanently_detach_coroutine_object as permanently_detach_coroutine_object, reattach_detached_coroutine_object as reattach_detached_coroutine_object, - current_statistics as current_statistics, - reschedule as reschedule, remove_instrument as remove_instrument, - add_instrument as add_instrument, - current_clock as current_clock, - current_root_task as current_root_task, - checkpoint_if_cancelled as checkpoint_if_cancelled, + reschedule as reschedule, spawn_system_task as spawn_system_task, + start_guest_run as start_guest_run, + start_thread_soon as start_thread_soon, + temporarily_detach_coroutine_object as temporarily_detach_coroutine_object, wait_readable as wait_readable, + wait_task_rescheduled as wait_task_rescheduled, wait_writable as wait_writable, - notify_closing as notify_closing, - start_thread_soon as start_thread_soon, - start_guest_run as start_guest_run, ) - from ._subprocess import open_process as open_process +# This is the union of a subset of trio/_core/ and some things from trio/*.py. +# See comments in trio/__init__.py for details. + +# Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) + + if sys.platform == "win32": # Windows symbols from ._core import ( current_iocp as current_iocp, - register_with_iocp as register_with_iocp, - wait_overlapped as wait_overlapped, monitor_completion_key as monitor_completion_key, readinto_overlapped as readinto_overlapped, + register_with_iocp as register_with_iocp, + wait_overlapped as wait_overlapped, write_overlapped as write_overlapped, ) from ._wait_for_object import WaitForSingleObject as WaitForSingleObject diff --git a/trio/socket.py b/trio/socket.py index 6d3ed366d4..61ba48ad3b 100644 --- a/trio/socket.py +++ b/trio/socket.py @@ -9,13 +9,13 @@ # Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) -from . import _socket -import sys -import typing as _t - # Dynamically re-export whatever constants this particular Python happens to # have: import socket as _stdlib_socket +import sys +import typing as _t + +from . 
import _socket _bad_symbols: _t.Set[str] = set() if sys.platform == "win32": @@ -34,16 +34,16 @@ # import the overwrites from ._socket import ( - fromfd as fromfd, + SocketType as SocketType, from_stdlib_socket as from_stdlib_socket, - getprotobyname as getprotobyname, - socketpair as socketpair, - getnameinfo as getnameinfo, - socket as socket, + fromfd as fromfd, getaddrinfo as getaddrinfo, + getnameinfo as getnameinfo, + getprotobyname as getprotobyname, set_custom_hostname_resolver as set_custom_hostname_resolver, set_custom_socket_factory as set_custom_socket_factory, - SocketType as SocketType, + socket as socket, + socketpair as socketpair, ) # not always available so expose only if @@ -56,25 +56,25 @@ # expose these functions to trio.socket from socket import ( gaierror as gaierror, - herror as herror, gethostname as gethostname, - ntohs as ntohs, + herror as herror, htonl as htonl, htons as htons, inet_aton as inet_aton, inet_ntoa as inet_ntoa, - inet_pton as inet_pton, inet_ntop as inet_ntop, + inet_pton as inet_pton, + ntohs as ntohs, ) # not always available so expose only if if sys.platform != "win32" or not _t.TYPE_CHECKING: try: from socket import ( - sethostname as sethostname, + if_indextoname as if_indextoname, if_nameindex as if_nameindex, if_nametoindex as if_nametoindex, - if_indextoname as if_indextoname, + sethostname as sethostname, ) except ImportError: pass @@ -104,268 +104,89 @@ # kept up to date. if _t.TYPE_CHECKING: from socket import ( # type: ignore[attr-defined] - CMSG_LEN as CMSG_LEN, - CMSG_SPACE as CMSG_SPACE, - CAPI as CAPI, - AF_UNSPEC as AF_UNSPEC, - AF_INET as AF_INET, - AF_UNIX as AF_UNIX, - AF_IPX as AF_IPX, + AF_ALG as AF_ALG, AF_APPLETALK as AF_APPLETALK, - AF_INET6 as AF_INET6, - AF_ROUTE as AF_ROUTE, - AF_LINK as AF_LINK, - AF_SNA as AF_SNA, - PF_SYSTEM as PF_SYSTEM, - AF_SYSTEM as AF_SYSTEM, - SOCK_STREAM as SOCK_STREAM, - SOCK_DGRAM as SOCK_DGRAM, - SOCK_RAW as SOCK_RAW, - SOCK_SEQPACKET as SOCK_SEQPACKET, - SOCK_RDM as SOCK_RDM, - SO_DEBUG as SO_DEBUG, - SO_ACCEPTCONN as SO_ACCEPTCONN, - SO_REUSEADDR as SO_REUSEADDR, - SO_KEEPALIVE as SO_KEEPALIVE, - SO_DONTROUTE as SO_DONTROUTE, - SO_BROADCAST as SO_BROADCAST, - SO_USELOOPBACK as SO_USELOOPBACK, - SO_LINGER as SO_LINGER, - SO_OOBINLINE as SO_OOBINLINE, - SO_REUSEPORT as SO_REUSEPORT, - SO_SNDBUF as SO_SNDBUF, - SO_RCVBUF as SO_RCVBUF, - SO_SNDLOWAT as SO_SNDLOWAT, - SO_RCVLOWAT as SO_RCVLOWAT, - SO_SNDTIMEO as SO_SNDTIMEO, - SO_RCVTIMEO as SO_RCVTIMEO, - SO_ERROR as SO_ERROR, - SO_TYPE as SO_TYPE, - LOCAL_PEERCRED as LOCAL_PEERCRED, - SOMAXCONN as SOMAXCONN, - SCM_RIGHTS as SCM_RIGHTS, - SCM_CREDS as SCM_CREDS, - MSG_OOB as MSG_OOB, - MSG_PEEK as MSG_PEEK, - MSG_DONTROUTE as MSG_DONTROUTE, - MSG_DONTWAIT as MSG_DONTWAIT, - MSG_EOR as MSG_EOR, - MSG_TRUNC as MSG_TRUNC, - MSG_CTRUNC as MSG_CTRUNC, - MSG_WAITALL as MSG_WAITALL, - MSG_EOF as MSG_EOF, - SOL_SOCKET as SOL_SOCKET, - SOL_IP as SOL_IP, - SOL_TCP as SOL_TCP, - SOL_UDP as SOL_UDP, - IPPROTO_IP as IPPROTO_IP, - IPPROTO_HOPOPTS as IPPROTO_HOPOPTS, - IPPROTO_ICMP as IPPROTO_ICMP, - IPPROTO_IGMP as IPPROTO_IGMP, - IPPROTO_GGP as IPPROTO_GGP, - IPPROTO_IPV4 as IPPROTO_IPV4, - IPPROTO_IPIP as IPPROTO_IPIP, - IPPROTO_TCP as IPPROTO_TCP, - IPPROTO_EGP as IPPROTO_EGP, - IPPROTO_PUP as IPPROTO_PUP, - IPPROTO_UDP as IPPROTO_UDP, - IPPROTO_IDP as IPPROTO_IDP, - IPPROTO_HELLO as IPPROTO_HELLO, - IPPROTO_ND as IPPROTO_ND, - IPPROTO_TP as IPPROTO_TP, - IPPROTO_ROUTING as IPPROTO_ROUTING, - IPPROTO_FRAGMENT as IPPROTO_FRAGMENT, - IPPROTO_RSVP as 
IPPROTO_RSVP, - IPPROTO_GRE as IPPROTO_GRE, - IPPROTO_ESP as IPPROTO_ESP, - IPPROTO_AH as IPPROTO_AH, - IPPROTO_ICMPV6 as IPPROTO_ICMPV6, - IPPROTO_NONE as IPPROTO_NONE, - IPPROTO_DSTOPTS as IPPROTO_DSTOPTS, - IPPROTO_XTP as IPPROTO_XTP, - IPPROTO_EON as IPPROTO_EON, - IPPROTO_PIM as IPPROTO_PIM, - IPPROTO_IPCOMP as IPPROTO_IPCOMP, - IPPROTO_SCTP as IPPROTO_SCTP, - IPPROTO_RAW as IPPROTO_RAW, - IPPROTO_MAX as IPPROTO_MAX, - IPPROTO_MPTCP as IPPROTO_MPTCP, - SYSPROTO_CONTROL as SYSPROTO_CONTROL, - IPPORT_RESERVED as IPPORT_RESERVED, - IPPORT_USERRESERVED as IPPORT_USERRESERVED, - INADDR_ANY as INADDR_ANY, - INADDR_BROADCAST as INADDR_BROADCAST, - INADDR_LOOPBACK as INADDR_LOOPBACK, - INADDR_UNSPEC_GROUP as INADDR_UNSPEC_GROUP, - INADDR_ALLHOSTS_GROUP as INADDR_ALLHOSTS_GROUP, - INADDR_MAX_LOCAL_GROUP as INADDR_MAX_LOCAL_GROUP, - INADDR_NONE as INADDR_NONE, - IP_OPTIONS as IP_OPTIONS, - IP_HDRINCL as IP_HDRINCL, - IP_TOS as IP_TOS, - IP_TTL as IP_TTL, - IP_RECVOPTS as IP_RECVOPTS, - IP_RECVRETOPTS as IP_RECVRETOPTS, - IP_RECVDSTADDR as IP_RECVDSTADDR, - IP_RETOPTS as IP_RETOPTS, - IP_MULTICAST_IF as IP_MULTICAST_IF, - IP_MULTICAST_TTL as IP_MULTICAST_TTL, - IP_MULTICAST_LOOP as IP_MULTICAST_LOOP, - IP_ADD_MEMBERSHIP as IP_ADD_MEMBERSHIP, - IP_DROP_MEMBERSHIP as IP_DROP_MEMBERSHIP, - IP_DEFAULT_MULTICAST_TTL as IP_DEFAULT_MULTICAST_TTL, - IP_DEFAULT_MULTICAST_LOOP as IP_DEFAULT_MULTICAST_LOOP, - IP_MAX_MEMBERSHIPS as IP_MAX_MEMBERSHIPS, - IPV6_JOIN_GROUP as IPV6_JOIN_GROUP, - IPV6_LEAVE_GROUP as IPV6_LEAVE_GROUP, - IPV6_MULTICAST_HOPS as IPV6_MULTICAST_HOPS, - IPV6_MULTICAST_IF as IPV6_MULTICAST_IF, - IPV6_MULTICAST_LOOP as IPV6_MULTICAST_LOOP, - IPV6_UNICAST_HOPS as IPV6_UNICAST_HOPS, - IPV6_V6ONLY as IPV6_V6ONLY, - IPV6_CHECKSUM as IPV6_CHECKSUM, - IPV6_RECVTCLASS as IPV6_RECVTCLASS, - IPV6_RTHDR_TYPE_0 as IPV6_RTHDR_TYPE_0, - IPV6_TCLASS as IPV6_TCLASS, - TCP_NODELAY as TCP_NODELAY, - TCP_MAXSEG as TCP_MAXSEG, - TCP_KEEPINTVL as TCP_KEEPINTVL, - TCP_KEEPCNT as TCP_KEEPCNT, - TCP_FASTOPEN as TCP_FASTOPEN, - TCP_NOTSENT_LOWAT as TCP_NOTSENT_LOWAT, - EAI_ADDRFAMILY as EAI_ADDRFAMILY, - EAI_AGAIN as EAI_AGAIN, - EAI_BADFLAGS as EAI_BADFLAGS, - EAI_FAIL as EAI_FAIL, - EAI_FAMILY as EAI_FAMILY, - EAI_MEMORY as EAI_MEMORY, - EAI_NODATA as EAI_NODATA, - EAI_NONAME as EAI_NONAME, - EAI_OVERFLOW as EAI_OVERFLOW, - EAI_SERVICE as EAI_SERVICE, - EAI_SOCKTYPE as EAI_SOCKTYPE, - EAI_SYSTEM as EAI_SYSTEM, - EAI_BADHINTS as EAI_BADHINTS, - EAI_PROTOCOL as EAI_PROTOCOL, - EAI_MAX as EAI_MAX, - AI_PASSIVE as AI_PASSIVE, - AI_CANONNAME as AI_CANONNAME, - AI_NUMERICHOST as AI_NUMERICHOST, - AI_NUMERICSERV as AI_NUMERICSERV, - AI_MASK as AI_MASK, - AI_ALL as AI_ALL, - AI_V4MAPPED_CFG as AI_V4MAPPED_CFG, - AI_ADDRCONFIG as AI_ADDRCONFIG, - AI_V4MAPPED as AI_V4MAPPED, - AI_DEFAULT as AI_DEFAULT, - NI_MAXHOST as NI_MAXHOST, - NI_MAXSERV as NI_MAXSERV, - NI_NOFQDN as NI_NOFQDN, - NI_NUMERICHOST as NI_NUMERICHOST, - NI_NAMEREQD as NI_NAMEREQD, - NI_NUMERICSERV as NI_NUMERICSERV, - NI_DGRAM as NI_DGRAM, - SHUT_RD as SHUT_RD, - SHUT_WR as SHUT_WR, - SHUT_RDWR as SHUT_RDWR, - EBADF as EBADF, - EAGAIN as EAGAIN, - EWOULDBLOCK as EWOULDBLOCK, AF_ASH as AF_ASH, AF_ATMPVC as AF_ATMPVC, AF_ATMSVC as AF_ATMSVC, AF_AX25 as AF_AX25, AF_BLUETOOTH as AF_BLUETOOTH, AF_BRIDGE as AF_BRIDGE, + AF_CAN as AF_CAN, AF_ECONET as AF_ECONET, + AF_INET as AF_INET, + AF_INET6 as AF_INET6, + AF_IPX as AF_IPX, AF_IRDA as AF_IRDA, AF_KEY as AF_KEY, + AF_LINK as AF_LINK, AF_LLC as AF_LLC, AF_NETBEUI as AF_NETBEUI, AF_NETLINK as AF_NETLINK, 
AF_NETROM as AF_NETROM, AF_PACKET as AF_PACKET, AF_PPPOX as AF_PPPOX, + AF_QIPCRTR as AF_QIPCRTR, + AF_RDS as AF_RDS, AF_ROSE as AF_ROSE, + AF_ROUTE as AF_ROUTE, AF_SECURITY as AF_SECURITY, + AF_SNA as AF_SNA, + AF_SYSTEM as AF_SYSTEM, + AF_TIPC as AF_TIPC, + AF_UNIX as AF_UNIX, + AF_UNSPEC as AF_UNSPEC, + AF_VSOCK as AF_VSOCK, AF_WANPIPE as AF_WANPIPE, AF_X25 as AF_X25, + AI_ADDRCONFIG as AI_ADDRCONFIG, + AI_ALL as AI_ALL, + AI_CANONNAME as AI_CANONNAME, + AI_DEFAULT as AI_DEFAULT, + AI_MASK as AI_MASK, + AI_NUMERICHOST as AI_NUMERICHOST, + AI_NUMERICSERV as AI_NUMERICSERV, + AI_PASSIVE as AI_PASSIVE, + AI_V4MAPPED as AI_V4MAPPED, + AI_V4MAPPED_CFG as AI_V4MAPPED_CFG, + ALG_OP_DECRYPT as ALG_OP_DECRYPT, + ALG_OP_ENCRYPT as ALG_OP_ENCRYPT, + ALG_OP_SIGN as ALG_OP_SIGN, + ALG_OP_VERIFY as ALG_OP_VERIFY, + ALG_SET_AEAD_ASSOCLEN as ALG_SET_AEAD_ASSOCLEN, + ALG_SET_AEAD_AUTHSIZE as ALG_SET_AEAD_AUTHSIZE, + ALG_SET_IV as ALG_SET_IV, + ALG_SET_KEY as ALG_SET_KEY, + ALG_SET_OP as ALG_SET_OP, + ALG_SET_PUBKEY as ALG_SET_PUBKEY, BDADDR_ANY as BDADDR_ANY, BDADDR_LOCAL as BDADDR_LOCAL, - FD_SETSIZE as FD_SETSIZE, - IPV6_DSTOPTS as IPV6_DSTOPTS, - IPV6_HOPLIMIT as IPV6_HOPLIMIT, - IPV6_HOPOPTS as IPV6_HOPOPTS, - IPV6_NEXTHOP as IPV6_NEXTHOP, - IPV6_PKTINFO as IPV6_PKTINFO, - IPV6_RECVDSTOPTS as IPV6_RECVDSTOPTS, - IPV6_RECVHOPLIMIT as IPV6_RECVHOPLIMIT, - IPV6_RECVHOPOPTS as IPV6_RECVHOPOPTS, - IPV6_RECVPKTINFO as IPV6_RECVPKTINFO, - IPV6_RECVRTHDR as IPV6_RECVRTHDR, - IPV6_RTHDR as IPV6_RTHDR, - IPV6_RTHDRDSTOPTS as IPV6_RTHDRDSTOPTS, - MSG_ERRQUEUE as MSG_ERRQUEUE, - NETLINK_DNRTMSG as NETLINK_DNRTMSG, - NETLINK_FIREWALL as NETLINK_FIREWALL, - NETLINK_IP6_FW as NETLINK_IP6_FW, - NETLINK_NFLOG as NETLINK_NFLOG, - NETLINK_ROUTE as NETLINK_ROUTE, - NETLINK_USERSOCK as NETLINK_USERSOCK, - NETLINK_XFRM as NETLINK_XFRM, - PACKET_BROADCAST as PACKET_BROADCAST, - PACKET_FASTROUTE as PACKET_FASTROUTE, - PACKET_HOST as PACKET_HOST, - PACKET_LOOPBACK as PACKET_LOOPBACK, - PACKET_MULTICAST as PACKET_MULTICAST, - PACKET_OTHERHOST as PACKET_OTHERHOST, - PACKET_OUTGOING as PACKET_OUTGOING, - POLLERR as POLLERR, - POLLHUP as POLLHUP, - POLLIN as POLLIN, - POLLMSG as POLLMSG, - POLLNVAL as POLLNVAL, - POLLOUT as POLLOUT, - POLLPRI as POLLPRI, - POLLRDBAND as POLLRDBAND, - POLLRDNORM as POLLRDNORM, - POLLWRNORM as POLLWRNORM, - SIOCGIFINDEX as SIOCGIFINDEX, - SIOCGIFNAME as SIOCGIFNAME, - SOCK_CLOEXEC as SOCK_CLOEXEC, - TCP_CORK as TCP_CORK, - TCP_DEFER_ACCEPT as TCP_DEFER_ACCEPT, - TCP_INFO as TCP_INFO, - TCP_KEEPIDLE as TCP_KEEPIDLE, - TCP_LINGER2 as TCP_LINGER2, - TCP_QUICKACK as TCP_QUICKACK, - TCP_SYNCNT as TCP_SYNCNT, - TCP_WINDOW_CLAMP as TCP_WINDOW_CLAMP, - AF_ALG as AF_ALG, - AF_CAN as AF_CAN, - AF_RDS as AF_RDS, - AF_TIPC as AF_TIPC, - AF_VSOCK as AF_VSOCK, - ALG_OP_DECRYPT as ALG_OP_DECRYPT, - ALG_OP_ENCRYPT as ALG_OP_ENCRYPT, - ALG_OP_SIGN as ALG_OP_SIGN, - ALG_OP_VERIFY as ALG_OP_VERIFY, - ALG_SET_AEAD_ASSOCLEN as ALG_SET_AEAD_ASSOCLEN, - ALG_SET_AEAD_AUTHSIZE as ALG_SET_AEAD_AUTHSIZE, - ALG_SET_IV as ALG_SET_IV, - ALG_SET_KEY as ALG_SET_KEY, - ALG_SET_OP as ALG_SET_OP, - ALG_SET_PUBKEY as ALG_SET_PUBKEY, + BTPROTO_HCI as BTPROTO_HCI, + BTPROTO_L2CAP as BTPROTO_L2CAP, + BTPROTO_RFCOMM as BTPROTO_RFCOMM, + BTPROTO_SCO as BTPROTO_SCO, CAN_BCM as CAN_BCM, + CAN_BCM_CAN_FD_FRAME as CAN_BCM_CAN_FD_FRAME, + CAN_BCM_RX_ANNOUNCE_RESUME as CAN_BCM_RX_ANNOUNCE_RESUME, CAN_BCM_RX_CHANGED as CAN_BCM_RX_CHANGED, + CAN_BCM_RX_CHECK_DLC as CAN_BCM_RX_CHECK_DLC, CAN_BCM_RX_DELETE as CAN_BCM_RX_DELETE, + 
CAN_BCM_RX_FILTER_ID as CAN_BCM_RX_FILTER_ID, + CAN_BCM_RX_NO_AUTOTIMER as CAN_BCM_RX_NO_AUTOTIMER, CAN_BCM_RX_READ as CAN_BCM_RX_READ, + CAN_BCM_RX_RTR_FRAME as CAN_BCM_RX_RTR_FRAME, CAN_BCM_RX_SETUP as CAN_BCM_RX_SETUP, CAN_BCM_RX_STATUS as CAN_BCM_RX_STATUS, CAN_BCM_RX_TIMEOUT as CAN_BCM_RX_TIMEOUT, + CAN_BCM_SETTIMER as CAN_BCM_SETTIMER, + CAN_BCM_STARTTIMER as CAN_BCM_STARTTIMER, + CAN_BCM_TX_ANNOUNCE as CAN_BCM_TX_ANNOUNCE, + CAN_BCM_TX_COUNTEVT as CAN_BCM_TX_COUNTEVT, + CAN_BCM_TX_CP_CAN_ID as CAN_BCM_TX_CP_CAN_ID, CAN_BCM_TX_DELETE as CAN_BCM_TX_DELETE, CAN_BCM_TX_EXPIRED as CAN_BCM_TX_EXPIRED, CAN_BCM_TX_READ as CAN_BCM_TX_READ, + CAN_BCM_TX_RESET_MULTI_IDX as CAN_BCM_TX_RESET_MULTI_IDX, CAN_BCM_TX_SEND as CAN_BCM_TX_SEND, CAN_BCM_TX_SETUP as CAN_BCM_TX_SETUP, CAN_BCM_TX_STATUS as CAN_BCM_TX_STATUS, @@ -374,48 +195,337 @@ CAN_ERR_FLAG as CAN_ERR_FLAG, CAN_ERR_MASK as CAN_ERR_MASK, CAN_ISOTP as CAN_ISOTP, + CAN_J1939 as CAN_J1939, CAN_RAW as CAN_RAW, CAN_RAW_ERR_FILTER as CAN_RAW_ERR_FILTER, CAN_RAW_FD_FRAMES as CAN_RAW_FD_FRAMES, CAN_RAW_FILTER as CAN_RAW_FILTER, + CAN_RAW_JOIN_FILTERS as CAN_RAW_JOIN_FILTERS, CAN_RAW_LOOPBACK as CAN_RAW_LOOPBACK, CAN_RAW_RECV_OWN_MSGS as CAN_RAW_RECV_OWN_MSGS, CAN_RTR_FLAG as CAN_RTR_FLAG, CAN_SFF_MASK as CAN_SFF_MASK, + CAPI as CAPI, + CMSG_LEN as CMSG_LEN, + CMSG_SPACE as CMSG_SPACE, + EAGAIN as EAGAIN, + EAI_ADDRFAMILY as EAI_ADDRFAMILY, + EAI_AGAIN as EAI_AGAIN, + EAI_BADFLAGS as EAI_BADFLAGS, + EAI_BADHINTS as EAI_BADHINTS, + EAI_FAIL as EAI_FAIL, + EAI_FAMILY as EAI_FAMILY, + EAI_MAX as EAI_MAX, + EAI_MEMORY as EAI_MEMORY, + EAI_NODATA as EAI_NODATA, + EAI_NONAME as EAI_NONAME, + EAI_OVERFLOW as EAI_OVERFLOW, + EAI_PROTOCOL as EAI_PROTOCOL, + EAI_SERVICE as EAI_SERVICE, + EAI_SOCKTYPE as EAI_SOCKTYPE, + EAI_SYSTEM as EAI_SYSTEM, + EBADF as EBADF, + ETH_P_ALL as ETH_P_ALL, + ETHERTYPE_ARP as ETHERTYPE_ARP, + ETHERTYPE_IP as ETHERTYPE_IP, + ETHERTYPE_IPV6 as ETHERTYPE_IPV6, + ETHERTYPE_VLAN as ETHERTYPE_VLAN, + EWOULDBLOCK as EWOULDBLOCK, + FD_ACCEPT as FD_ACCEPT, + FD_CLOSE as FD_CLOSE, + FD_CLOSE_BIT as FD_CLOSE_BIT, + FD_CONNECT as FD_CONNECT, + FD_CONNECT_BIT as FD_CONNECT_BIT, + FD_READ as FD_READ, + FD_SETSIZE as FD_SETSIZE, + FD_WRITE as FD_WRITE, + HCI_DATA_DIR as HCI_DATA_DIR, + HCI_FILTER as HCI_FILTER, + HCI_TIME_STAMP as HCI_TIME_STAMP, + INADDR_ALLHOSTS_GROUP as INADDR_ALLHOSTS_GROUP, + INADDR_ANY as INADDR_ANY, + INADDR_BROADCAST as INADDR_BROADCAST, + INADDR_LOOPBACK as INADDR_LOOPBACK, + INADDR_MAX_LOCAL_GROUP as INADDR_MAX_LOCAL_GROUP, + INADDR_NONE as INADDR_NONE, + INADDR_UNSPEC_GROUP as INADDR_UNSPEC_GROUP, + INFINITE as INFINITE, IOCTL_VM_SOCKETS_GET_LOCAL_CID as IOCTL_VM_SOCKETS_GET_LOCAL_CID, + IP_ADD_MEMBERSHIP as IP_ADD_MEMBERSHIP, + IP_ADD_SOURCE_MEMBERSHIP as IP_ADD_SOURCE_MEMBERSHIP, + IP_BLOCK_SOURCE as IP_BLOCK_SOURCE, + IP_DEFAULT_MULTICAST_LOOP as IP_DEFAULT_MULTICAST_LOOP, + IP_DEFAULT_MULTICAST_TTL as IP_DEFAULT_MULTICAST_TTL, + IP_DROP_MEMBERSHIP as IP_DROP_MEMBERSHIP, + IP_DROP_SOURCE_MEMBERSHIP as IP_DROP_SOURCE_MEMBERSHIP, + IP_HDRINCL as IP_HDRINCL, + IP_MAX_MEMBERSHIPS as IP_MAX_MEMBERSHIPS, + IP_MULTICAST_IF as IP_MULTICAST_IF, + IP_MULTICAST_LOOP as IP_MULTICAST_LOOP, + IP_MULTICAST_TTL as IP_MULTICAST_TTL, + IP_OPTIONS as IP_OPTIONS, + IP_PKTINFO as IP_PKTINFO, + IP_RECVDSTADDR as IP_RECVDSTADDR, + IP_RECVOPTS as IP_RECVOPTS, + IP_RECVRETOPTS as IP_RECVRETOPTS, + IP_RECVTOS as IP_RECVTOS, + IP_RETOPTS as IP_RETOPTS, + IP_TOS as IP_TOS, + IP_TRANSPARENT as IP_TRANSPARENT, + IP_TTL as IP_TTL, + 
IP_UNBLOCK_SOURCE as IP_UNBLOCK_SOURCE, + IPPORT_RESERVED as IPPORT_RESERVED, + IPPORT_USERRESERVED as IPPORT_USERRESERVED, + IPPROTO_AH as IPPROTO_AH, + IPPROTO_CBT as IPPROTO_CBT, + IPPROTO_DSTOPTS as IPPROTO_DSTOPTS, + IPPROTO_EGP as IPPROTO_EGP, + IPPROTO_EON as IPPROTO_EON, + IPPROTO_ESP as IPPROTO_ESP, + IPPROTO_FRAGMENT as IPPROTO_FRAGMENT, + IPPROTO_GGP as IPPROTO_GGP, + IPPROTO_GRE as IPPROTO_GRE, + IPPROTO_HELLO as IPPROTO_HELLO, + IPPROTO_HOPOPTS as IPPROTO_HOPOPTS, + IPPROTO_ICLFXBM as IPPROTO_ICLFXBM, + IPPROTO_ICMP as IPPROTO_ICMP, + IPPROTO_ICMPV6 as IPPROTO_ICMPV6, + IPPROTO_IDP as IPPROTO_IDP, + IPPROTO_IGMP as IPPROTO_IGMP, + IPPROTO_IGP as IPPROTO_IGP, + IPPROTO_IP as IPPROTO_IP, + IPPROTO_IPCOMP as IPPROTO_IPCOMP, + IPPROTO_IPIP as IPPROTO_IPIP, + IPPROTO_IPV4 as IPPROTO_IPV4, + IPPROTO_L2TP as IPPROTO_L2TP, + IPPROTO_MAX as IPPROTO_MAX, + IPPROTO_MOBILE as IPPROTO_MOBILE, + IPPROTO_MPTCP as IPPROTO_MPTCP, + IPPROTO_ND as IPPROTO_ND, + IPPROTO_NONE as IPPROTO_NONE, + IPPROTO_PGM as IPPROTO_PGM, + IPPROTO_PIM as IPPROTO_PIM, + IPPROTO_PUP as IPPROTO_PUP, + IPPROTO_RAW as IPPROTO_RAW, + IPPROTO_RDP as IPPROTO_RDP, + IPPROTO_ROUTING as IPPROTO_ROUTING, + IPPROTO_RSVP as IPPROTO_RSVP, + IPPROTO_SCTP as IPPROTO_SCTP, + IPPROTO_ST as IPPROTO_ST, + IPPROTO_TCP as IPPROTO_TCP, + IPPROTO_TP as IPPROTO_TP, + IPPROTO_UDP as IPPROTO_UDP, + IPPROTO_UDPLITE as IPPROTO_UDPLITE, + IPPROTO_XTP as IPPROTO_XTP, + IPV6_CHECKSUM as IPV6_CHECKSUM, IPV6_DONTFRAG as IPV6_DONTFRAG, + IPV6_DSTOPTS as IPV6_DSTOPTS, + IPV6_HOPLIMIT as IPV6_HOPLIMIT, + IPV6_HOPOPTS as IPV6_HOPOPTS, + IPV6_JOIN_GROUP as IPV6_JOIN_GROUP, + IPV6_LEAVE_GROUP as IPV6_LEAVE_GROUP, + IPV6_MULTICAST_HOPS as IPV6_MULTICAST_HOPS, + IPV6_MULTICAST_IF as IPV6_MULTICAST_IF, + IPV6_MULTICAST_LOOP as IPV6_MULTICAST_LOOP, + IPV6_NEXTHOP as IPV6_NEXTHOP, IPV6_PATHMTU as IPV6_PATHMTU, + IPV6_PKTINFO as IPV6_PKTINFO, + IPV6_RECVDSTOPTS as IPV6_RECVDSTOPTS, + IPV6_RECVHOPLIMIT as IPV6_RECVHOPLIMIT, + IPV6_RECVHOPOPTS as IPV6_RECVHOPOPTS, IPV6_RECVPATHMTU as IPV6_RECVPATHMTU, - IP_TRANSPARENT as IP_TRANSPARENT, + IPV6_RECVPKTINFO as IPV6_RECVPKTINFO, + IPV6_RECVRTHDR as IPV6_RECVRTHDR, + IPV6_RECVTCLASS as IPV6_RECVTCLASS, + IPV6_RTHDR as IPV6_RTHDR, + IPV6_RTHDR_TYPE_0 as IPV6_RTHDR_TYPE_0, + IPV6_RTHDRDSTOPTS as IPV6_RTHDRDSTOPTS, + IPV6_TCLASS as IPV6_TCLASS, + IPV6_UNICAST_HOPS as IPV6_UNICAST_HOPS, + IPV6_USE_MIN_MTU as IPV6_USE_MIN_MTU, + IPV6_V6ONLY as IPV6_V6ONLY, + J1939_EE_INFO_NONE as J1939_EE_INFO_NONE, + J1939_EE_INFO_TX_ABORT as J1939_EE_INFO_TX_ABORT, + J1939_FILTER_MAX as J1939_FILTER_MAX, + J1939_IDLE_ADDR as J1939_IDLE_ADDR, + J1939_MAX_UNICAST_ADDR as J1939_MAX_UNICAST_ADDR, + J1939_NLA_BYTES_ACKED as J1939_NLA_BYTES_ACKED, + J1939_NLA_PAD as J1939_NLA_PAD, + J1939_NO_ADDR as J1939_NO_ADDR, + J1939_NO_NAME as J1939_NO_NAME, + J1939_NO_PGN as J1939_NO_PGN, + J1939_PGN_ADDRESS_CLAIMED as J1939_PGN_ADDRESS_CLAIMED, + J1939_PGN_ADDRESS_COMMANDED as J1939_PGN_ADDRESS_COMMANDED, + J1939_PGN_MAX as J1939_PGN_MAX, + J1939_PGN_PDU1_MAX as J1939_PGN_PDU1_MAX, + J1939_PGN_REQUEST as J1939_PGN_REQUEST, + LOCAL_PEERCRED as LOCAL_PEERCRED, + MSG_BCAST as MSG_BCAST, MSG_CMSG_CLOEXEC as MSG_CMSG_CLOEXEC, MSG_CONFIRM as MSG_CONFIRM, + MSG_CTRUNC as MSG_CTRUNC, + MSG_DONTROUTE as MSG_DONTROUTE, + MSG_DONTWAIT as MSG_DONTWAIT, + MSG_EOF as MSG_EOF, + MSG_EOR as MSG_EOR, + MSG_ERRQUEUE as MSG_ERRQUEUE, MSG_FASTOPEN as MSG_FASTOPEN, + MSG_MCAST as MSG_MCAST, MSG_MORE as MSG_MORE, MSG_NOSIGNAL as MSG_NOSIGNAL, + MSG_NOTIFICATION as 
MSG_NOTIFICATION, + MSG_OOB as MSG_OOB, + MSG_PEEK as MSG_PEEK, + MSG_TRUNC as MSG_TRUNC, + MSG_WAITALL as MSG_WAITALL, NETLINK_CRYPTO as NETLINK_CRYPTO, + NETLINK_DNRTMSG as NETLINK_DNRTMSG, + NETLINK_FIREWALL as NETLINK_FIREWALL, + NETLINK_IP6_FW as NETLINK_IP6_FW, + NETLINK_NFLOG as NETLINK_NFLOG, + NETLINK_ROUTE as NETLINK_ROUTE, + NETLINK_USERSOCK as NETLINK_USERSOCK, + NETLINK_XFRM as NETLINK_XFRM, + NI_DGRAM as NI_DGRAM, + NI_MAXHOST as NI_MAXHOST, + NI_MAXSERV as NI_MAXSERV, + NI_NAMEREQD as NI_NAMEREQD, + NI_NOFQDN as NI_NOFQDN, + NI_NUMERICHOST as NI_NUMERICHOST, + NI_NUMERICSERV as NI_NUMERICSERV, + PACKET_BROADCAST as PACKET_BROADCAST, + PACKET_FASTROUTE as PACKET_FASTROUTE, + PACKET_HOST as PACKET_HOST, + PACKET_LOOPBACK as PACKET_LOOPBACK, + PACKET_MULTICAST as PACKET_MULTICAST, + PACKET_OTHERHOST as PACKET_OTHERHOST, + PACKET_OUTGOING as PACKET_OUTGOING, PF_CAN as PF_CAN, PF_PACKET as PF_PACKET, PF_RDS as PF_RDS, + PF_SYSTEM as PF_SYSTEM, + POLLERR as POLLERR, + POLLHUP as POLLHUP, + POLLIN as POLLIN, + POLLMSG as POLLMSG, + POLLNVAL as POLLNVAL, + POLLOUT as POLLOUT, + POLLPRI as POLLPRI, + POLLRDBAND as POLLRDBAND, + POLLRDNORM as POLLRDNORM, + POLLWRNORM as POLLWRNORM, + RCVALL_MAX as RCVALL_MAX, + RCVALL_OFF as RCVALL_OFF, + RCVALL_ON as RCVALL_ON, + RCVALL_SOCKETLEVELONLY as RCVALL_SOCKETLEVELONLY, SCM_CREDENTIALS as SCM_CREDENTIALS, - SOCK_NONBLOCK as SOCK_NONBLOCK, - SOL_ALG as SOL_ALG, - SOL_CAN_BASE as SOL_CAN_BASE, - SOL_CAN_RAW as SOL_CAN_RAW, - SOL_TIPC as SOL_TIPC, + SCM_CREDS as SCM_CREDS, + SCM_J1939_DEST_ADDR as SCM_J1939_DEST_ADDR, + SCM_J1939_DEST_NAME as SCM_J1939_DEST_NAME, + SCM_J1939_ERRQUEUE as SCM_J1939_ERRQUEUE, + SCM_J1939_PRIO as SCM_J1939_PRIO, + SCM_RIGHTS as SCM_RIGHTS, + SHUT_RD as SHUT_RD, + SHUT_RDWR as SHUT_RDWR, + SHUT_WR as SHUT_WR, + SIO_KEEPALIVE_VALS as SIO_KEEPALIVE_VALS, + SIO_LOOPBACK_FAST_PATH as SIO_LOOPBACK_FAST_PATH, + SIO_RCVALL as SIO_RCVALL, + SIOCGIFINDEX as SIOCGIFINDEX, + SIOCGIFNAME as SIOCGIFNAME, + SO_ACCEPTCONN as SO_ACCEPTCONN, SO_BINDTODEVICE as SO_BINDTODEVICE, + SO_BROADCAST as SO_BROADCAST, + SO_DEBUG as SO_DEBUG, SO_DOMAIN as SO_DOMAIN, + SO_DONTROUTE as SO_DONTROUTE, + SO_ERROR as SO_ERROR, + SO_EXCLUSIVEADDRUSE as SO_EXCLUSIVEADDRUSE, + SO_INCOMING_CPU as SO_INCOMING_CPU, + SO_J1939_ERRQUEUE as SO_J1939_ERRQUEUE, + SO_J1939_FILTER as SO_J1939_FILTER, + SO_J1939_PROMISC as SO_J1939_PROMISC, + SO_J1939_SEND_PRIO as SO_J1939_SEND_PRIO, + SO_KEEPALIVE as SO_KEEPALIVE, + SO_LINGER as SO_LINGER, SO_MARK as SO_MARK, + SO_OOBINLINE as SO_OOBINLINE, SO_PASSCRED as SO_PASSCRED, SO_PASSSEC as SO_PASSSEC, SO_PEERCRED as SO_PEERCRED, SO_PEERSEC as SO_PEERSEC, SO_PRIORITY as SO_PRIORITY, SO_PROTOCOL as SO_PROTOCOL, + SO_RCVBUF as SO_RCVBUF, + SO_RCVLOWAT as SO_RCVLOWAT, + SO_RCVTIMEO as SO_RCVTIMEO, + SO_REUSEADDR as SO_REUSEADDR, + SO_REUSEPORT as SO_REUSEPORT, + SO_SETFIB as SO_SETFIB, + SO_SNDBUF as SO_SNDBUF, + SO_SNDLOWAT as SO_SNDLOWAT, + SO_SNDTIMEO as SO_SNDTIMEO, + SO_TYPE as SO_TYPE, + SO_USELOOPBACK as SO_USELOOPBACK, SO_VM_SOCKETS_BUFFER_MAX_SIZE as SO_VM_SOCKETS_BUFFER_MAX_SIZE, SO_VM_SOCKETS_BUFFER_MIN_SIZE as SO_VM_SOCKETS_BUFFER_MIN_SIZE, SO_VM_SOCKETS_BUFFER_SIZE as SO_VM_SOCKETS_BUFFER_SIZE, + SOCK_CLOEXEC as SOCK_CLOEXEC, + SOCK_DGRAM as SOCK_DGRAM, + SOCK_NONBLOCK as SOCK_NONBLOCK, + SOCK_RAW as SOCK_RAW, + SOCK_RDM as SOCK_RDM, + SOCK_SEQPACKET as SOCK_SEQPACKET, + SOCK_STREAM as SOCK_STREAM, + SOL_ALG as SOL_ALG, + SOL_CAN_BASE as SOL_CAN_BASE, + SOL_CAN_RAW as SOL_CAN_RAW, + SOL_HCI as SOL_HCI, + SOL_IP 
as SOL_IP, + SOL_RDS as SOL_RDS, + SOL_SOCKET as SOL_SOCKET, + SOL_TCP as SOL_TCP, + SOL_TIPC as SOL_TIPC, + SOL_UDP as SOL_UDP, + SOMAXCONN as SOMAXCONN, + SYSPROTO_CONTROL as SYSPROTO_CONTROL, + TCP_CC_INFO as TCP_CC_INFO, TCP_CONGESTION as TCP_CONGESTION, + TCP_CORK as TCP_CORK, + TCP_DEFER_ACCEPT as TCP_DEFER_ACCEPT, + TCP_FASTOPEN as TCP_FASTOPEN, + TCP_FASTOPEN_CONNECT as TCP_FASTOPEN_CONNECT, + TCP_FASTOPEN_KEY as TCP_FASTOPEN_KEY, + TCP_FASTOPEN_NO_COOKIE as TCP_FASTOPEN_NO_COOKIE, + TCP_INFO as TCP_INFO, + TCP_INQ as TCP_INQ, + TCP_KEEPALIVE as TCP_KEEPALIVE, + TCP_KEEPCNT as TCP_KEEPCNT, + TCP_KEEPIDLE as TCP_KEEPIDLE, + TCP_KEEPINTVL as TCP_KEEPINTVL, + TCP_LINGER2 as TCP_LINGER2, + TCP_MAXSEG as TCP_MAXSEG, + TCP_MD5SIG as TCP_MD5SIG, + TCP_MD5SIG_EXT as TCP_MD5SIG_EXT, + TCP_NODELAY as TCP_NODELAY, + TCP_NOTSENT_LOWAT as TCP_NOTSENT_LOWAT, + TCP_QUEUE_SEQ as TCP_QUEUE_SEQ, + TCP_QUICKACK as TCP_QUICKACK, + TCP_REPAIR as TCP_REPAIR, + TCP_REPAIR_OPTIONS as TCP_REPAIR_OPTIONS, + TCP_REPAIR_QUEUE as TCP_REPAIR_QUEUE, + TCP_REPAIR_WINDOW as TCP_REPAIR_WINDOW, + TCP_SAVE_SYN as TCP_SAVE_SYN, + TCP_SAVED_SYN as TCP_SAVED_SYN, + TCP_SYNCNT as TCP_SYNCNT, + TCP_THIN_DUPACK as TCP_THIN_DUPACK, + TCP_THIN_LINEAR_TIMEOUTS as TCP_THIN_LINEAR_TIMEOUTS, + TCP_TIMESTAMP as TCP_TIMESTAMP, + TCP_TX_DELAY as TCP_TX_DELAY, + TCP_ULP as TCP_ULP, TCP_USER_TIMEOUT as TCP_USER_TIMEOUT, + TCP_WINDOW_CLAMP as TCP_WINDOW_CLAMP, + TCP_ZEROCOPY_RECEIVE as TCP_ZEROCOPY_RECEIVE, TIPC_ADDR_ID as TIPC_ADDR_ID, TIPC_ADDR_NAME as TIPC_ADDR_NAME, TIPC_ADDR_NAMESEQ as TIPC_ADDR_NAMESEQ, @@ -431,100 +541,20 @@ TIPC_NODE_SCOPE as TIPC_NODE_SCOPE, TIPC_PUBLISHED as TIPC_PUBLISHED, TIPC_SRC_DROPPABLE as TIPC_SRC_DROPPABLE, - TIPC_SUBSCR_TIMEOUT as TIPC_SUBSCR_TIMEOUT, TIPC_SUB_CANCEL as TIPC_SUB_CANCEL, TIPC_SUB_PORTS as TIPC_SUB_PORTS, TIPC_SUB_SERVICE as TIPC_SUB_SERVICE, + TIPC_SUBSCR_TIMEOUT as TIPC_SUBSCR_TIMEOUT, TIPC_TOP_SRV as TIPC_TOP_SRV, TIPC_WAIT_FOREVER as TIPC_WAIT_FOREVER, TIPC_WITHDRAWN as TIPC_WITHDRAWN, TIPC_ZONE_SCOPE as TIPC_ZONE_SCOPE, + UDPLITE_RECV_CSCOV as UDPLITE_RECV_CSCOV, + UDPLITE_SEND_CSCOV as UDPLITE_SEND_CSCOV, + VM_SOCKETS_INVALID_VERSION as VM_SOCKETS_INVALID_VERSION, VMADDR_CID_ANY as VMADDR_CID_ANY, VMADDR_CID_HOST as VMADDR_CID_HOST, VMADDR_PORT_ANY as VMADDR_PORT_ANY, - VM_SOCKETS_INVALID_VERSION as VM_SOCKETS_INVALID_VERSION, - MSG_BCAST as MSG_BCAST, - MSG_MCAST as MSG_MCAST, - RCVALL_MAX as RCVALL_MAX, - RCVALL_OFF as RCVALL_OFF, - RCVALL_ON as RCVALL_ON, - RCVALL_SOCKETLEVELONLY as RCVALL_SOCKETLEVELONLY, - SIO_KEEPALIVE_VALS as SIO_KEEPALIVE_VALS, - SIO_LOOPBACK_FAST_PATH as SIO_LOOPBACK_FAST_PATH, - SIO_RCVALL as SIO_RCVALL, - SO_EXCLUSIVEADDRUSE as SO_EXCLUSIVEADDRUSE, - HCI_FILTER as HCI_FILTER, - BTPROTO_SCO as BTPROTO_SCO, - BTPROTO_HCI as BTPROTO_HCI, - HCI_TIME_STAMP as HCI_TIME_STAMP, - SOL_RDS as SOL_RDS, - BTPROTO_L2CAP as BTPROTO_L2CAP, - BTPROTO_RFCOMM as BTPROTO_RFCOMM, - HCI_DATA_DIR as HCI_DATA_DIR, - SOL_HCI as SOL_HCI, - CAN_BCM_RX_ANNOUNCE_RESUME as CAN_BCM_RX_ANNOUNCE_RESUME, - CAN_BCM_RX_CHECK_DLC as CAN_BCM_RX_CHECK_DLC, - CAN_BCM_RX_FILTER_ID as CAN_BCM_RX_FILTER_ID, - CAN_BCM_RX_NO_AUTOTIMER as CAN_BCM_RX_NO_AUTOTIMER, - CAN_BCM_RX_RTR_FRAME as CAN_BCM_RX_RTR_FRAME, - CAN_BCM_SETTIMER as CAN_BCM_SETTIMER, - CAN_BCM_STARTTIMER as CAN_BCM_STARTTIMER, - CAN_BCM_TX_ANNOUNCE as CAN_BCM_TX_ANNOUNCE, - CAN_BCM_TX_COUNTEVT as CAN_BCM_TX_COUNTEVT, - CAN_BCM_TX_CP_CAN_ID as CAN_BCM_TX_CP_CAN_ID, - CAN_BCM_TX_RESET_MULTI_IDX as CAN_BCM_TX_RESET_MULTI_IDX, - 
IPPROTO_CBT as IPPROTO_CBT, - IPPROTO_ICLFXBM as IPPROTO_ICLFXBM, - IPPROTO_IGP as IPPROTO_IGP, - IPPROTO_L2TP as IPPROTO_L2TP, - IPPROTO_PGM as IPPROTO_PGM, - IPPROTO_RDP as IPPROTO_RDP, - IPPROTO_ST as IPPROTO_ST, - AF_QIPCRTR as AF_QIPCRTR, - CAN_BCM_CAN_FD_FRAME as CAN_BCM_CAN_FD_FRAME, - IPPROTO_MOBILE as IPPROTO_MOBILE, - IPV6_USE_MIN_MTU as IPV6_USE_MIN_MTU, - MSG_NOTIFICATION as MSG_NOTIFICATION, - SO_SETFIB as SO_SETFIB, - CAN_J1939 as CAN_J1939, - CAN_RAW_JOIN_FILTERS as CAN_RAW_JOIN_FILTERS, - IPPROTO_UDPLITE as IPPROTO_UDPLITE, - J1939_EE_INFO_NONE as J1939_EE_INFO_NONE, - J1939_EE_INFO_TX_ABORT as J1939_EE_INFO_TX_ABORT, - J1939_FILTER_MAX as J1939_FILTER_MAX, - J1939_IDLE_ADDR as J1939_IDLE_ADDR, - J1939_MAX_UNICAST_ADDR as J1939_MAX_UNICAST_ADDR, - J1939_NLA_BYTES_ACKED as J1939_NLA_BYTES_ACKED, - J1939_NLA_PAD as J1939_NLA_PAD, - J1939_NO_ADDR as J1939_NO_ADDR, - J1939_NO_NAME as J1939_NO_NAME, - J1939_NO_PGN as J1939_NO_PGN, - J1939_PGN_ADDRESS_CLAIMED as J1939_PGN_ADDRESS_CLAIMED, - J1939_PGN_ADDRESS_COMMANDED as J1939_PGN_ADDRESS_COMMANDED, - J1939_PGN_MAX as J1939_PGN_MAX, - J1939_PGN_PDU1_MAX as J1939_PGN_PDU1_MAX, - J1939_PGN_REQUEST as J1939_PGN_REQUEST, - SCM_J1939_DEST_ADDR as SCM_J1939_DEST_ADDR, - SCM_J1939_DEST_NAME as SCM_J1939_DEST_NAME, - SCM_J1939_ERRQUEUE as SCM_J1939_ERRQUEUE, - SCM_J1939_PRIO as SCM_J1939_PRIO, - SO_J1939_ERRQUEUE as SO_J1939_ERRQUEUE, - SO_J1939_FILTER as SO_J1939_FILTER, - SO_J1939_PROMISC as SO_J1939_PROMISC, - SO_J1939_SEND_PRIO as SO_J1939_SEND_PRIO, - UDPLITE_RECV_CSCOV as UDPLITE_RECV_CSCOV, - UDPLITE_SEND_CSCOV as UDPLITE_SEND_CSCOV, - IP_RECVTOS as IP_RECVTOS, - TCP_KEEPALIVE as TCP_KEEPALIVE, - SO_INCOMING_CPU as SO_INCOMING_CPU, - FD_ACCEPT as FD_ACCEPT, - FD_CLOSE as FD_CLOSE, - FD_CLOSE_BIT as FD_CLOSE_BIT, - FD_CONNECT as FD_CONNECT, - FD_CONNECT_BIT as FD_CONNECT_BIT, - FD_READ as FD_READ, - FD_WRITE as FD_WRITE, - INFINITE as INFINITE, WSA_FLAG_OVERLAPPED as WSA_FLAG_OVERLAPPED, WSA_INVALID_HANDLE as WSA_INVALID_HANDLE, WSA_INVALID_PARAMETER as WSA_INVALID_PARAMETER, @@ -534,35 +564,4 @@ WSA_OPERATION_ABORTED as WSA_OPERATION_ABORTED, WSA_WAIT_FAILED as WSA_WAIT_FAILED, WSA_WAIT_TIMEOUT as WSA_WAIT_TIMEOUT, - # python 3.12 - ETHERTYPE_ARP as ETHERTYPE_ARP, - ETHERTYPE_IP as ETHERTYPE_IP, - ETHERTYPE_IPV6 as ETHERTYPE_IPV6, - ETHERTYPE_VLAN as ETHERTYPE_VLAN, - ETH_P_ALL as ETH_P_ALL, - IP_ADD_SOURCE_MEMBERSHIP as IP_ADD_SOURCE_MEMBERSHIP, - IP_BLOCK_SOURCE as IP_BLOCK_SOURCE, - IP_DROP_SOURCE_MEMBERSHIP as IP_DROP_SOURCE_MEMBERSHIP, - IP_PKTINFO as IP_PKTINFO, - IP_UNBLOCK_SOURCE as IP_UNBLOCK_SOURCE, - TCP_CC_INFO as TCP_CC_INFO, - TCP_FASTOPEN_CONNECT as TCP_FASTOPEN_CONNECT, - TCP_FASTOPEN_KEY as TCP_FASTOPEN_KEY, - TCP_FASTOPEN_NO_COOKIE as TCP_FASTOPEN_NO_COOKIE, - TCP_INQ as TCP_INQ, - TCP_MD5SIG as TCP_MD5SIG, - TCP_MD5SIG_EXT as TCP_MD5SIG_EXT, - TCP_QUEUE_SEQ as TCP_QUEUE_SEQ, - TCP_REPAIR as TCP_REPAIR, - TCP_REPAIR_OPTIONS as TCP_REPAIR_OPTIONS, - TCP_REPAIR_QUEUE as TCP_REPAIR_QUEUE, - TCP_REPAIR_WINDOW as TCP_REPAIR_WINDOW, - TCP_SAVED_SYN as TCP_SAVED_SYN, - TCP_SAVE_SYN as TCP_SAVE_SYN, - TCP_THIN_DUPACK as TCP_THIN_DUPACK, - TCP_THIN_LINEAR_TIMEOUTS as TCP_THIN_LINEAR_TIMEOUTS, - TCP_TIMESTAMP as TCP_TIMESTAMP, - TCP_TX_DELAY as TCP_TX_DELAY, - TCP_ULP as TCP_ULP, - TCP_ZEROCOPY_RECEIVE as TCP_ZEROCOPY_RECEIVE, ) diff --git a/trio/testing/__init__.py b/trio/testing/__init__.py index 202c501483..fa683e1145 100644 --- a/trio/testing/__init__.py +++ b/trio/testing/__init__.py @@ -1,40 +1,34 @@ # Uses `from x 
import y as y` for compatibility with `pyright --verifytypes` (#2625) from .._core import ( - wait_all_tasks_blocked as wait_all_tasks_blocked, MockClock as MockClock, + wait_all_tasks_blocked as wait_all_tasks_blocked, ) - -from ._trio_test import trio_test as trio_test - -from ._checkpoints import ( - assert_checkpoints as assert_checkpoints, - assert_no_checkpoints as assert_no_checkpoints, -) - -from ._sequencer import Sequencer as Sequencer - +from .._util import fixup_module_metadata from ._check_streams import ( + check_half_closeable_stream as check_half_closeable_stream, check_one_way_stream as check_one_way_stream, check_two_way_stream as check_two_way_stream, - check_half_closeable_stream as check_half_closeable_stream, ) - +from ._checkpoints import ( + assert_checkpoints as assert_checkpoints, + assert_no_checkpoints as assert_no_checkpoints, +) from ._memory_streams import ( - MemorySendStream as MemorySendStream, MemoryReceiveStream as MemoryReceiveStream, - memory_stream_pump as memory_stream_pump, - memory_stream_one_way_pair as memory_stream_one_way_pair, - memory_stream_pair as memory_stream_pair, + MemorySendStream as MemorySendStream, lockstep_stream_one_way_pair as lockstep_stream_one_way_pair, lockstep_stream_pair as lockstep_stream_pair, + memory_stream_one_way_pair as memory_stream_one_way_pair, + memory_stream_pair as memory_stream_pair, + memory_stream_pump as memory_stream_pump, ) - from ._network import open_stream_to_socket_listener as open_stream_to_socket_listener +from ._sequencer import Sequencer as Sequencer +from ._trio_test import trio_test as trio_test ################################################################ -from .._util import fixup_module_metadata fixup_module_metadata(__name__, globals()) del fixup_module_metadata diff --git a/trio/testing/_check_streams.py b/trio/testing/_check_streams.py index 0206f1f737..33d741e670 100644 --- a/trio/testing/_check_streams.py +++ b/trio/testing/_check_streams.py @@ -1,11 +1,11 @@ # Generic stream tests -from contextlib import contextmanager import random +from contextlib import contextmanager from .. import _core +from .._abc import HalfCloseableStream, ReceiveStream, SendStream, Stream from .._highlevel_generic import aclose_forcefully -from .._abc import SendStream, ReceiveStream, Stream, HalfCloseableStream from ._checkpoints import assert_checkpoints diff --git a/trio/testing/_fake_net.py b/trio/testing/_fake_net.py index 9df5ab5b6c..f2d40fb7ff 100644 --- a/trio/testing/_fake_net.py +++ b/trio/testing/_fake_net.py @@ -6,13 +6,14 @@ # - TCP # - UDP broadcast -import trio -import attr -import ipaddress import errno +import ipaddress import os -from typing import Union, Optional +from typing import Optional, Union +import attr + +import trio from trio._util import Final, NoPublicConstructor IPAddress = Union[ipaddress.IPv4Address, ipaddress.IPv6Address] diff --git a/trio/testing/_memory_streams.py b/trio/testing/_memory_streams.py index 99ad7dfcaf..38e8e54de8 100644 --- a/trio/testing/_memory_streams.py +++ b/trio/testing/_memory_streams.py @@ -1,9 +1,8 @@ import operator -from .. import _core +from .. import _core, _util from .._highlevel_generic import StapledStream -from .. 
import _util -from ..abc import SendStream, ReceiveStream +from ..abc import ReceiveStream, SendStream ################################################################ # In-memory streams - Unbounded buffer version diff --git a/trio/testing/_sequencer.py b/trio/testing/_sequencer.py index 3f4bda9cfc..137fd3c522 100644 --- a/trio/testing/_sequencer.py +++ b/trio/testing/_sequencer.py @@ -6,9 +6,7 @@ import attr -from .. import _core -from .. import _util -from .. import Event +from .. import Event, _core, _util if TYPE_CHECKING: from collections.abc import AsyncIterator diff --git a/trio/testing/_trio_test.py b/trio/testing/_trio_test.py index 4fcaeae372..b4ef69ef09 100644 --- a/trio/testing/_trio_test.py +++ b/trio/testing/_trio_test.py @@ -1,4 +1,4 @@ -from functools import wraps, partial +from functools import partial, wraps from .. import _core from ..abc import Clock, Instrument diff --git a/trio/to_thread.py b/trio/to_thread.py index f2b5ec659e..45ea5b480b 100644 --- a/trio/to_thread.py +++ b/trio/to_thread.py @@ -1,5 +1,4 @@ -from ._threads import to_thread_run_sync as run_sync -from ._threads import current_default_thread_limiter +from ._threads import current_default_thread_limiter, to_thread_run_sync as run_sync # need to use __all__ for pyright --verifytypes to see re-exports when renaming them __all__ = ["current_default_thread_limiter", "run_sync"] From 755f520b907c0ae793b5bb34cc57117eaaaedf41 Mon Sep 17 00:00:00 2001 From: jakkdl Date: Fri, 7 Jul 2023 11:35:31 +0200 Subject: [PATCH 083/162] add isort commit to .git-blame-ignore-revs --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000000..ff63b26c27 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# sorting all imports with isort +bec33b2a490e4d2a61b0d4d27da8df782ebee4c0 From 02e91c2395dd300354efce5504f152fa1f324657 Mon Sep 17 00:00:00 2001 From: jakkdl Date: Fri, 7 Jul 2023 14:56:21 +0200 Subject: [PATCH 084/162] changed commit ref in .git-blame-ignore-revs as it got changed when rebased onto master --- .git-blame-ignore-revs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index ff63b26c27..1d3079ad5a 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,2 +1,2 @@ # sorting all imports with isort -bec33b2a490e4d2a61b0d4d27da8df782ebee4c0 +933f77b96f0092e1baab4474a9208fc2e379aa32 From 6d7104735b5597b2b6c62dde88718310e22f77a5 Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Mon, 10 Jul 2023 19:44:58 +1000 Subject: [PATCH 085/162] Make type completeness results consistent for different platforms (#2691) * Use Linux as the platform for type consistency tests * Sort sections of the verify_types file. * Always use Python 3.8 as the Python version to test --- trio/_tests/check_type_completeness.py | 17 +- trio/_tests/verify_types.json | 306 ++++++++++++------------- 2 files changed, 169 insertions(+), 154 deletions(-) diff --git a/trio/_tests/check_type_completeness.py b/trio/_tests/check_type_completeness.py index 6d0f43b6b0..7a65a4249e 100755 --- a/trio/_tests/check_type_completeness.py +++ b/trio/_tests/check_type_completeness.py @@ -15,7 +15,15 @@ # removing it from the below call later on. 
def run_pyright(): return subprocess.run( - ["pyright", "--verifytypes=trio", "--outputjson", "--ignoreexternal"], + [ + "pyright", + # Specify a platform and version to keep imported modules consistent. + "--pythonplatform=Linux", + "--pythonversion=3.8", + "--verifytypes=trio", + "--outputjson", + "--ignoreexternal", + ], capture_output=True, ) @@ -148,6 +156,13 @@ def main(args: argparse.Namespace) -> int: new_symbols.append(symbol["name"]) continue + # Ensure order of arrays does not affect result. + new_symbols.sort() + current_result["generalDiagnostics"].sort() + current_result["typeCompleteness"]["modules"].sort( + key=lambda module: module.get("name", "") + ) + current_result["typeCompleteness"]["symbols"] = new_symbols with open(RESULT_FILE, "w") as file: diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 812aa10ca5..57b307d1d9 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -51,209 +51,207 @@ }, "packageName": "trio", "symbols": [ - "trio._core._run._TaskStatusIgnored.__repr__", - "trio._core._run._TaskStatusIgnored.started", - "trio._channel.MemoryReceiveChannel", - "trio._abc.ReceiveChannel", - "trio._abc.ReceiveChannel.__aiter__", - "trio._abc.AsyncResource.aclose", + "trio.__deprecated_attributes__", "trio._abc.AsyncResource.__aenter__", "trio._abc.AsyncResource.__aexit__", - "trio._channel.MemorySendChannel", + "trio._abc.AsyncResource.aclose", + "trio._abc.Channel", + "trio._abc.Clock.current_time", + "trio._abc.Clock.deadline_to_sleep_time", + "trio._abc.Clock.start_clock", + "trio._abc.HalfCloseableStream", + "trio._abc.HalfCloseableStream.send_eof", + "trio._abc.HostnameResolver.getaddrinfo", + "trio._abc.HostnameResolver.getnameinfo", + "trio._abc.Instrument.after_io_wait", + "trio._abc.Instrument.after_run", + "trio._abc.Instrument.after_task_step", + "trio._abc.Instrument.before_io_wait", + "trio._abc.Instrument.before_run", + "trio._abc.Instrument.before_task_step", + "trio._abc.Instrument.task_exited", + "trio._abc.Instrument.task_scheduled", + "trio._abc.Instrument.task_spawned", + "trio._abc.Listener", + "trio._abc.Listener.accept", + "trio._abc.ReceiveChannel", + "trio._abc.ReceiveChannel.__aiter__", + "trio._abc.ReceiveStream", + "trio._abc.ReceiveStream.__aiter__", + "trio._abc.ReceiveStream.__anext__", + "trio._abc.ReceiveStream.receive_some", "trio._abc.SendChannel", + "trio._abc.SendStream", + "trio._abc.SendStream.send_all", + "trio._abc.SendStream.wait_send_all_might_not_block", + "trio._abc.SocketFactory.socket", + "trio._abc.Stream", + "trio._channel.MemoryReceiveChannel", + "trio._channel.MemorySendChannel", + "trio._core._entry_queue.TrioToken.run_sync_soon", + "trio._core._local.RunVar.__repr__", + "trio._core._local.RunVar.get", + "trio._core._local.RunVar.reset", + "trio._core._local.RunVar.set", + "trio._core._mock_clock.MockClock", + "trio._core._mock_clock.MockClock.__init__", + "trio._core._mock_clock.MockClock.__repr__", + "trio._core._mock_clock.MockClock.autojump_threshold", + "trio._core._mock_clock.MockClock.current_time", + "trio._core._mock_clock.MockClock.deadline_to_sleep_time", + "trio._core._mock_clock.MockClock.jump", + "trio._core._mock_clock.MockClock.rate", + "trio._core._mock_clock.MockClock.start_clock", + "trio._core._parking_lot.ParkingLot.__bool__", + "trio._core._parking_lot.ParkingLot.__len__", + "trio._core._parking_lot.ParkingLot.repark_all", + "trio._core._parking_lot.ParkingLot.statistics", + "trio._core._parking_lot.ParkingLot.unpark_all", + 
"trio._core._run.Nursery.__del__", "trio._core._run.Nursery.__init__", "trio._core._run.Nursery.child_tasks", "trio._core._run.Nursery.parent_task", - "trio._core._run.Nursery.start_soon", "trio._core._run.Nursery.start", - "trio._core._run.Nursery.__del__", - "trio.current_effective_deadline", - "trio.current_time", - "trio.run", + "trio._core._run.Nursery.start_soon", + "trio._core._run.Task.__repr__", + "trio._core._run.Task.child_nurseries", + "trio._core._run.Task.context", + "trio._core._run.Task.coro", + "trio._core._run.Task.custom_sleep_data", + "trio._core._run.Task.eventual_parent_nursery", + "trio._core._run.Task.iter_await_frames", + "trio._core._run.Task.name", + "trio._core._run.Task.parent_nursery", + "trio._core._run._TaskStatusIgnored.__repr__", + "trio._core._run._TaskStatusIgnored.started", + "trio._core._unbounded_queue.UnboundedQueue.__aiter__", + "trio._core._unbounded_queue.UnboundedQueue.__anext__", + "trio._core._unbounded_queue.UnboundedQueue.__repr__", + "trio._core._unbounded_queue.UnboundedQueue.empty", + "trio._core._unbounded_queue.UnboundedQueue.get_batch", + "trio._core._unbounded_queue.UnboundedQueue.get_batch_nowait", + "trio._core._unbounded_queue.UnboundedQueue.qsize", + "trio._core._unbounded_queue.UnboundedQueue.statistics", "trio._dtls.DTLSChannel", - "trio._dtls.DTLSChannel.__init__", - "trio._dtls.DTLSChannel.close", "trio._dtls.DTLSChannel.__enter__", "trio._dtls.DTLSChannel.__exit__", + "trio._dtls.DTLSChannel.__init__", "trio._dtls.DTLSChannel.aclose", + "trio._dtls.DTLSChannel.close", "trio._dtls.DTLSChannel.do_handshake", - "trio._dtls.DTLSChannel.send", + "trio._dtls.DTLSChannel.get_cleartext_mtu", "trio._dtls.DTLSChannel.receive", + "trio._dtls.DTLSChannel.send", "trio._dtls.DTLSChannel.set_ciphertext_mtu", - "trio._dtls.DTLSChannel.get_cleartext_mtu", "trio._dtls.DTLSChannel.statistics", - "trio._abc.Channel", - "trio._dtls.DTLSEndpoint.__init__", "trio._dtls.DTLSEndpoint.__del__", - "trio._dtls.DTLSEndpoint.close", "trio._dtls.DTLSEndpoint.__enter__", "trio._dtls.DTLSEndpoint.__exit__", - "trio._dtls.DTLSEndpoint.serve", + "trio._dtls.DTLSEndpoint.__init__", + "trio._dtls.DTLSEndpoint.close", "trio._dtls.DTLSEndpoint.connect", - "trio._dtls.DTLSEndpoint.socket", "trio._dtls.DTLSEndpoint.incoming_packets_buffer", - "trio.open_file", - "trio.wrap_file", + "trio._dtls.DTLSEndpoint.serve", + "trio._dtls.DTLSEndpoint.socket", "trio._highlevel_generic.StapledStream", - "trio._highlevel_generic.StapledStream.send_stream", + "trio._highlevel_generic.StapledStream.aclose", + "trio._highlevel_generic.StapledStream.receive_some", "trio._highlevel_generic.StapledStream.receive_stream", "trio._highlevel_generic.StapledStream.send_all", - "trio._highlevel_generic.StapledStream.wait_send_all_might_not_block", "trio._highlevel_generic.StapledStream.send_eof", - "trio._highlevel_generic.StapledStream.receive_some", - "trio._highlevel_generic.StapledStream.aclose", - "trio._abc.HalfCloseableStream", - "trio._abc.HalfCloseableStream.send_eof", - "trio._abc.Stream", - "trio._abc.SendStream", - "trio._abc.SendStream.send_all", - "trio._abc.SendStream.wait_send_all_might_not_block", - "trio._abc.ReceiveStream", - "trio._abc.ReceiveStream.receive_some", - "trio._abc.ReceiveStream.__aiter__", - "trio._abc.ReceiveStream.__anext__", - "trio.aclose_forcefully", - "trio.open_tcp_listeners", - "trio.serve_tcp", - "trio.open_tcp_stream", - "trio.open_unix_socket", - "trio.serve_listeners", + "trio._highlevel_generic.StapledStream.send_stream", + 
"trio._highlevel_generic.StapledStream.wait_send_all_might_not_block", "trio._highlevel_socket.SocketListener", "trio._highlevel_socket.SocketListener.__init__", "trio._highlevel_socket.SocketListener.accept", "trio._highlevel_socket.SocketListener.aclose", - "trio._abc.Listener", - "trio._abc.Listener.accept", "trio._highlevel_socket.SocketStream", "trio._highlevel_socket.SocketStream.__init__", + "trio._highlevel_socket.SocketStream.aclose", + "trio._highlevel_socket.SocketStream.getsockopt", + "trio._highlevel_socket.SocketStream.receive_some", "trio._highlevel_socket.SocketStream.send_all", - "trio._highlevel_socket.SocketStream.wait_send_all_might_not_block", "trio._highlevel_socket.SocketStream.send_eof", - "trio._highlevel_socket.SocketStream.receive_some", - "trio._highlevel_socket.SocketStream.aclose", "trio._highlevel_socket.SocketStream.setsockopt", - "trio._highlevel_socket.SocketStream.getsockopt", - "trio.open_ssl_over_tcp_listeners", - "trio.open_ssl_over_tcp_stream", - "trio.serve_ssl_over_tcp", + "trio._highlevel_socket.SocketStream.wait_send_all_might_not_block", + "trio._path.AsyncAutoWrapperType.__init__", + "trio._path.AsyncAutoWrapperType.generate_forwards", + "trio._path.AsyncAutoWrapperType.generate_iter", + "trio._path.AsyncAutoWrapperType.generate_magic", + "trio._path.AsyncAutoWrapperType.generate_wraps", "trio._path.Path", - "trio._path.Path.__init__", + "trio._path.Path.__bytes__", "trio._path.Path.__dir__", - "trio._path.Path.__repr__", "trio._path.Path.__fspath__", - "trio._path.Path.open", - "trio._path.Path.__bytes__", - "trio._path.Path.__truediv__", + "trio._path.Path.__init__", + "trio._path.Path.__repr__", "trio._path.Path.__rtruediv__", - "trio._path.AsyncAutoWrapperType.__init__", - "trio._path.AsyncAutoWrapperType.generate_forwards", - "trio._path.AsyncAutoWrapperType.generate_wraps", - "trio._path.AsyncAutoWrapperType.generate_magic", - "trio._path.AsyncAutoWrapperType.generate_iter", + "trio._path.Path.__truediv__", + "trio._path.Path.open", "trio._ssl.SSLListener", "trio._ssl.SSLListener.__init__", "trio._ssl.SSLListener.accept", "trio._ssl.SSLListener.aclose", "trio._ssl.SSLStream", - "trio._ssl.SSLStream.__init__", + "trio._ssl.SSLStream.__dir__", "trio._ssl.SSLStream.__getattr__", + "trio._ssl.SSLStream.__init__", "trio._ssl.SSLStream.__setattr__", - "trio._ssl.SSLStream.__dir__", + "trio._ssl.SSLStream.aclose", "trio._ssl.SSLStream.do_handshake", "trio._ssl.SSLStream.receive_some", "trio._ssl.SSLStream.send_all", + "trio._ssl.SSLStream.transport_stream", "trio._ssl.SSLStream.unwrap", - "trio._ssl.SSLStream.aclose", "trio._ssl.SSLStream.wait_send_all_might_not_block", - "trio._ssl.SSLStream.transport_stream", "trio._subprocess.Process", - "trio._subprocess.Process.encoding", - "trio._subprocess.Process.errors", + "trio._subprocess.Process.__aenter__", "trio._subprocess.Process.__init__", "trio._subprocess.Process.__repr__", - "trio._subprocess.Process.returncode", - "trio._subprocess.Process.__aenter__", "trio._subprocess.Process.aclose", - "trio._subprocess.Process.wait", + "trio._subprocess.Process.args", + "trio._subprocess.Process.encoding", + "trio._subprocess.Process.errors", + "trio._subprocess.Process.kill", + "trio._subprocess.Process.pid", "trio._subprocess.Process.poll", + "trio._subprocess.Process.returncode", "trio._subprocess.Process.send_signal", "trio._subprocess.Process.terminate", - "trio._subprocess.Process.kill", - "trio._subprocess.Process.args", - "trio._subprocess.Process.pid", - "trio.run_process", + 
"trio._subprocess.Process.wait", "trio._sync.CapacityLimiter.__init__", "trio._sync.CapacityLimiter.__repr__", - "trio._sync.CapacityLimiter.total_tokens", - "trio._sync.CapacityLimiter.borrowed_tokens", "trio._sync.CapacityLimiter.available_tokens", + "trio._sync.CapacityLimiter.borrowed_tokens", "trio._sync.CapacityLimiter.statistics", + "trio._sync.CapacityLimiter.total_tokens", "trio._sync.Condition.__init__", - "trio._sync.Condition.locked", - "trio._sync.Condition.acquire_nowait", "trio._sync.Condition.acquire", - "trio._sync.Condition.release", + "trio._sync.Condition.acquire_nowait", + "trio._sync.Condition.locked", "trio._sync.Condition.notify", "trio._sync.Condition.notify_all", + "trio._sync.Condition.release", "trio._sync.Condition.statistics", "trio._sync.Event.is_set", - "trio._sync.Event.wait", "trio._sync.Event.statistics", + "trio._sync.Event.wait", "trio._sync.Lock", - "trio._sync._LockImpl.__repr__", - "trio._sync._LockImpl.locked", - "trio._sync._LockImpl.statistics", "trio._sync.Semaphore.__init__", "trio._sync.Semaphore.__repr__", - "trio._sync.Semaphore.value", "trio._sync.Semaphore.max_value", "trio._sync.Semaphore.statistics", + "trio._sync.Semaphore.value", "trio._sync.StrictFIFOLock", - "trio.__deprecated_attributes__", - "trio._abc.Clock.start_clock", - "trio._abc.Clock.current_time", - "trio._abc.Clock.deadline_to_sleep_time", - "trio._abc.HostnameResolver.getaddrinfo", - "trio._abc.HostnameResolver.getnameinfo", - "trio._abc.Instrument.before_run", - "trio._abc.Instrument.after_run", - "trio._abc.Instrument.task_spawned", - "trio._abc.Instrument.task_scheduled", - "trio._abc.Instrument.before_task_step", - "trio._abc.Instrument.after_task_step", - "trio._abc.Instrument.task_exited", - "trio._abc.Instrument.before_io_wait", - "trio._abc.Instrument.after_io_wait", - "trio._abc.SocketFactory.socket", + "trio._sync._LockImpl.__repr__", + "trio._sync._LockImpl.locked", + "trio._sync._LockImpl.statistics", + "trio._unix_pipes.FdStream", + "trio.aclose_forcefully", + "trio.current_effective_deadline", + "trio.current_time", "trio.from_thread.run", "trio.from_thread.run_sync", - "trio._core._parking_lot.ParkingLot.__len__", - "trio._core._parking_lot.ParkingLot.__bool__", - "trio._core._parking_lot.ParkingLot.unpark_all", - "trio._core._parking_lot.ParkingLot.repark_all", - "trio._core._parking_lot.ParkingLot.statistics", - "trio._core._local.RunVar.get", - "trio._core._local.RunVar.set", - "trio._core._local.RunVar.reset", - "trio._core._local.RunVar.__repr__", - "trio._core._run.Task.coro", - "trio._core._run.Task.name", - "trio._core._run.Task.context", - "trio._core._run.Task.custom_sleep_data", - "trio._core._run.Task.__repr__", - "trio._core._run.Task.parent_nursery", - "trio._core._run.Task.eventual_parent_nursery", - "trio._core._run.Task.child_nurseries", - "trio._core._run.Task.iter_await_frames", - "trio._core._entry_queue.TrioToken.run_sync_soon", - "trio._core._unbounded_queue.UnboundedQueue.__repr__", - "trio._core._unbounded_queue.UnboundedQueue.qsize", - "trio._core._unbounded_queue.UnboundedQueue.empty", - "trio._core._unbounded_queue.UnboundedQueue.get_batch_nowait", - "trio._core._unbounded_queue.UnboundedQueue.get_batch", - "trio._core._unbounded_queue.UnboundedQueue.statistics", - "trio._core._unbounded_queue.UnboundedQueue.__aiter__", - "trio._core._unbounded_queue.UnboundedQueue.__anext__", "trio.lowlevel.add_instrument", "trio.lowlevel.cancel_shielded_checkpoint", "trio.lowlevel.checkpoint", @@ -265,6 +263,7 @@ 
"trio.lowlevel.current_trio_token", "trio.lowlevel.currently_ki_protected", "trio.lowlevel.notify_closing", + "trio.lowlevel.open_process", "trio.lowlevel.permanently_detach_coroutine_object", "trio.lowlevel.reattach_detached_coroutine_object", "trio.lowlevel.remove_instrument", @@ -275,8 +274,17 @@ "trio.lowlevel.temporarily_detach_coroutine_object", "trio.lowlevel.wait_readable", "trio.lowlevel.wait_writable", - "trio.lowlevel.open_process", - "trio._unix_pipes.FdStream", + "trio.open_file", + "trio.open_ssl_over_tcp_listeners", + "trio.open_ssl_over_tcp_stream", + "trio.open_tcp_listeners", + "trio.open_tcp_stream", + "trio.open_unix_socket", + "trio.run", + "trio.run_process", + "trio.serve_listeners", + "trio.serve_ssl_over_tcp", + "trio.serve_tcp", "trio.socket.from_stdlib_socket", "trio.socket.fromfd", "trio.socket.getaddrinfo", @@ -286,41 +294,31 @@ "trio.socket.set_custom_socket_factory", "trio.socket.socket", "trio.socket.socketpair", - "trio._core._mock_clock.MockClock", - "trio._core._mock_clock.MockClock.__init__", - "trio._core._mock_clock.MockClock.__repr__", - "trio._core._mock_clock.MockClock.rate", - "trio._core._mock_clock.MockClock.autojump_threshold", - "trio._core._mock_clock.MockClock.start_clock", - "trio._core._mock_clock.MockClock.current_time", - "trio._core._mock_clock.MockClock.deadline_to_sleep_time", - "trio._core._mock_clock.MockClock.jump", - "trio.testing.wait_all_tasks_blocked", - "trio.testing.check_half_closeable_stream", - "trio.testing.check_one_way_stream", - "trio.testing.check_two_way_stream", - "trio.testing.assert_checkpoints", - "trio.testing.assert_no_checkpoints", "trio.testing._memory_streams.MemoryReceiveStream", "trio.testing._memory_streams.MemoryReceiveStream.__init__", - "trio.testing._memory_streams.MemoryReceiveStream.receive_some", - "trio.testing._memory_streams.MemoryReceiveStream.close", "trio.testing._memory_streams.MemoryReceiveStream.aclose", + "trio.testing._memory_streams.MemoryReceiveStream.close", + "trio.testing._memory_streams.MemoryReceiveStream.close_hook", "trio.testing._memory_streams.MemoryReceiveStream.put_data", "trio.testing._memory_streams.MemoryReceiveStream.put_eof", + "trio.testing._memory_streams.MemoryReceiveStream.receive_some", "trio.testing._memory_streams.MemoryReceiveStream.receive_some_hook", - "trio.testing._memory_streams.MemoryReceiveStream.close_hook", "trio.testing._memory_streams.MemorySendStream", "trio.testing._memory_streams.MemorySendStream.__init__", - "trio.testing._memory_streams.MemorySendStream.send_all", - "trio.testing._memory_streams.MemorySendStream.wait_send_all_might_not_block", - "trio.testing._memory_streams.MemorySendStream.close", "trio.testing._memory_streams.MemorySendStream.aclose", + "trio.testing._memory_streams.MemorySendStream.close", + "trio.testing._memory_streams.MemorySendStream.close_hook", "trio.testing._memory_streams.MemorySendStream.get_data", "trio.testing._memory_streams.MemorySendStream.get_data_nowait", + "trio.testing._memory_streams.MemorySendStream.send_all", "trio.testing._memory_streams.MemorySendStream.send_all_hook", + "trio.testing._memory_streams.MemorySendStream.wait_send_all_might_not_block", "trio.testing._memory_streams.MemorySendStream.wait_send_all_might_not_block_hook", - "trio.testing._memory_streams.MemorySendStream.close_hook", + "trio.testing.assert_checkpoints", + "trio.testing.assert_no_checkpoints", + "trio.testing.check_half_closeable_stream", + "trio.testing.check_one_way_stream", + "trio.testing.check_two_way_stream", 
"trio.testing.lockstep_stream_one_way_pair", "trio.testing.lockstep_stream_pair", "trio.testing.memory_stream_one_way_pair", @@ -328,8 +326,10 @@ "trio.testing.memory_stream_pump", "trio.testing.open_stream_to_socket_listener", "trio.testing.trio_test", + "trio.testing.wait_all_tasks_blocked", "trio.tests.TestsDeprecationWrapper", - "trio.to_thread.current_default_thread_limiter" + "trio.to_thread.current_default_thread_limiter", + "trio.wrap_file" ] } } From db3853a4f9de3edf2a0bd1cb9e4b8799845548c7 Mon Sep 17 00:00:00 2001 From: Joshua Oreman Date: Sun, 2 Jul 2023 16:39:51 -0600 Subject: [PATCH 086/162] Ensure guest runner is initialized when start_guest_run() returns --- newsfragments/2696.feature.rst | 4 +++ trio/_core/_run.py | 43 ++++++++++++++++++++++++++ trio/_core/_tests/test_guest_mode.py | 46 +++++++++++++++++++++++++++- 3 files changed, 92 insertions(+), 1 deletion(-) create mode 100644 newsfragments/2696.feature.rst diff --git a/newsfragments/2696.feature.rst b/newsfragments/2696.feature.rst new file mode 100644 index 0000000000..560cf3b365 --- /dev/null +++ b/newsfragments/2696.feature.rst @@ -0,0 +1,4 @@ +:func:`trio.lowlevel.start_guest_run` now does a bit more setup of the guest run +before it returns to its caller, so that the caller can immediately make calls to +:func:`trio.current_time`, :func:`trio.lowlevel.spawn_system_task`, +:func:`trio.lowlevel.current_trio_token`, etc. diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 0b6d326546..50c9cf2580 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -2118,6 +2118,16 @@ def start_guest_run( the host loop and then immediately starts the guest run, and then shuts down the host when the guest run completes. + Once :func:`start_guest_run` returns successfully, the guest run + has been set up enough that you can invoke sync-colored Trio + functions such as :func:`current_time`, :func:`spawn_system_task`, + and :func:`current_trio_token`. If a `TrioInternalError` occurs + during this early setup of the guest run, it will be raised out of + :func:`start_guest_run`. All other errors, including all errors + raised by the *async_fn*, will be delivered to your + *done_callback* at some point after :func:`start_guest_run` returns + successfully. + Args: run_sync_soon_threadsafe: An arbitrary callable, which will be passed a @@ -2178,6 +2188,39 @@ def my_done_callback(run_outcome): host_uses_signal_set_wakeup_fd=host_uses_signal_set_wakeup_fd, ), ) + + # Run a few ticks of the guest run synchronously, so that by the + # time we return, the system nursery exists and callers can use + # spawn_system_task. We don't actually run any user code during + # this time, so it shouldn't be possible to get an exception here, + # except for a TrioInternalError. 
+ next_send = None + for tick in range(5): # expected need is 2 iterations + leave some wiggle room + if runner.system_nursery is not None: + # We're initialized enough to switch to async guest ticks + break + try: + timeout = guest_state.unrolled_run_gen.send(next_send) + except StopIteration: # pragma: no cover + raise TrioInternalError( + "Guest runner exited before system nursery was initialized" + ) + if timeout != 0: # pragma: no cover + guest_state.unrolled_run_gen.throw( + TrioInternalError( + "Guest runner blocked before system nursery was initialized" + ) + ) + next_send = () + else: # pragma: no cover + guest_state.unrolled_run_gen.throw( + TrioInternalError( + "Guest runner yielded too many times before " + "system nursery was initialized" + ) + ) + + guest_state.unrolled_run_next_send = Value(next_send) run_sync_soon_not_threadsafe(guest_state.guest_tick) diff --git a/trio/_core/_tests/test_guest_mode.py b/trio/_core/_tests/test_guest_mode.py index 7b004cf04d..7aef3e437e 100644 --- a/trio/_core/_tests/test_guest_mode.py +++ b/trio/_core/_tests/test_guest_mode.py @@ -26,7 +26,7 @@ # our main # - final result is returned # - any unhandled exceptions cause an immediate crash -def trivial_guest_run(trio_fn, **start_guest_run_kwargs): +def trivial_guest_run(trio_fn, *, in_host_after_start=None, **start_guest_run_kwargs): todo = queue.Queue() host_thread = threading.current_thread() @@ -58,6 +58,8 @@ def done_callback(outcome): done_callback=done_callback, **start_guest_run_kwargs, ) + if in_host_after_start is not None: + in_host_after_start() try: while True: @@ -109,6 +111,48 @@ async def do_receive(): trivial_guest_run(trio_main) +def test_guest_is_initialized_when_start_returns(): + trio_token = None + record = [] + + async def trio_main(in_host): + record.append("main task ran") + await trio.sleep(0) + assert trio.lowlevel.current_trio_token() is trio_token + return "ok" + + def after_start(): + # We should get control back before the main task executes any code + assert record == [] + + nonlocal trio_token + trio_token = trio.lowlevel.current_trio_token() + trio_token.run_sync_soon(record.append, "run_sync_soon cb ran") + + @trio.lowlevel.spawn_system_task + async def early_task(): + record.append("system task ran") + await trio.sleep(0) + + res = trivial_guest_run(trio_main, in_host_after_start=after_start) + assert res == "ok" + assert set(record) == {"system task ran", "main task ran", "run_sync_soon cb ran"} + + # Errors during initialization (which can only be TrioInternalErrors) + # are raised out of start_guest_run, not out of the done_callback + with pytest.raises(trio.TrioInternalError): + class BadClock: + def start_clock(self): + raise ValueError("whoops") + + def after_start_never_runs(): # pragma: no cover + pytest.fail("shouldn't get here") + + trivial_guest_run( + trio_main, clock=BadClock(), in_host_after_start=after_start_never_runs + ) + + def test_host_can_directly_wake_trio_task(): async def trio_main(in_host): ev = trio.Event() From b121a837f3960eb2ffe3bd71262c02bb45476df0 Mon Sep 17 00:00:00 2001 From: Joshua Oreman Date: Tue, 11 Jul 2023 04:17:04 -0600 Subject: [PATCH 087/162] CI fixes --- trio/_core/_run.py | 6 +++--- trio/_core/_tests/test_guest_mode.py | 1 + 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 50c9cf2580..39c1e0d48f 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -2120,8 +2120,8 @@ def start_guest_run( Once :func:`start_guest_run` returns successfully, the guest run 
has been set up enough that you can invoke sync-colored Trio - functions such as :func:`current_time`, :func:`spawn_system_task`, - and :func:`current_trio_token`. If a `TrioInternalError` occurs + functions such as :func:`~trio.current_time`, :func:`spawn_system_task`, + and :func:`current_trio_token`. If a `~trio.TrioInternalError` occurs during this early setup of the guest run, it will be raised out of :func:`start_guest_run`. All other errors, including all errors raised by the *async_fn*, will be delivered to your @@ -2211,7 +2211,7 @@ def my_done_callback(run_outcome): "Guest runner blocked before system nursery was initialized" ) ) - next_send = () + next_send = 0 if sys.platform == "win32" else () else: # pragma: no cover guest_state.unrolled_run_gen.throw( TrioInternalError( diff --git a/trio/_core/_tests/test_guest_mode.py b/trio/_core/_tests/test_guest_mode.py index 7aef3e437e..7d8a358f71 100644 --- a/trio/_core/_tests/test_guest_mode.py +++ b/trio/_core/_tests/test_guest_mode.py @@ -141,6 +141,7 @@ async def early_task(): # Errors during initialization (which can only be TrioInternalErrors) # are raised out of start_guest_run, not out of the done_callback with pytest.raises(trio.TrioInternalError): + class BadClock: def start_clock(self): raise ValueError("whoops") From 1687f5d5a891758f59dcbc4f4f0b57351aac7988 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Tue, 11 Jul 2023 23:48:48 +0200 Subject: [PATCH 088/162] fix PermissionError due to accessing pthreads upon importing trio (#2693) * fix PermissionError due to accessing pthreads upon importing trio * fix codecov * add newsfragment --- newsfragments/2688.bugfix.rst | 1 + trio/_core/_thread_cache.py | 9 ++++++++- 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 newsfragments/2688.bugfix.rst diff --git a/newsfragments/2688.bugfix.rst b/newsfragments/2688.bugfix.rst new file mode 100644 index 0000000000..95f16feff7 --- /dev/null +++ b/newsfragments/2688.bugfix.rst @@ -0,0 +1 @@ +Fix ``PermissionError`` when importing `trio` due to trying to access ``pthread``. diff --git a/trio/_core/_thread_cache.py b/trio/_core/_thread_cache.py index e570e8dead..cc272fc92c 100644 --- a/trio/_core/_thread_cache.py +++ b/trio/_core/_thread_cache.py @@ -39,7 +39,14 @@ def darwin_namefunc( libpthread_path = ctypes.util.find_library("pthread") if not libpthread_path: return None - libpthread = ctypes.CDLL(libpthread_path) + + # Sometimes windows can find the path, but gives a permission error when + # accessing it. Catching a wider exception in case of more esoteric errors. + # https://github.com/python-trio/trio/issues/2688 + try: + libpthread = ctypes.CDLL(libpthread_path) + except Exception: # pragma: no cover + return None # get the setname method from it # afaik this should never fail From fde342f24e8d67e4540a060e91d5240085b35d8e Mon Sep 17 00:00:00 2001 From: Joshua Oreman Date: Sun, 2 Jul 2023 16:11:42 -0600 Subject: [PATCH 089/162] Modernize sniffio integration Use `sniffio.thread_local` to indicate which async library is running, instead of the deprecated `sniffio.current_async_library_cvar`. This allows us to remove a number of workarounds for the contextvar being inherited where it shouldn't be. 
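As a hedged illustration of what the `sniffio.thread_local` approach means for callers (this snippet is not part of the patch; it only assumes sniffio >= 1.3.0 and the behavior described in the commit message above):

    import sniffio
    import trio

    async def main():
        # Inside trio.run() the thread-local marker is set, so detection
        # works without relying on an inherited contextvar.
        assert sniffio.current_async_library() == "trio"

    trio.run(main)

    # Once trio.run() has returned, the marker is restored, so this thread
    # no longer claims to be running any async library:
    try:
        sniffio.current_async_library()
    except sniffio.AsyncLibraryNotFoundError:
        print("no async library running in this thread")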
--- setup.py | 2 +- trio/_core/_run.py | 32 +++++++----- trio/_core/_tests/test_guest_mode.py | 31 +++++++++++- trio/_core/_tests/test_run.py | 23 ++++++--- trio/_tests/test_threads.py | 76 +++++++++------------------- trio/_threads.py | 7 ++- trio/_util.py | 8 +++ 7 files changed, 103 insertions(+), 76 deletions(-) diff --git a/setup.py b/setup.py index 3f420f6c63..adf1a00a2f 100644 --- a/setup.py +++ b/setup.py @@ -86,7 +86,7 @@ "sortedcontainers", "idna", "outcome", - "sniffio", + "sniffio >= 1.3.0", # cffi 1.12 adds from_buffer(require_writable=True) and ffi.release() # cffi 1.14 fixes memory leak inside ffi.getwinerror() # cffi is required on Windows, except on PyPy where it is built-in diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 0b6d326546..a0bb0699cc 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -21,7 +21,7 @@ import attr from outcome import Error, Outcome, Value, capture -from sniffio import current_async_library_cvar +from sniffio import thread_local as sniffio_library from sortedcontainers import SortedDict from .. import _core @@ -1366,6 +1366,7 @@ class GuestState: unrolled_run_next_send = attr.ib(factory=_value_factory, type=Outcome) def guest_tick(self): + prev_library, sniffio_library.name = sniffio_library.name, "trio" try: timeout = self.unrolled_run_next_send.send(self.unrolled_run_gen) except StopIteration: @@ -1374,6 +1375,8 @@ def guest_tick(self): except TrioInternalError as exc: self.done_callback(Error(exc)) return + finally: + sniffio_library.name = prev_library # Optimization: try to skip going into the thread if we can avoid it events_outcome = capture(self.runner.io_manager.get_events, 0) @@ -1562,17 +1565,13 @@ def spawn_impl( assert self.init_task is None ###### - # Propagate contextvars, and make sure that async_fn can use sniffio. + # Propagate contextvars ###### if context is None: if system_task: context = self.system_context.copy() else: context = copy_context() - # start_soon() or spawn_system_task() might have been invoked - # from a different async library; make sure the new task - # understands it's Trio-flavored. - context.run(current_async_library_cvar.set, "trio") ###### # Call the function and get the coroutine object, while giving helpful @@ -2077,14 +2076,19 @@ def run( strict_exception_groups, ) - gen = unrolled_run(runner, async_fn, args) - next_send = None - while True: - try: - timeout = gen.send(next_send) - except StopIteration: - break - next_send = runner.io_manager.get_events(timeout) + prev_library, sniffio_library.name = sniffio_library.name, "trio" + try: + gen = unrolled_run(runner, async_fn, args) + next_send = None + while True: + try: + timeout = gen.send(next_send) + except StopIteration: + break + next_send = runner.io_manager.get_events(timeout) + finally: + sniffio_library.name = prev_library + # Inlined copy of runner.main_task_outcome.unwrap() to avoid # cluttering every single Trio traceback with an extra frame. 
if isinstance(runner.main_task_outcome, Value): diff --git a/trio/_core/_tests/test_guest_mode.py b/trio/_core/_tests/test_guest_mode.py index 7b004cf04d..27aa77a944 100644 --- a/trio/_core/_tests/test_guest_mode.py +++ b/trio/_core/_tests/test_guest_mode.py @@ -142,6 +142,35 @@ async def trio_main(in_host): assert trivial_guest_run(trio_main) == "ok" +def test_guest_mode_sniffio_integration(): + from sniffio import current_async_library, thread_local as sniffio_library + + async def trio_main(in_host): + async def synchronize(): + """Wait for all in_host() calls issued so far to complete.""" + evt = trio.Event() + in_host(evt.set) + await evt.wait() + + # Host and guest have separate sniffio_library contexts + in_host(partial(setattr, sniffio_library, "name", "nullio")) + await synchronize() + assert current_async_library() == "trio" + + record = [] + in_host(lambda: record.append(current_async_library())) + await synchronize() + assert record == ["nullio"] + assert current_async_library() == "trio" + + return "ok" + + try: + assert trivial_guest_run(trio_main) == "ok" + finally: + sniffio_library.name = None + + def test_warn_set_wakeup_fd_overwrite(): assert signal.set_wakeup_fd(-1) == -1 @@ -527,8 +556,6 @@ async def agen(label): record.add((label, library)) async def iterate_in_aio(): - # "trio" gets inherited from our Trio caller if we don't set this - sniffio.current_async_library_cvar.set("asyncio") await agen("asyncio").asend(None) async def trio_main(): diff --git a/trio/_core/_tests/test_run.py b/trio/_core/_tests/test_run.py index 81c3b73cc4..173f3e2d30 100644 --- a/trio/_core/_tests/test_run.py +++ b/trio/_core/_tests/test_run.py @@ -6,7 +6,7 @@ import time import types import weakref -from contextlib import ExitStack +from contextlib import ExitStack, contextmanager from math import inf import outcome @@ -2004,13 +2004,24 @@ def check_function_returning_coroutine(): with pytest.raises(sniffio.AsyncLibraryNotFoundError): sniffio.current_async_library() + @contextmanager + def alternate_sniffio_library(): + prev_token = sniffio.current_async_library_cvar.set("nullio") + prev_library, sniffio.thread_local.name = sniffio.thread_local.name, "nullio" + try: + yield + assert sniffio.current_async_library() == "nullio" + finally: + sniffio.thread_local.name = prev_library + sniffio.current_async_library_cvar.reset(prev_token) + async def check_new_task_resets_sniffio_library(): - sniffio.current_async_library_cvar.set("nullio") - _core.spawn_system_task(check_inside_trio) + with alternate_sniffio_library(): + _core.spawn_system_task(check_inside_trio) async with _core.open_nursery() as nursery: - nursery.start_soon(check_inside_trio) - nursery.start_soon(check_function_returning_coroutine) - assert sniffio.current_async_library() == "nullio" + with alternate_sniffio_library(): + nursery.start_soon(check_inside_trio) + nursery.start_soon(check_function_returning_coroutine) _core.run(check_new_task_resets_sniffio_library) diff --git a/trio/_tests/test_threads.py b/trio/_tests/test_threads.py index 21eb7b12e8..37f44fca64 100644 --- a/trio/_tests/test_threads.py +++ b/trio/_tests/test_threads.py @@ -9,7 +9,7 @@ from typing import Callable, Optional import pytest -from sniffio import current_async_library_cvar +import sniffio from trio._core import TrioToken, current_trio_token @@ -593,42 +593,35 @@ async def test_trio_to_thread_run_sync_contextvars(): def f(): value = trio_test_contextvar.get() - sniffio_cvar_value = current_async_library_cvar.get() - return (value, sniffio_cvar_value, 
threading.current_thread()) + with pytest.raises(sniffio.AsyncLibraryNotFoundError): + sniffio.current_async_library() + return (value, threading.current_thread()) - value, sniffio_cvar_value, child_thread = await to_thread_run_sync(f) + value, child_thread = await to_thread_run_sync(f) assert value == "main" - assert sniffio_cvar_value == None assert child_thread != trio_thread def g(): parent_value = trio_test_contextvar.get() trio_test_contextvar.set("worker") inner_value = trio_test_contextvar.get() - sniffio_cvar_value = current_async_library_cvar.get() + with pytest.raises(sniffio.AsyncLibraryNotFoundError): + sniffio.current_async_library() return ( parent_value, inner_value, - sniffio_cvar_value, threading.current_thread(), ) - ( - parent_value, - inner_value, - sniffio_cvar_value, - child_thread, - ) = await to_thread_run_sync(g) + parent_value, inner_value, child_thread = await to_thread_run_sync(g) current_value = trio_test_contextvar.get() - sniffio_outer_value = current_async_library_cvar.get() assert parent_value == "main" assert inner_value == "worker" assert current_value == "main", ( "The contextvar value set on the worker would not propagate back to the main" " thread" ) - assert sniffio_cvar_value is None - assert sniffio_outer_value == "trio" + assert sniffio.current_async_library() == "trio" async def test_trio_from_thread_run_sync(): @@ -715,50 +708,40 @@ def thread_fn(): thread_parent_value = trio_test_contextvar.get() trio_test_contextvar.set("worker") thread_current_value = trio_test_contextvar.get() - sniffio_cvar_thread_pre_value = current_async_library_cvar.get() + with pytest.raises(sniffio.AsyncLibraryNotFoundError): + sniffio.current_async_library() def back_in_main(): back_parent_value = trio_test_contextvar.get() trio_test_contextvar.set("back_in_main") back_current_value = trio_test_contextvar.get() - sniffio_cvar_back_value = current_async_library_cvar.get() - return back_parent_value, back_current_value, sniffio_cvar_back_value + assert sniffio.current_async_library() == "trio" + return back_parent_value, back_current_value - ( - back_parent_value, - back_current_value, - sniffio_cvar_back_value, - ) = from_thread_run_sync(back_in_main) + back_parent_value, back_current_value = from_thread_run_sync(back_in_main) thread_after_value = trio_test_contextvar.get() - sniffio_cvar_thread_after_value = current_async_library_cvar.get() + with pytest.raises(sniffio.AsyncLibraryNotFoundError): + sniffio.current_async_library() return ( thread_parent_value, thread_current_value, thread_after_value, - sniffio_cvar_thread_pre_value, - sniffio_cvar_thread_after_value, back_parent_value, back_current_value, - sniffio_cvar_back_value, ) ( thread_parent_value, thread_current_value, thread_after_value, - sniffio_cvar_thread_pre_value, - sniffio_cvar_thread_after_value, back_parent_value, back_current_value, - sniffio_cvar_back_value, ) = await to_thread_run_sync(thread_fn) current_value = trio_test_contextvar.get() - sniffio_cvar_out_value = current_async_library_cvar.get() assert current_value == thread_parent_value == "main" assert thread_current_value == back_parent_value == thread_after_value == "worker" + assert sniffio.current_async_library() == "trio" assert back_current_value == "back_in_main" - assert sniffio_cvar_out_value == sniffio_cvar_back_value == "trio" - assert sniffio_cvar_thread_pre_value == sniffio_cvar_thread_after_value == None async def test_trio_from_thread_run_contextvars(): @@ -768,49 +751,40 @@ def thread_fn(): thread_parent_value = 
trio_test_contextvar.get() trio_test_contextvar.set("worker") thread_current_value = trio_test_contextvar.get() - sniffio_cvar_thread_pre_value = current_async_library_cvar.get() + with pytest.raises(sniffio.AsyncLibraryNotFoundError): + sniffio.current_async_library() async def async_back_in_main(): back_parent_value = trio_test_contextvar.get() trio_test_contextvar.set("back_in_main") back_current_value = trio_test_contextvar.get() - sniffio_cvar_back_value = current_async_library_cvar.get() - return back_parent_value, back_current_value, sniffio_cvar_back_value + assert sniffio.current_async_library() == "trio" + return back_parent_value, back_current_value - ( - back_parent_value, - back_current_value, - sniffio_cvar_back_value, - ) = from_thread_run(async_back_in_main) + back_parent_value, back_current_value = from_thread_run(async_back_in_main) thread_after_value = trio_test_contextvar.get() - sniffio_cvar_thread_after_value = current_async_library_cvar.get() + with pytest.raises(sniffio.AsyncLibraryNotFoundError): + sniffio.current_async_library() return ( thread_parent_value, thread_current_value, thread_after_value, - sniffio_cvar_thread_pre_value, - sniffio_cvar_thread_after_value, back_parent_value, back_current_value, - sniffio_cvar_back_value, ) ( thread_parent_value, thread_current_value, thread_after_value, - sniffio_cvar_thread_pre_value, - sniffio_cvar_thread_after_value, back_parent_value, back_current_value, - sniffio_cvar_back_value, ) = await to_thread_run_sync(thread_fn) current_value = trio_test_contextvar.get() assert current_value == thread_parent_value == "main" assert thread_current_value == back_parent_value == thread_after_value == "worker" assert back_current_value == "back_in_main" - assert sniffio_cvar_thread_pre_value == sniffio_cvar_thread_after_value == None - assert sniffio_cvar_back_value == "trio" + assert sniffio.current_async_library() == "trio" def test_run_fn_as_system_task_catched_badly_typed_token(): diff --git a/trio/_threads.py b/trio/_threads.py index 807212e0f9..6284e8ad2c 100644 --- a/trio/_threads.py +++ b/trio/_threads.py @@ -179,7 +179,12 @@ def do_release_then_return_result(): thread_name = f"{getattr(sync_fn, '__name__', None)} from {trio.lowlevel.current_task().name}" def worker_fn(): + # Trio doesn't use current_async_library_cvar, but if someone + # else set it, it would now shine through since + # snifio.thread_local isn't set in the new thread. Make sure + # the new thread sees that it's not running in async context. 
current_async_library_cvar.set(None) + TOKEN_LOCAL.token = current_trio_token try: ret = sync_fn(*args) @@ -354,8 +359,6 @@ def from_thread_run_sync(fn, *args, trio_token=None): """ def callback(q, fn, args): - current_async_library_cvar.set("trio") - @disable_ki_protection def unprotected_fn(): ret = fn(*args) diff --git a/trio/_util.py b/trio/_util.py index c21cefe71e..01a5bfccdb 100644 --- a/trio/_util.py +++ b/trio/_util.py @@ -11,6 +11,7 @@ from functools import update_wrapper import trio +from sniffio import thread_local as sniffio_loop # Equivalent to the C function raise(), which Python doesn't wrap if os.name == "nt": @@ -98,6 +99,10 @@ def _return_value_looks_like_wrong_library(value): return True return False + # Make sure a sync-fn-that-returns-coroutine still sees itself as being + # in trio context + prev_loop, sniffio_loop.name = sniffio_loop.name, "trio" + try: coro = async_fn(*args) @@ -135,6 +140,9 @@ def _return_value_looks_like_wrong_library(value): raise + finally: + sniffio_loop.name = prev_loop + # We can't check iscoroutinefunction(async_fn), because that will fail # for things like functools.partial objects wrapping an async # function. So we have to just call it and then check whether the From f307245d589072bbddca486c142c1caa0ed53123 Mon Sep 17 00:00:00 2001 From: Joshua Oreman Date: Wed, 12 Jul 2023 15:18:19 -0600 Subject: [PATCH 090/162] Fix isort, add newsfragment --- newsfragments/2700.misc.rst | 4 ++++ trio/_util.py | 3 ++- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 newsfragments/2700.misc.rst diff --git a/newsfragments/2700.misc.rst b/newsfragments/2700.misc.rst new file mode 100644 index 0000000000..a70924816e --- /dev/null +++ b/newsfragments/2700.misc.rst @@ -0,0 +1,4 @@ +Trio now indicates its presence to `sniffio` using the ``sniffio.thread_local`` +interface that is preferred since sniffio v1.3.0. This should be less likely +than the previous approach to cause :func:`sniffio.current_async_library` to +return incorrect results due to unintended inheritance of contextvars. diff --git a/trio/_util.py b/trio/_util.py index 01a5bfccdb..62ff6d8681 100644 --- a/trio/_util.py +++ b/trio/_util.py @@ -10,9 +10,10 @@ from abc import ABCMeta from functools import update_wrapper -import trio from sniffio import thread_local as sniffio_loop +import trio + # Equivalent to the C function raise(), which Python doesn't wrap if os.name == "nt": # On Windows, os.kill exists but is really weird. 
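The same save-and-restore idiom appears in `guest_tick`, in `run`, and in the hunk just below; as a standalone sketch (the helper name `marked_as_trio` is illustrative, not something this patch adds):

    from sniffio import thread_local as sniffio_loop

    def marked_as_trio(fn, *args):
        # Temporarily tell sniffio that this thread is running Trio, and
        # restore whatever value was there before on the way out.
        prev, sniffio_loop.name = sniffio_loop.name, "trio"
        try:
            return fn(*args)
        finally:
            sniffio_loop.name = prev

Because the marker lives in a thread local rather than a contextvar, it is not inherited by worker threads or by tasks of other libraries, which is what allows the older contextvar workarounds to be removed.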
From d35600d1bbac4a888ed1f3e8716fb5f28a70d01c Mon Sep 17 00:00:00 2001 From: Joshua Oreman Date: Wed, 12 Jul 2023 15:28:20 -0600 Subject: [PATCH 091/162] test_guest_mode_asyncgens works on pypy now --- trio/_core/_tests/test_guest_mode.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/trio/_core/_tests/test_guest_mode.py b/trio/_core/_tests/test_guest_mode.py index 27aa77a944..76ef228b99 100644 --- a/trio/_core/_tests/test_guest_mode.py +++ b/trio/_core/_tests/test_guest_mode.py @@ -533,10 +533,6 @@ async def trio_main(in_host): @pytest.mark.skipif(buggy_pypy_asyncgens, reason="PyPy 7.2 is buggy") -@pytest.mark.xfail( - sys.implementation.name == "pypy", - reason="async generator issue under investigation", -) @restore_unraisablehook() def test_guest_mode_asyncgens(): import sniffio From 8741c58def83defc84ab2734f95f2f8d51c21920 Mon Sep 17 00:00:00 2001 From: A5rocks Date: Thu, 13 Jul 2023 07:50:51 +0900 Subject: [PATCH 092/162] Release 0.22.2 --- docs/source/history.rst | 9 +++++++++ newsfragments/2688.bugfix.rst | 1 - trio/_version.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) delete mode 100644 newsfragments/2688.bugfix.rst diff --git a/docs/source/history.rst b/docs/source/history.rst index 8e1d0209d6..24eeb57261 100644 --- a/docs/source/history.rst +++ b/docs/source/history.rst @@ -5,6 +5,15 @@ Release history .. towncrier release notes start +Trio 0.22.2 (2023-07-13) +------------------------ + +Bugfixes +~~~~~~~~ + +- Fix ``PermissionError`` when importing `trio` due to trying to access ``pthread``. (`#2688 `__) + + Trio 0.22.1 (2023-07-02) ------------------------ diff --git a/newsfragments/2688.bugfix.rst b/newsfragments/2688.bugfix.rst deleted file mode 100644 index 95f16feff7..0000000000 --- a/newsfragments/2688.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fix ``PermissionError`` when importing `trio` due to trying to access ``pthread``. 
diff --git a/trio/_version.py b/trio/_version.py index 48fcaf534e..d06f473191 100644 --- a/trio/_version.py +++ b/trio/_version.py @@ -1,3 +1,3 @@ # This file is imported from __init__.py and exec'd from setup.py -__version__ = "0.22.1+dev" +__version__ = "0.22.2" From 87ed1d360e41b7aa47abac86c2c6bdbecf779795 Mon Sep 17 00:00:00 2001 From: A5rocks Date: Thu, 13 Jul 2023 08:09:51 +0900 Subject: [PATCH 093/162] Released 0.22.2, start new cycle --- trio/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trio/_version.py b/trio/_version.py index d06f473191..65242863a9 100644 --- a/trio/_version.py +++ b/trio/_version.py @@ -1,3 +1,3 @@ # This file is imported from __init__.py and exec'd from setup.py -__version__ = "0.22.2" +__version__ = "0.22.2+dev" From 4cd3164d7fa6c6bf597d83e64c453b956b7dca3e Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Thu, 13 Jul 2023 02:44:49 +0000 Subject: [PATCH 094/162] lots of typing improvements (#2682) * lots of typing improvements * changes after review * fix tests, reverting accidentally removing a default param * fix all __aexit * rename exc_val to exc_value to match official data model * rename exc_tb to traceback to match python data model * fix format check CI * changes after review from TeamSpen210 * Apply suggestions from code review Co-authored-by: Spencer Brown * fixes after review * attempt to fix readthedocs build, (temp) ignore errors on custom_sleep_data * revent custom_sleep_data to Any, add docstring to some Statistics to see if that makes readthedocs happy * fix CI errors * don't require *Statistics to be Final * fix CI * fix codecov and formatting * ValueError, not TypeError * fix pragma: no cover * fixes from CI review, mainly from Zac-HD * small fixes after review from A5Rocks * fix read the docs references --------- Co-authored-by: Spencer Brown --- docs/source/conf.py | 12 ++ docs/source/reference-core.rst | 12 ++ docs/source/reference-lowlevel.rst | 2 + trio/__init__.py | 4 + trio/_abc.py | 40 +++-- trio/_channel.py | 8 +- trio/_core/__init__.py | 2 +- trio/_core/_mock_clock.py | 26 ++-- trio/_core/_multierror.py | 19 ++- trio/_core/_parking_lot.py | 55 +++++-- trio/_core/_run.py | 149 ++++++++++-------- trio/_core/_tests/test_parking_lot.py | 3 + trio/_dtls.py | 34 +++- trio/_highlevel_generic.py | 28 ++-- trio/_highlevel_socket.py | 21 ++- trio/_socket.py | 67 +++++--- trio/_sync.py | 213 +++++++++++++++++--------- trio/_tests/test_exports.py | 4 + trio/_tests/verify_types.json | 131 ++-------------- trio/_util.py | 8 +- trio/lowlevel.py | 1 + trio/testing/_check_streams.py | 12 +- trio/testing/_fake_net.py | 14 +- 23 files changed, 517 insertions(+), 348 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index cfac66576b..68a5a22a81 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -62,10 +62,22 @@ ("py:obj", "trio._abc.SendType"), ("py:obj", "trio._abc.T"), ("py:obj", "trio._abc.T_resource"), + ("py:class", "types.FrameType"), ] autodoc_inherit_docstrings = False default_role = "obj" +# These have incorrect __module__ set in stdlib and give the error +# `py:class reference target not found` +# Some of the nitpick_ignore's above can probably be fixed with this. 
+# See https://github.com/sphinx-doc/sphinx/issues/8315#issuecomment-751335798 +autodoc_type_aliases = { + # aliasing doesn't actually fix the warning for types.FrameType, but displaying + # "types.FrameType" is more helpful than just "frame" + "FrameType": "types.FrameType", +} + + # XX hack the RTD theme until # https://github.com/rtfd/sphinx_rtd_theme/pull/382 # is shipped (should be in the release after 0.2.4) diff --git a/docs/source/reference-core.rst b/docs/source/reference-core.rst index 922ae4680e..4f4f4d62b9 100644 --- a/docs/source/reference-core.rst +++ b/docs/source/reference-core.rst @@ -1096,6 +1096,8 @@ Broadcasting an event with :class:`Event` .. autoclass:: Event :members: +.. autoclass:: EventStatistics + :members: .. _channels: @@ -1456,6 +1458,16 @@ don't have any special access to Trio's internals.) .. autoclass:: Condition :members: +These primitives return statistics objects that can be inspected. + +.. autoclass:: CapacityLimiterStatistics + :members: + +.. autoclass:: LockStatistics + :members: + +.. autoclass:: ConditionStatistics + :members: .. _async-generators: diff --git a/docs/source/reference-lowlevel.rst b/docs/source/reference-lowlevel.rst index 815cff2ddf..bacebff5ad 100644 --- a/docs/source/reference-lowlevel.rst +++ b/docs/source/reference-lowlevel.rst @@ -378,6 +378,8 @@ Wait queue abstraction :members: :undoc-members: +.. autoclass:: ParkingLotStatistics + :members: Low-level checkpoint functions ------------------------------ diff --git a/trio/__init__.py b/trio/__init__.py index 42b57e69c0..2b8810504b 100644 --- a/trio/__init__.py +++ b/trio/__init__.py @@ -78,9 +78,13 @@ from ._subprocess import Process as Process, run_process as run_process from ._sync import ( CapacityLimiter as CapacityLimiter, + CapacityLimiterStatistics as CapacityLimiterStatistics, Condition as Condition, + ConditionStatistics as ConditionStatistics, Event as Event, + EventStatistics as EventStatistics, Lock as Lock, + LockStatistics as LockStatistics, Semaphore as Semaphore, StrictFIFOLock as StrictFIFOLock, ) diff --git a/trio/_abc.py b/trio/_abc.py index a01812dae8..2a1721db13 100644 --- a/trio/_abc.py +++ b/trio/_abc.py @@ -1,8 +1,15 @@ +from __future__ import annotations + from abc import ABCMeta, abstractmethod -from typing import Generic, TypeVar +from typing import TYPE_CHECKING, Generic, TypeVar import trio +if TYPE_CHECKING: + from types import TracebackType + + from typing_extensions import Self + # We use ABCMeta instead of ABC, plus set __slots__=(), so as not to force a # __dict__ onto subclasses. @@ -12,7 +19,7 @@ class Clock(metaclass=ABCMeta): __slots__ = () @abstractmethod - def start_clock(self): + def start_clock(self) -> None: """Do any setup this clock might need. Called at the beginning of the run. @@ -20,7 +27,7 @@ def start_clock(self): """ @abstractmethod - def current_time(self): + def current_time(self) -> float: """Return the current time, according to this clock. This is used to implement functions like :func:`trio.current_time` and @@ -32,7 +39,7 @@ def current_time(self): """ @abstractmethod - def deadline_to_sleep_time(self, deadline): + def deadline_to_sleep_time(self, deadline: float) -> float: """Compute the real time until the given deadline. This is called before we enter a system-specific wait function like @@ -225,7 +232,7 @@ class AsyncResource(metaclass=ABCMeta): __slots__ = () @abstractmethod - async def aclose(self): + async def aclose(self) -> None: """Close this resource, possibly blocking. 
IMPORTANT: This method may block in order to perform a "graceful" @@ -253,10 +260,15 @@ async def aclose(self): """ - async def __aenter__(self): + async def __aenter__(self) -> Self: return self - async def __aexit__(self, *args): + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: await self.aclose() @@ -279,7 +291,7 @@ class SendStream(AsyncResource): __slots__ = () @abstractmethod - async def send_all(self, data): + async def send_all(self, data: bytes | bytearray | memoryview) -> None: """Sends the given data through the stream, blocking if necessary. Args: @@ -305,7 +317,7 @@ async def send_all(self, data): """ @abstractmethod - async def wait_send_all_might_not_block(self): + async def wait_send_all_might_not_block(self) -> None: """Block until it's possible that :meth:`send_all` might not block. This method may return early: it's possible that after it returns, @@ -385,7 +397,7 @@ class ReceiveStream(AsyncResource): __slots__ = () @abstractmethod - async def receive_some(self, max_bytes=None): + async def receive_some(self, max_bytes: int | None = None) -> bytes | bytearray: """Wait until there is data available on this stream, and then return some of it. @@ -413,10 +425,10 @@ async def receive_some(self, max_bytes=None): """ - def __aiter__(self): + def __aiter__(self) -> Self: return self - async def __anext__(self): + async def __anext__(self) -> bytes | bytearray: data = await self.receive_some() if not data: raise StopAsyncIteration @@ -446,7 +458,7 @@ class HalfCloseableStream(Stream): __slots__ = () @abstractmethod - async def send_eof(self): + async def send_eof(self) -> None: """Send an end-of-file indication on this stream, if possible. 
The difference between :meth:`send_eof` and @@ -632,7 +644,7 @@ async def receive(self) -> ReceiveType: """ - def __aiter__(self): + def __aiter__(self) -> Self: return self async def __anext__(self) -> ReceiveType: diff --git a/trio/_channel.py b/trio/_channel.py index 2bdec5bd09..7c8ff4660d 100644 --- a/trio/_channel.py +++ b/trio/_channel.py @@ -243,8 +243,8 @@ def __enter__(self: SelfT) -> SelfT: def __exit__( self, exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, + exc_value: BaseException | None, + traceback: TracebackType | None, ) -> None: self.close() @@ -389,8 +389,8 @@ def __enter__(self: SelfT) -> SelfT: def __exit__( self, exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, + exc_value: BaseException | None, + traceback: TracebackType | None, ) -> None: self.close() diff --git a/trio/_core/__init__.py b/trio/_core/__init__.py index c2991a4048..abd58245e3 100644 --- a/trio/_core/__init__.py +++ b/trio/_core/__init__.py @@ -20,7 +20,7 @@ from ._ki import currently_ki_protected, disable_ki_protection, enable_ki_protection from ._local import RunVar from ._mock_clock import MockClock -from ._parking_lot import ParkingLot +from ._parking_lot import ParkingLot, ParkingLotStatistics # Imports that always exist from ._run import ( diff --git a/trio/_core/_mock_clock.py b/trio/_core/_mock_clock.py index 0eb76b6356..fe35298631 100644 --- a/trio/_core/_mock_clock.py +++ b/trio/_core/_mock_clock.py @@ -62,7 +62,7 @@ class MockClock(Clock, metaclass=Final): """ - def __init__(self, rate=0.0, autojump_threshold=inf): + def __init__(self, rate: float = 0.0, autojump_threshold: float = inf): # when the real clock said 'real_base', the virtual time was # 'virtual_base', and since then it's advanced at 'rate' virtual # seconds per real second. @@ -77,17 +77,17 @@ def __init__(self, rate=0.0, autojump_threshold=inf): self.rate = rate self.autojump_threshold = autojump_threshold - def __repr__(self): + def __repr__(self) -> str: return "".format( self.current_time(), self._rate, id(self) ) @property - def rate(self): + def rate(self) -> float: return self._rate @rate.setter - def rate(self, new_rate): + def rate(self, new_rate: float) -> None: if new_rate < 0: raise ValueError("rate must be >= 0") else: @@ -98,11 +98,11 @@ def rate(self, new_rate): self._rate = float(new_rate) @property - def autojump_threshold(self): + def autojump_threshold(self) -> float: return self._autojump_threshold @autojump_threshold.setter - def autojump_threshold(self, new_autojump_threshold): + def autojump_threshold(self, new_autojump_threshold: float) -> None: self._autojump_threshold = float(new_autojump_threshold) self._try_resync_autojump_threshold() @@ -112,7 +112,7 @@ def autojump_threshold(self, new_autojump_threshold): # API. Discussion: # # https://github.com/python-trio/trio/issues/1587 - def _try_resync_autojump_threshold(self): + def _try_resync_autojump_threshold(self) -> None: try: runner = GLOBAL_RUN_CONTEXT.runner if runner.is_guest: @@ -124,24 +124,24 @@ def _try_resync_autojump_threshold(self): # Invoked by the run loop when runner.clock_autojump_threshold is # exceeded. 
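The autojump hook mentioned in the comment above is what makes ``MockClock(autojump_threshold=0)`` handy in tests: whenever every task is blocked, the virtual clock jumps to the next deadline. An illustrative sketch, not taken from this diff:

    # Sketch: a one-hour sleep finishes immediately under the autojump clock.
    import trio
    import trio.testing


    async def main() -> None:
        await trio.sleep(3600)  # returns "instantly" in virtual time
        print("an hour of virtual time passed")


    trio.run(main, clock=trio.testing.MockClock(autojump_threshold=0))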
- def _autojump(self): + def _autojump(self) -> None: statistics = _core.current_statistics() jump = statistics.seconds_to_next_deadline if 0 < jump < inf: self.jump(jump) - def _real_to_virtual(self, real): + def _real_to_virtual(self, real: float) -> float: real_offset = real - self._real_base virtual_offset = self._rate * real_offset return self._virtual_base + virtual_offset - def start_clock(self): + def start_clock(self) -> None: self._try_resync_autojump_threshold() - def current_time(self): + def current_time(self) -> float: return self._real_to_virtual(self._real_clock()) - def deadline_to_sleep_time(self, deadline): + def deadline_to_sleep_time(self, deadline: float) -> float: virtual_timeout = deadline - self.current_time() if virtual_timeout <= 0: return 0 @@ -150,7 +150,7 @@ def deadline_to_sleep_time(self, deadline): else: return 999999999 - def jump(self, seconds): + def jump(self, seconds) -> None: """Manually advance the clock by the given number of seconds. Args: diff --git a/trio/_core/_multierror.py b/trio/_core/_multierror.py index 9e69928162..3c6ebb789f 100644 --- a/trio/_core/_multierror.py +++ b/trio/_core/_multierror.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import sys import warnings +from typing import TYPE_CHECKING import attr @@ -10,6 +13,8 @@ else: from traceback import print_exception +if TYPE_CHECKING: + from types import TracebackType ################################################################ # MultiError ################################################################ @@ -130,11 +135,16 @@ class MultiErrorCatcher: def __enter__(self): pass - def __exit__(self, etype, exc, tb): - if exc is not None: - filtered_exc = _filter_impl(self._handler, exc) + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> bool | None: + if exc_value is not None: + filtered_exc = _filter_impl(self._handler, exc_value) - if filtered_exc is exc: + if filtered_exc is exc_value: # Let the interpreter re-raise it return False if filtered_exc is None: @@ -154,6 +164,7 @@ def __exit__(self, etype, exc, tb): # delete references from locals to avoid creating cycles # see test_MultiError_catch_doesnt_create_cyclic_garbage del _, filtered_exc, value + return False class MultiError(BaseExceptionGroup): diff --git a/trio/_core/_parking_lot.py b/trio/_core/_parking_lot.py index 69882c787b..74708433da 100644 --- a/trio/_core/_parking_lot.py +++ b/trio/_core/_parking_lot.py @@ -69,18 +69,34 @@ # unpark is called. # # See: https://github.com/python-trio/trio/issues/53 +from __future__ import annotations +import math from collections import OrderedDict +from collections.abc import Iterator +from typing import TYPE_CHECKING import attr from .. import _core from .._util import Final +if TYPE_CHECKING: + from ._run import Task + @attr.s(frozen=True, slots=True) -class _ParkingLotStatistics: - tasks_waiting = attr.ib() +class ParkingLotStatistics: + """An object containing debugging information for a ParkingLot. + + Currently the following fields are defined: + + * ``tasks_waiting`` (int): The number of tasks blocked on this lot's + :meth:`trio.lowlevel.ParkingLot.park` method. 
+ + """ + + tasks_waiting: int = attr.ib() @attr.s(eq=False, hash=False, slots=True) @@ -99,13 +115,13 @@ class ParkingLot(metaclass=Final): # {task: None}, we just want a deque where we can quickly delete random # items - _parked = attr.ib(factory=OrderedDict, init=False) + _parked: OrderedDict[Task, None] = attr.ib(factory=OrderedDict, init=False) - def __len__(self): + def __len__(self) -> int: """Returns the number of parked tasks.""" return len(self._parked) - def __bool__(self): + def __bool__(self) -> bool: """True if there are parked tasks, False otherwise.""" return bool(self._parked) @@ -114,7 +130,7 @@ def __bool__(self): # line (for false wakeups), then we could have it return a ticket that # abstracts the "place in line" concept. @_core.enable_ki_protection - async def park(self): + async def park(self) -> None: """Park the current task until woken by a call to :meth:`unpark` or :meth:`unpark_all`. @@ -129,13 +145,20 @@ def abort_fn(_): await _core.wait_task_rescheduled(abort_fn) - def _pop_several(self, count): - for _ in range(min(count, len(self._parked))): + def _pop_several(self, count: int | float) -> Iterator[Task]: + if isinstance(count, float): + if math.isinf(count): + count = len(self._parked) + else: + raise ValueError("Cannot pop a non-integer number of tasks.") + else: + count = min(count, len(self._parked)) + for _ in range(count): task, _ = self._parked.popitem(last=False) yield task @_core.enable_ki_protection - def unpark(self, *, count=1): + def unpark(self, *, count: int | float = 1) -> list[Task]: """Unpark one or more tasks. This wakes up ``count`` tasks that are blocked in :meth:`park`. If @@ -143,7 +166,7 @@ def unpark(self, *, count=1): are available and then returns successfully. Args: - count (int): the number of tasks to unpark. + count (int | math.inf): the number of tasks to unpark. """ tasks = list(self._pop_several(count)) @@ -151,12 +174,12 @@ def unpark(self, *, count=1): _core.reschedule(task) return tasks - def unpark_all(self): + def unpark_all(self) -> list[Task]: """Unpark all parked tasks.""" return self.unpark(count=len(self)) @_core.enable_ki_protection - def repark(self, new_lot, *, count=1): + def repark(self, new_lot: ParkingLot, *, count: int | float = 1) -> None: """Move parked tasks from one :class:`ParkingLot` object to another. This dequeues ``count`` tasks from one lot, and requeues them on @@ -186,7 +209,7 @@ async def main(): Args: new_lot (ParkingLot): the parking lot to move tasks to. - count (int): the number of tasks to move. + count (int|math.inf): the number of tasks to move. """ if not isinstance(new_lot, ParkingLot): @@ -195,7 +218,7 @@ async def main(): new_lot._parked[task] = None task.custom_sleep_data = new_lot - def repark_all(self, new_lot): + def repark_all(self, new_lot: ParkingLot) -> None: """Move all parked tasks from one :class:`ParkingLot` object to another. @@ -204,7 +227,7 @@ def repark_all(self, new_lot): """ return self.repark(new_lot, count=len(self)) - def statistics(self): + def statistics(self) -> ParkingLotStatistics: """Return an object containing debugging information. Currently the following fields are defined: @@ -213,4 +236,4 @@ def statistics(self): :meth:`park` method. 
""" - return _ParkingLotStatistics(tasks_waiting=len(self._parked)) + return ParkingLotStatistics(tasks_waiting=len(self._parked)) diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 0b6d326546..585dc4aa41 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -10,7 +10,7 @@ import threading import warnings from collections import deque -from collections.abc import Callable, Iterator +from collections.abc import Callable, Coroutine, Iterator from contextlib import AbstractAsyncContextManager, contextmanager from contextvars import copy_context from heapq import heapify, heappop, heappush @@ -45,7 +45,11 @@ if sys.version_info < (3, 11): from exceptiongroup import BaseExceptionGroup +from types import FrameType + if TYPE_CHECKING: + import contextvars + # An unfortunate name collision here with trio._util.Final from typing_extensions import Final as FinalT @@ -272,7 +276,7 @@ class CancelStatus: # Our associated cancel scope. Can be any object with attributes # `deadline`, `shield`, and `cancel_called`, but in current usage # is always a CancelScope object. Must not be None. - _scope = attr.ib() + _scope: CancelScope = attr.ib() # True iff the tasks in self._tasks should receive cancellations # when they checkpoint. Always True when scope.cancel_called is True; @@ -282,31 +286,31 @@ class CancelStatus: # effectively cancelled due to the cancel scope two levels out # becoming cancelled, but then the cancel scope one level out # becomes shielded so we're not effectively cancelled anymore. - effectively_cancelled = attr.ib(default=False) + effectively_cancelled: bool = attr.ib(default=False) # The CancelStatus whose cancellations can propagate to us; we # become effectively cancelled when they do, unless scope.shield # is True. May be None (for the outermost CancelStatus in a call # to trio.run(), briefly during TaskStatus.started(), or during # recovery from mis-nesting of cancel scopes). - _parent = attr.ib(default=None, repr=False) + _parent: CancelStatus | None = attr.ib(default=None, repr=False) # All of the CancelStatuses that have this CancelStatus as their parent. - _children = attr.ib(factory=set, init=False, repr=False) + _children: set[CancelStatus] = attr.ib(factory=set, init=False, repr=False) # Tasks whose cancellation state is currently tied directly to # the cancellation state of this CancelStatus object. Don't modify # this directly; instead, use Task._activate_cancel_status(). # Invariant: all(task._cancel_status is self for task in self._tasks) - _tasks = attr.ib(factory=set, init=False, repr=False) + _tasks: set[Task] = attr.ib(factory=set, init=False, repr=False) # Set to True on still-active cancel statuses that are children # of a cancel status that's been closed. This is used to permit # recovery from mis-nested cancel scopes (well, at least enough # recovery to show a useful traceback). 
- abandoned_by_misnesting = attr.ib(default=False, init=False, repr=False) + abandoned_by_misnesting: bool = attr.ib(default=False, init=False, repr=False) - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: if self._parent is not None: self._parent._children.add(self) self.recalculate() @@ -314,11 +318,11 @@ def __attrs_post_init__(self): # parent/children/tasks accessors are used by TaskStatus.started() @property - def parent(self): + def parent(self) -> CancelStatus | None: return self._parent @parent.setter - def parent(self, parent): + def parent(self, parent: CancelStatus) -> None: if self._parent is not None: self._parent._children.remove(self) self._parent = parent @@ -327,14 +331,14 @@ def parent(self, parent): self.recalculate() @property - def children(self): + def children(self) -> frozenset[CancelStatus]: return frozenset(self._children) @property - def tasks(self): + def tasks(self) -> frozenset[Task]: return frozenset(self._tasks) - def encloses(self, other): + def encloses(self, other: CancelStatus | None) -> bool: """Returns true if this cancel status is a direct or indirect parent of cancel status *other*, or if *other* is *self*. """ @@ -344,7 +348,7 @@ def encloses(self, other): other = other.parent return False - def close(self): + def close(self) -> None: self.parent = None # now we're not a child of self.parent anymore if self._tasks or self._children: # Cancel scopes weren't exited in opposite order of being @@ -404,7 +408,7 @@ def _mark_abandoned(self): for child in self._children: child._mark_abandoned() - def effective_deadline(self): + def effective_deadline(self) -> float: if self.effectively_cancelled: return -inf if self._parent is None or self._scope.shield: @@ -852,10 +856,10 @@ class NurseryManager: """ - strict_exception_groups = attr.ib(default=False) + strict_exception_groups: bool = attr.ib(default=False) @enable_ki_protection - async def __aenter__(self): + async def __aenter__(self) -> Nursery: self._scope = CancelScope() self._scope.__enter__() self._nursery = Nursery._create( @@ -864,7 +868,12 @@ async def __aenter__(self): return self._nursery @enable_ki_protection - async def __aexit__(self, etype, exc, tb): + async def __aexit__( + self, + etype: type[BaseException] | None, + exc: BaseException | None, + tb: TracebackType | None, + ) -> bool: new_exc = await self._nursery._nested_child_finished(exc) # Tracebacks show the 'raise' line below out of context, so let's give # this variable a name that makes sense out of context. @@ -887,13 +896,21 @@ async def __aexit__(self, etype, exc, tb): # see test_simple_cancel_scope_usage_doesnt_create_cyclic_garbage del _, combined_error_from_nursery, value, new_exc - def __enter__(self): - raise RuntimeError( - "use 'async with open_nursery(...)', not 'with open_nursery(...)'" - ) + # make sure these raise errors in static analysis if called + if not TYPE_CHECKING: + + def __enter__(self) -> NoReturn: + raise RuntimeError( + "use 'async with open_nursery(...)', not 'with open_nursery(...)'" + ) - def __exit__(self): # pragma: no cover - assert False, """Never called, but should be defined""" + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> NoReturn: # pragma: no cover + raise AssertionError("Never called, but should be defined") def open_nursery( @@ -939,7 +956,12 @@ class Nursery(metaclass=NoPublicConstructor): in response to some external event. 
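The newly typed ``NurseryManager.__aexit__`` above is where child exceptions get collected and wrapped. A short sketch of the ``strict_exception_groups=True`` behaviour it implements; illustrative only, ``boom`` is a made-up helper, and on Python < 3.11 ``BaseExceptionGroup`` comes from the ``exceptiongroup`` backport that Trio already depends on:

    import sys

    import trio

    if sys.version_info < (3, 11):
        from exceptiongroup import BaseExceptionGroup


    async def boom() -> None:
        raise ValueError("child failed")


    async def main() -> None:
        try:
            # With strict_exception_groups=True the nursery always raises a
            # group, even when only a single child task failed.
            async with trio.open_nursery(strict_exception_groups=True) as nursery:
                nursery.start_soon(boom)
        except BaseExceptionGroup as eg:
            print("caught group with", len(eg.exceptions), "exception(s)")


    trio.run(main)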
""" - def __init__(self, parent_task, cancel_scope, strict_exception_groups): + def __init__( + self, + parent_task: Task, + cancel_scope: CancelScope, + strict_exception_groups: bool, + ): self._parent_task = parent_task self._strict_exception_groups = strict_exception_groups parent_task._child_nurseries.append(self) @@ -950,8 +972,8 @@ def __init__(self, parent_task, cancel_scope, strict_exception_groups): # children. self.cancel_scope = cancel_scope assert self.cancel_scope._cancel_status is self._cancel_status - self._children = set() - self._pending_excs = [] + self._children: set[Task] = set() + self._pending_excs: list[BaseException] = [] # The "nested child" is how this code refers to the contents of the # nursery's 'async with' block, which acts like a child Task in all # the ways we can make it. @@ -961,17 +983,17 @@ def __init__(self, parent_task, cancel_scope, strict_exception_groups): self._closed = False @property - def child_tasks(self): + def child_tasks(self) -> frozenset[Task]: """(`frozenset`): Contains all the child :class:`~trio.lowlevel.Task` objects which are still running.""" return frozenset(self._children) @property - def parent_task(self): + def parent_task(self) -> Task: "(`~trio.lowlevel.Task`): The Task that opened this nursery." return self._parent_task - def _add_exc(self, exc): + def _add_exc(self, exc: BaseException) -> None: self._pending_excs.append(exc) self.cancel_scope.cancel() @@ -1133,7 +1155,7 @@ async def async_fn(arg1, arg2, *, task_status=trio.TASK_STATUS_IGNORED): self._pending_starts -= 1 self._check_nursery_closed() - def __del__(self): + def __del__(self) -> None: assert not self._children @@ -1144,12 +1166,11 @@ def __del__(self): @attr.s(eq=False, hash=False, repr=False, slots=True) class Task(metaclass=NoPublicConstructor): - _parent_nursery = attr.ib() - coro = attr.ib() + _parent_nursery: Nursery | None = attr.ib() + coro: Coroutine[Any, Outcome[object], Any] = attr.ib() _runner = attr.ib() - name = attr.ib() - # PEP 567 contextvars context - context = attr.ib() + name: str = attr.ib() + context: contextvars.Context = attr.ib() _counter: int = attr.ib(init=False, factory=itertools.count().__next__) # Invariant: @@ -1165,24 +1186,26 @@ class Task(metaclass=NoPublicConstructor): # Tasks start out unscheduled. _next_send_fn = attr.ib(default=None) _next_send = attr.ib(default=None) - _abort_func = attr.ib(default=None) - custom_sleep_data = attr.ib(default=None) + _abort_func: Callable[[Callable[[], NoReturn]], Abort] | None = attr.ib( + default=None + ) + custom_sleep_data: Any = attr.ib(default=None) # For introspection and nursery.start() - _child_nurseries = attr.ib(factory=list) - _eventual_parent_nursery = attr.ib(default=None) + _child_nurseries: list[Nursery] = attr.ib(factory=list) + _eventual_parent_nursery: Nursery | None = attr.ib(default=None) # these are counts of how many cancel/schedule points this task has # executed, for assert{_no,}_checkpoints # XX maybe these should be exposed as part of a statistics() method? - _cancel_points = attr.ib(default=0) - _schedule_points = attr.ib(default=0) + _cancel_points: int = attr.ib(default=0) + _schedule_points: int = attr.ib(default=0) - def __repr__(self): + def __repr__(self) -> str: return f"" @property - def parent_nursery(self): + def parent_nursery(self) -> Nursery | None: """The nursery this task is inside (or None if this is the "init" task). 
@@ -1193,7 +1216,7 @@ def parent_nursery(self): return self._parent_nursery @property - def eventual_parent_nursery(self): + def eventual_parent_nursery(self) -> Nursery | None: """The nursery this task will be inside after it calls ``task_status.started()``. @@ -1205,7 +1228,7 @@ def eventual_parent_nursery(self): return self._eventual_parent_nursery @property - def child_nurseries(self): + def child_nurseries(self) -> list[Nursery]: """The nurseries this task contains. This is a list, with outer nurseries before inner nurseries. @@ -1213,7 +1236,7 @@ def child_nurseries(self): """ return list(self._child_nurseries) - def iter_await_frames(self): + def iter_await_frames(self) -> Iterator[tuple[FrameType, int]]: """Iterates recursively over the coroutine-like objects this task is waiting on, yielding the frame and line number at each frame. @@ -1233,7 +1256,8 @@ def print_stack_for_task(task): print("".join(ss.format())) """ - coro = self.coro + # ignore static typing as we're doing lots of dynamic introspection + coro: Any = self.coro while coro is not None: if hasattr(coro, "cr_frame"): # A real coroutine @@ -1266,9 +1290,9 @@ def print_stack_for_task(task): # The CancelStatus object that is currently active for this task. # Don't change this directly; instead, use _activate_cancel_status(). - _cancel_status = attr.ib(default=None, repr=False) + _cancel_status: CancelStatus = attr.ib(default=None, repr=False) - def _activate_cancel_status(self, cancel_status): + def _activate_cancel_status(self, cancel_status: CancelStatus) -> None: if self._cancel_status is not None: self._cancel_status._tasks.remove(self) self._cancel_status = cancel_status @@ -1277,11 +1301,16 @@ def _activate_cancel_status(self, cancel_status): if self._cancel_status.effectively_cancelled: self._attempt_delivery_of_any_pending_cancel() - def _attempt_abort(self, raise_cancel): + def _attempt_abort(self, raise_cancel: Callable[[], NoReturn]) -> None: # Either the abort succeeds, in which case we will reschedule the # task, or else it fails, in which case it will worry about # rescheduling itself (hopefully eventually calling reraise to raise # the given exception, but not necessarily). + + # This is only called by the functions immediately below, which both check + # `self.abort_func is not None`. 
+ assert self._abort_func is not None, "FATAL INTERNAL ERROR" + success = self._abort_func(raise_cancel) if type(success) is not Abort: raise TrioInternalError("abort function must return Abort enum") @@ -1291,7 +1320,7 @@ def _attempt_abort(self, raise_cancel): if success is Abort.SUCCEEDED: self._runner.reschedule(self, capture(raise_cancel)) - def _attempt_delivery_of_any_pending_cancel(self): + def _attempt_delivery_of_any_pending_cancel(self) -> None: if self._abort_func is None: return if not self._cancel_status.effectively_cancelled: @@ -1302,12 +1331,12 @@ def raise_cancel(): self._attempt_abort(raise_cancel) - def _attempt_delivery_of_pending_ki(self): + def _attempt_delivery_of_pending_ki(self) -> None: assert self._runner.ki_pending if self._abort_func is None: return - def raise_cancel(): + def raise_cancel() -> NoReturn: self._runner.ki_pending = False raise KeyboardInterrupt @@ -2433,17 +2462,17 @@ def unrolled_run( class _TaskStatusIgnored: - def __repr__(self): + def __repr__(self) -> str: return "TASK_STATUS_IGNORED" - def started(self, value=None): + def started(self, value: object = None) -> None: pass TASK_STATUS_IGNORED: FinalT = _TaskStatusIgnored() -def current_task(): +def current_task() -> Task: """Return the :class:`Task` object representing the current task. Returns: @@ -2457,7 +2486,7 @@ def current_task(): raise RuntimeError("must be called from async context") from None -def current_effective_deadline(): +def current_effective_deadline() -> float: """Returns the current effective deadline for the current task. This function examines all the cancellation scopes that are currently in @@ -2484,7 +2513,7 @@ def current_effective_deadline(): return current_task()._cancel_status.effective_deadline() -async def checkpoint(): +async def checkpoint() -> None: """A pure :ref:`checkpoint `. This checks for cancellation and allows other tasks to be scheduled, @@ -2511,7 +2540,7 @@ async def checkpoint(): await _core.wait_task_rescheduled(lambda _: _core.Abort.SUCCEEDED) -async def checkpoint_if_cancelled(): +async def checkpoint_if_cancelled() -> None: """Issue a :ref:`checkpoint ` if the calling context has been cancelled. diff --git a/trio/_core/_tests/test_parking_lot.py b/trio/_core/_tests/test_parking_lot.py index db3fc76709..3f03fdbade 100644 --- a/trio/_core/_tests/test_parking_lot.py +++ b/trio/_core/_tests/test_parking_lot.py @@ -72,6 +72,9 @@ async def waiter(i, lot): ) lot.unpark_all() + with pytest.raises(ValueError): + lot.unpark(count=1.5) + async def cancellable_waiter(name, lot, scopes, record): with _core.CancelScope() as scope: diff --git a/trio/_dtls.py b/trio/_dtls.py index f46fc4fda0..722a9499f8 100644 --- a/trio/_dtls.py +++ b/trio/_dtls.py @@ -6,6 +6,8 @@ # Hopefully they fix this before implementing DTLS 1.3, because it's a very different # protocol, and it's probably impossible to pull tricks like we do here. +from __future__ import annotations + import enum import errno import hmac @@ -14,12 +16,16 @@ import warnings import weakref from itertools import count +from typing import TYPE_CHECKING import attr import trio from trio._util import Final, NoPublicConstructor +if TYPE_CHECKING: + from types import TracebackType + MAX_UDP_PACKET_SIZE = 65527 @@ -809,7 +815,7 @@ def _check_replaced(self): # DTLS where packets are all independent and can be lost anyway. We do at least need # to handle receiving it properly though, which might be easier if we send it... - def close(self): + def close(self) -> None: """Close this connection. 
`DTLSChannel`\\s don't actually own any OS-level resources – the @@ -833,8 +839,13 @@ def close(self): def __enter__(self): return self - def __exit__(self, *args): - self.close() + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + return self.close() async def aclose(self): """Close this connection, but asynchronously. @@ -1121,6 +1132,8 @@ def __init__(self, socket, *, incoming_packets_buffer=10): global SSL from OpenSSL import SSL + # TODO: create a `self._initialized` for `__del__`, so self.socket can be typed + # as trio.socket.SocketType and `is not None` checks can be removed. self.socket = None # for __del__, in case the next line raises if socket.type != trio.socket.SOCK_DGRAM: raise ValueError("DTLS requires a SOCK_DGRAM socket") @@ -1167,12 +1180,16 @@ def __del__(self): f"unclosed DTLS endpoint {self!r}", ResourceWarning, source=self ) - def close(self): + def close(self) -> None: """Close this socket, and all associated DTLS connections. This object can also be used as a context manager. """ + # Do nothing if this object was never fully constructed + if self.socket is None: # pragma: no cover + return + self._closed = True self.socket.close() for stream in list(self._streams.values()): @@ -1182,8 +1199,13 @@ def close(self): def __enter__(self): return self - def __exit__(self, *args): - self.close() + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + return self.close() def _check_closed(self): if self._closed: diff --git a/trio/_highlevel_generic.py b/trio/_highlevel_generic.py index 2ae381c8e2..e1ac378c6a 100644 --- a/trio/_highlevel_generic.py +++ b/trio/_highlevel_generic.py @@ -1,12 +1,19 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + import attr import trio from trio._util import Final +if TYPE_CHECKING: + from .abc import SendStream, ReceiveStream, AsyncResource + from .abc import HalfCloseableStream -async def aclose_forcefully(resource): +async def aclose_forcefully(resource: AsyncResource) -> None: """Close an async resource or async generator immediately, without blocking to do any graceful cleanup. @@ -72,18 +79,18 @@ class StapledStream(HalfCloseableStream, metaclass=Final): """ - send_stream = attr.ib() - receive_stream = attr.ib() + send_stream: SendStream = attr.ib() + receive_stream: ReceiveStream = attr.ib() - async def send_all(self, data): + async def send_all(self, data: bytes | bytearray | memoryview) -> None: """Calls ``self.send_stream.send_all``.""" return await self.send_stream.send_all(data) - async def wait_send_all_might_not_block(self): + async def wait_send_all_might_not_block(self) -> None: """Calls ``self.send_stream.wait_send_all_might_not_block``.""" return await self.send_stream.wait_send_all_might_not_block() - async def send_eof(self): + async def send_eof(self) -> None: """Shuts down the send side of the stream. If ``self.send_stream.send_eof`` exists, then calls it. Otherwise, @@ -91,15 +98,18 @@ async def send_eof(self): """ if hasattr(self.send_stream, "send_eof"): - return await self.send_stream.send_eof() + # send_stream.send_eof() is not defined in Trio, this should maybe be + # redesigned so it's possible to type it. 
+ return await self.send_stream.send_eof() # type: ignore[no-any-return] else: return await self.send_stream.aclose() - async def receive_some(self, max_bytes=None): + # we intentionally accept more types from the caller than we support returning + async def receive_some(self, max_bytes: int | None = None) -> bytes: """Calls ``self.receive_stream.receive_some``.""" return await self.receive_stream.receive_some(max_bytes) - async def aclose(self): + async def aclose(self) -> None: """Calls ``aclose`` on both underlying streams.""" try: await self.send_stream.aclose() diff --git a/trio/_highlevel_socket.py b/trio/_highlevel_socket.py index ce23de17d7..ce96153805 100644 --- a/trio/_highlevel_socket.py +++ b/trio/_highlevel_socket.py @@ -1,7 +1,9 @@ # "High-level" networking interface +from __future__ import annotations import errno from contextlib import contextmanager +from typing import TYPE_CHECKING import trio @@ -9,6 +11,9 @@ from ._util import ConflictDetector, Final from .abc import HalfCloseableStream, Listener +if TYPE_CHECKING: + from ._socket import _SocketType as SocketType + # XX TODO: this number was picked arbitrarily. We should do experiments to # tune it. (Or make it dynamic -- one idea is to start small and increase it # if we observe single reads filling up the whole buffer, at least within some @@ -58,7 +63,7 @@ class SocketStream(HalfCloseableStream, metaclass=Final): """ - def __init__(self, socket): + def __init__(self, socket: SocketType): if not isinstance(socket, tsocket.SocketType): raise TypeError("SocketStream requires a Trio socket object") if socket.type != tsocket.SOCK_STREAM: @@ -109,14 +114,14 @@ async def send_all(self, data): sent = await self.socket.send(remaining) total_sent += sent - async def wait_send_all_might_not_block(self): + async def wait_send_all_might_not_block(self) -> None: with self._send_conflict_detector: if self.socket.fileno() == -1: raise trio.ClosedResourceError with _translate_socket_errors_to_stream_errors(): await self.socket.wait_writable() - async def send_eof(self): + async def send_eof(self) -> None: with self._send_conflict_detector: await trio.lowlevel.checkpoint() # On macOS, calling shutdown a second time raises ENOTCONN, but @@ -126,7 +131,7 @@ async def send_eof(self): with _translate_socket_errors_to_stream_errors(): self.socket.shutdown(tsocket.SHUT_WR) - async def receive_some(self, max_bytes=None): + async def receive_some(self, max_bytes: int | None = None) -> bytes: if max_bytes is None: max_bytes = DEFAULT_RECEIVE_SIZE if max_bytes < 1: @@ -134,7 +139,7 @@ async def receive_some(self, max_bytes=None): with _translate_socket_errors_to_stream_errors(): return await self.socket.recv(max_bytes) - async def aclose(self): + async def aclose(self) -> None: self.socket.close() await trio.lowlevel.checkpoint() @@ -331,7 +336,7 @@ class SocketListener(Listener[SocketStream], metaclass=Final): """ - def __init__(self, socket): + def __init__(self, socket: SocketType): if not isinstance(socket, tsocket.SocketType): raise TypeError("SocketListener requires a Trio socket object") if socket.type != tsocket.SOCK_STREAM: @@ -347,7 +352,7 @@ def __init__(self, socket): self.socket = socket - async def accept(self): + async def accept(self) -> SocketStream: """Accept an incoming connection. 
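The ``StapledStream`` methods annotated above are easiest to see in action with the ``trio.testing`` memory streams. An illustrative sketch, not part of the diff:

    # StapledStream glues an arbitrary SendStream and ReceiveStream into one
    # bidirectional Stream.  Here the two halves are connected to each other,
    # so the stapled stream simply echoes back whatever is sent into it.
    import trio
    import trio.testing


    async def main() -> None:
        send_stream, receive_stream = trio.testing.memory_stream_one_way_pair()
        stapled = trio.StapledStream(send_stream, receive_stream)
        await stapled.send_all(b"hello")
        print(await stapled.receive_some())  # b"hello"
        await stapled.aclose()


    trio.run(main)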
Returns: @@ -375,7 +380,7 @@ async def accept(self): else: return SocketStream(sock) - async def aclose(self): + async def aclose(self) -> None: """Close this listener and its underlying socket.""" self.socket.close() await trio.lowlevel.checkpoint() diff --git a/trio/_socket.py b/trio/_socket.py index b4ee4a7199..eaf0e04d15 100644 --- a/trio/_socket.py +++ b/trio/_socket.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import select import socket as _stdlib_socket @@ -11,6 +13,12 @@ from . import _core +if TYPE_CHECKING: + from collections.abc import Iterable + from types import TracebackType + + from typing_extensions import Self + # Usage: # @@ -33,8 +41,13 @@ def _is_blocking_io_error(self, exc): async def __aenter__(self): await trio.lowlevel.checkpoint_if_cancelled() - async def __aexit__(self, etype, value, tb): - if value is not None and self._is_blocking_io_error(value): + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> bool: + if exc_value is not None and self._is_blocking_io_error(exc_value): # Discard the exception and fall through to the code below the # block return True @@ -430,7 +443,7 @@ def __init__(self): class _SocketType(SocketType): - def __init__(self, sock): + def __init__(self, sock: _stdlib_socket.socket): if type(sock) is not _stdlib_socket.socket: # For example, ssl.SSLSocket subclasses socket.socket, but we # certainly don't want to blindly wrap one of those. @@ -473,44 +486,49 @@ def __getattr__(self, name): return getattr(self._sock, name) raise AttributeError(name) - def __dir__(self): - return super().__dir__() + list(self._forward) + def __dir__(self) -> Iterable[str]: + return [*super().__dir__(), *self._forward] - def __enter__(self): + def __enter__(self) -> Self: return self - def __exit__(self, *exc_info): - return self._sock.__exit__(*exc_info) + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + return self._sock.__exit__(exc_type, exc_value, traceback) @property - def family(self): + def family(self) -> _stdlib_socket.AddressFamily: return self._sock.family @property - def type(self): + def type(self) -> _stdlib_socket.SocketKind: return self._sock.type @property - def proto(self): + def proto(self) -> int: return self._sock.proto @property - def did_shutdown_SHUT_WR(self): + def did_shutdown_SHUT_WR(self) -> bool: return self._did_shutdown_SHUT_WR - def __repr__(self): + def __repr__(self) -> str: return repr(self._sock).replace("socket.socket", "trio.socket.socket") - def dup(self): + def dup(self) -> _SocketType: """Same as :meth:`socket.socket.dup`.""" return _SocketType(self._sock.dup()) - def close(self): + def close(self) -> None: if self._sock.fileno() != -1: trio.lowlevel.notify_closing(self._sock) self._sock.close() - async def bind(self, address): + async def bind(self, address: tuple[object, ...] | str | bytes) -> None: address = await self._resolve_address_nocp(address, local=True) if ( hasattr(_stdlib_socket, "AF_UNIX") @@ -519,7 +537,8 @@ async def bind(self, address): ): # Use a thread for the filesystem traversal (unless it's an # abstract domain socket) - return await trio.to_thread.run_sync(self._sock.bind, address) + # remove the `type: ignore` when run.sync is typed. 
+ return await trio.to_thread.run_sync(self._sock.bind, address) # type: ignore[no-any-return] else: # POSIX actually says that bind can return EWOULDBLOCK and # complete asynchronously, like connect. But in practice AFAICT @@ -528,14 +547,14 @@ async def bind(self, address): await trio.lowlevel.checkpoint() return self._sock.bind(address) - def shutdown(self, flag): + def shutdown(self, flag: int) -> None: # no need to worry about return value b/c always returns None: self._sock.shutdown(flag) # only do this if the call succeeded: if flag in [_stdlib_socket.SHUT_WR, _stdlib_socket.SHUT_RDWR]: self._did_shutdown_SHUT_WR = True - def is_readable(self): + def is_readable(self) -> bool: # use select.select on Windows, and select.poll everywhere else if sys.platform == "win32": rready, _, _ = select.select([self._sock], [], [], 0) @@ -544,7 +563,7 @@ def is_readable(self): p.register(self._sock, select.POLLIN) return bool(p.poll(0)) - async def wait_writable(self): + async def wait_writable(self) -> None: await _core.wait_writable(self._sock) async def _resolve_address_nocp(self, address, *, local): @@ -684,7 +703,13 @@ async def connect(self, address): # recv ################################################################ - recv = _make_simple_sock_method_wrapper("recv", _core.wait_readable) + if TYPE_CHECKING: + + async def recv(self, buffersize: int, flags: int = 0) -> bytes: + ... + + else: + recv = _make_simple_sock_method_wrapper("recv", _core.wait_readable) ################################################################ # recv_into diff --git a/trio/_sync.py b/trio/_sync.py index 60d7074d9e..5a7f240d5e 100644 --- a/trio/_sync.py +++ b/trio/_sync.py @@ -1,4 +1,7 @@ +from __future__ import annotations + import math +from typing import TYPE_CHECKING import attr @@ -8,10 +11,25 @@ from ._core import ParkingLot, enable_ki_protection from ._util import Final +if TYPE_CHECKING: + from types import TracebackType + + from ._core import Task + from ._core._parking_lot import ParkingLotStatistics + + +@attr.s(frozen=True, slots=True) +class EventStatistics: + """An object containing debugging information. + + Currently the following fields are defined: -@attr.s(frozen=True) -class _EventStatistics: - tasks_waiting = attr.ib() + * ``tasks_waiting``: The number of tasks blocked on this event's + :meth:`trio.Event.wait` method. + + """ + + tasks_waiting: int = attr.ib() @attr.s(repr=False, eq=False, hash=False, slots=True) @@ -41,15 +59,15 @@ class Event(metaclass=Final): """ - _tasks = attr.ib(factory=set, init=False) - _flag = attr.ib(default=False, init=False) + _tasks: set[Task] = attr.ib(factory=set, init=False) + _flag: bool = attr.ib(default=False, init=False) - def is_set(self): + def is_set(self) -> bool: """Return the current value of the internal flag.""" return self._flag @enable_ki_protection - def set(self): + def set(self) -> None: """Set the internal flag value to True, and wake any waiting tasks.""" if not self._flag: self._flag = True @@ -57,7 +75,7 @@ def set(self): _core.reschedule(task) self._tasks.clear() - async def wait(self): + async def wait(self) -> None: """Block until the internal flag value becomes True. If it's already True, then this method returns immediately. @@ -75,7 +93,7 @@ def abort_fn(_): await _core.wait_task_rescheduled(abort_fn) - def statistics(self): + def statistics(self) -> EventStatistics: """Return an object containing debugging information. Currently the following fields are defined: @@ -84,25 +102,49 @@ def statistics(self): :meth:`wait` method. 
""" - return _EventStatistics(tasks_waiting=len(self._tasks)) + return EventStatistics(tasks_waiting=len(self._tasks)) +# TODO: type this with a Protocol to get rid of type: ignore, see +# https://github.com/python-trio/trio/pull/2682#discussion_r1259097422 class AsyncContextManagerMixin: @enable_ki_protection - async def __aenter__(self): - await self.acquire() + async def __aenter__(self) -> None: + await self.acquire() # type: ignore[attr-defined] @enable_ki_protection - async def __aexit__(self, *args): - self.release() + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + self.release() # type: ignore[attr-defined] + + +@attr.s(frozen=True, slots=True) +class CapacityLimiterStatistics: + """An object containing debugging information. + + Currently the following fields are defined: + + * ``borrowed_tokens``: The number of tokens currently borrowed from + the sack. + * ``total_tokens``: The total number of tokens in the sack. Usually + this will be larger than ``borrowed_tokens``, but it's possibly for + it to be smaller if :attr:`trio.CapacityLimiter.total_tokens` was recently decreased. + * ``borrowers``: A list of all tasks or other entities that currently + hold a token. + * ``tasks_waiting``: The number of tasks blocked on this + :class:`CapacityLimiter`\'s :meth:`trio.CapacityLimiter.acquire` or + :meth:`trio.CapacityLimiter.acquire_on_behalf_of` methods. + """ -@attr.s(frozen=True) -class _CapacityLimiterStatistics: - borrowed_tokens = attr.ib() - total_tokens = attr.ib() - borrowers = attr.ib() - tasks_waiting = attr.ib() + borrowed_tokens: int = attr.ib() + total_tokens: int | float = attr.ib() + borrowers: list[Task] = attr.ib() + tasks_waiting: int = attr.ib() class CapacityLimiter(AsyncContextManagerMixin, metaclass=Final): @@ -159,22 +201,23 @@ class CapacityLimiter(AsyncContextManagerMixin, metaclass=Final): """ - def __init__(self, total_tokens): + # total_tokens would ideally be int|Literal[math.inf] - but that's not valid typing + def __init__(self, total_tokens: int | float): self._lot = ParkingLot() - self._borrowers = set() + self._borrowers: set[Task] = set() # Maps tasks attempting to acquire -> borrower, to handle on-behalf-of - self._pending_borrowers = {} + self._pending_borrowers: dict[Task, Task] = {} # invoke the property setter for validation - self.total_tokens = total_tokens + self.total_tokens: int | float = total_tokens assert self._total_tokens == total_tokens - def __repr__(self): + def __repr__(self) -> str: return "".format( id(self), len(self._borrowers), self._total_tokens, len(self._lot) ) @property - def total_tokens(self): + def total_tokens(self) -> int | float: """The total capacity available. You can change :attr:`total_tokens` by assigning to this attribute. 
If @@ -189,7 +232,7 @@ def total_tokens(self): return self._total_tokens @total_tokens.setter - def total_tokens(self, new_total_tokens): + def total_tokens(self, new_total_tokens: int | float) -> None: if not isinstance(new_total_tokens, int) and new_total_tokens != math.inf: raise TypeError("total_tokens must be an int or math.inf") if new_total_tokens < 1: @@ -197,23 +240,23 @@ def total_tokens(self, new_total_tokens): self._total_tokens = new_total_tokens self._wake_waiters() - def _wake_waiters(self): + def _wake_waiters(self) -> None: available = self._total_tokens - len(self._borrowers) for woken in self._lot.unpark(count=available): self._borrowers.add(self._pending_borrowers.pop(woken)) @property - def borrowed_tokens(self): + def borrowed_tokens(self) -> int: """The amount of capacity that's currently in use.""" return len(self._borrowers) @property - def available_tokens(self): + def available_tokens(self) -> int | float: """The amount of capacity that's available to use.""" return self.total_tokens - self.borrowed_tokens @enable_ki_protection - def acquire_nowait(self): + def acquire_nowait(self) -> None: """Borrow a token from the sack, without blocking. Raises: @@ -225,7 +268,7 @@ def acquire_nowait(self): self.acquire_on_behalf_of_nowait(trio.lowlevel.current_task()) @enable_ki_protection - def acquire_on_behalf_of_nowait(self, borrower): + def acquire_on_behalf_of_nowait(self, borrower: Task) -> None: """Borrow a token from the sack on behalf of ``borrower``, without blocking. @@ -253,7 +296,7 @@ def acquire_on_behalf_of_nowait(self, borrower): raise trio.WouldBlock @enable_ki_protection - async def acquire(self): + async def acquire(self) -> None: """Borrow a token from the sack, blocking if necessary. Raises: @@ -264,7 +307,7 @@ async def acquire(self): await self.acquire_on_behalf_of(trio.lowlevel.current_task()) @enable_ki_protection - async def acquire_on_behalf_of(self, borrower): + async def acquire_on_behalf_of(self, borrower: Task) -> None: """Borrow a token from the sack on behalf of ``borrower``, blocking if necessary. @@ -293,7 +336,7 @@ async def acquire_on_behalf_of(self, borrower): await trio.lowlevel.cancel_shielded_checkpoint() @enable_ki_protection - def release(self): + def release(self) -> None: """Put a token back into the sack. Raises: @@ -304,7 +347,7 @@ def release(self): self.release_on_behalf_of(trio.lowlevel.current_task()) @enable_ki_protection - def release_on_behalf_of(self, borrower): + def release_on_behalf_of(self, borrower: Task) -> None: """Put a token back into the sack on behalf of ``borrower``. Raises: @@ -319,7 +362,7 @@ def release_on_behalf_of(self, borrower): self._borrowers.remove(borrower) self._wake_waiters() - def statistics(self): + def statistics(self) -> CapacityLimiterStatistics: """Return an object containing debugging information. Currently the following fields are defined: @@ -336,7 +379,7 @@ def statistics(self): :meth:`acquire_on_behalf_of` methods. 
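The ``CapacityLimiter`` annotations above pair with the now-public ``CapacityLimiterStatistics`` class. A usage sketch; ``worker`` is an illustrative name and the example leans on ``trio.testing.wait_all_tasks_blocked``:

    import trio
    import trio.testing


    async def worker(limiter: trio.CapacityLimiter) -> None:
        async with limiter:  # acquire()/release() via the context-manager mixin
            await trio.sleep_forever()


    async def main() -> None:
        limiter = trio.CapacityLimiter(2)
        async with trio.open_nursery() as nursery:
            for _ in range(5):
                nursery.start_soon(worker, limiter)
            await trio.testing.wait_all_tasks_blocked()
            stats = limiter.statistics()
            print(stats.borrowed_tokens, stats.tasks_waiting)  # 2 3
            nursery.cancel_scope.cancel()


    trio.run(main)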
""" - return _CapacityLimiterStatistics( + return CapacityLimiterStatistics( borrowed_tokens=len(self._borrowers), total_tokens=self._total_tokens, # Use a list instead of a frozenset just in case we start to allow @@ -373,7 +416,7 @@ class Semaphore(AsyncContextManagerMixin, metaclass=Final): """ - def __init__(self, initial_value, *, max_value=None): + def __init__(self, initial_value: int, *, max_value: int | None = None): if not isinstance(initial_value, int): raise TypeError("initial_value must be an int") if initial_value < 0: @@ -391,7 +434,7 @@ def __init__(self, initial_value, *, max_value=None): self._value = initial_value self._max_value = max_value - def __repr__(self): + def __repr__(self) -> str: if self._max_value is None: max_value_str = "" else: @@ -401,17 +444,17 @@ def __repr__(self): ) @property - def value(self): + def value(self) -> int: """The current value of the semaphore.""" return self._value @property - def max_value(self): + def max_value(self) -> int | None: """The maximum allowed value. May be None to indicate no limit.""" return self._max_value @enable_ki_protection - def acquire_nowait(self): + def acquire_nowait(self) -> None: """Attempt to decrement the semaphore value, without blocking. Raises: @@ -425,7 +468,7 @@ def acquire_nowait(self): raise trio.WouldBlock @enable_ki_protection - async def acquire(self): + async def acquire(self) -> None: """Decrement the semaphore value, blocking if necessary to avoid letting it drop below zero. @@ -439,7 +482,7 @@ async def acquire(self): await trio.lowlevel.cancel_shielded_checkpoint() @enable_ki_protection - def release(self): + def release(self) -> None: """Increment the semaphore value, possibly waking a task blocked in :meth:`acquire`. @@ -456,7 +499,7 @@ def release(self): raise ValueError("semaphore released too many times") self._value += 1 - def statistics(self): + def statistics(self) -> ParkingLotStatistics: """Return an object containing debugging information. Currently the following fields are defined: @@ -468,19 +511,31 @@ def statistics(self): return self._lot.statistics() -@attr.s(frozen=True) -class _LockStatistics: - locked = attr.ib() - owner = attr.ib() - tasks_waiting = attr.ib() +@attr.s(frozen=True, slots=True) +class LockStatistics: + """An object containing debugging information for a Lock. + + Currently the following fields are defined: + + * ``locked`` (boolean): indicating whether the lock is held. + * ``owner``: the :class:`trio.lowlevel.Task` currently holding the lock, + or None if the lock is not held. + * ``tasks_waiting`` (int): The number of tasks blocked on this lock's + :meth:`trio.Lock.acquire` method. + + """ + + locked: bool = attr.ib() + owner: Task | None = attr.ib() + tasks_waiting: int = attr.ib() @attr.s(eq=False, hash=False, repr=False) class _LockImpl(AsyncContextManagerMixin): - _lot = attr.ib(factory=ParkingLot, init=False) - _owner = attr.ib(default=None, init=False) + _lot: ParkingLot = attr.ib(factory=ParkingLot, init=False) + _owner: Task | None = attr.ib(default=None, init=False) - def __repr__(self): + def __repr__(self) -> str: if self.locked(): s1 = "locked" s2 = f" with {len(self._lot)} waiters" @@ -491,7 +546,7 @@ def __repr__(self): s1, self.__class__.__name__, id(self), s2 ) - def locked(self): + def locked(self) -> bool: """Check whether the lock is currently held. 
Returns: @@ -501,7 +556,7 @@ def locked(self): return self._owner is not None @enable_ki_protection - def acquire_nowait(self): + def acquire_nowait(self) -> None: """Attempt to acquire the lock, without blocking. Raises: @@ -519,7 +574,7 @@ def acquire_nowait(self): raise trio.WouldBlock @enable_ki_protection - async def acquire(self): + async def acquire(self) -> None: """Acquire the lock, blocking if necessary.""" await trio.lowlevel.checkpoint_if_cancelled() try: @@ -533,7 +588,7 @@ async def acquire(self): await trio.lowlevel.cancel_shielded_checkpoint() @enable_ki_protection - def release(self): + def release(self) -> None: """Release the lock. Raises: @@ -548,7 +603,7 @@ def release(self): else: self._owner = None - def statistics(self): + def statistics(self) -> LockStatistics: """Return an object containing debugging information. Currently the following fields are defined: @@ -560,7 +615,7 @@ def statistics(self): :meth:`acquire` method. """ - return _LockStatistics( + return LockStatistics( locked=self.locked(), owner=self._owner, tasks_waiting=len(self._lot) ) @@ -642,10 +697,20 @@ class StrictFIFOLock(_LockImpl, metaclass=Final): """ -@attr.s(frozen=True) -class _ConditionStatistics: - tasks_waiting = attr.ib() - lock_statistics = attr.ib() +@attr.s(frozen=True, slots=True) +class ConditionStatistics: + r"""An object containing debugging information for a Condition. + + Currently the following fields are defined: + + * ``tasks_waiting`` (int): The number of tasks blocked on this condition's + :meth:`trio.Condition.wait` method. + * ``lock_statistics``: The result of calling the underlying + :class:`Lock`\s :meth:`~Lock.statistics` method. + + """ + tasks_waiting: int = attr.ib() + lock_statistics: LockStatistics = attr.ib() class Condition(AsyncContextManagerMixin, metaclass=Final): @@ -663,7 +728,7 @@ class Condition(AsyncContextManagerMixin, metaclass=Final): """ - def __init__(self, lock=None): + def __init__(self, lock: Lock | None = None): if lock is None: lock = Lock() if not type(lock) is Lock: @@ -671,7 +736,7 @@ def __init__(self, lock=None): self._lock = lock self._lot = trio.lowlevel.ParkingLot() - def locked(self): + def locked(self) -> bool: """Check whether the underlying lock is currently held. Returns: @@ -680,7 +745,7 @@ def locked(self): """ return self._lock.locked() - def acquire_nowait(self): + def acquire_nowait(self) -> None: """Attempt to acquire the underlying lock, without blocking. Raises: @@ -689,16 +754,16 @@ def acquire_nowait(self): """ return self._lock.acquire_nowait() - async def acquire(self): + async def acquire(self) -> None: """Acquire the underlying lock, blocking if necessary.""" await self._lock.acquire() - def release(self): + def release(self) -> None: """Release the underlying lock.""" self._lock.release() @enable_ki_protection - async def wait(self): + async def wait(self) -> None: """Wait for another task to call :meth:`notify` or :meth:`notify_all`. @@ -733,7 +798,7 @@ async def wait(self): await self.acquire() raise - def notify(self, n=1): + def notify(self, n: int = 1) -> None: """Wake one or more tasks that are blocked in :meth:`wait`. Args: @@ -747,7 +812,7 @@ def notify(self, n=1): raise RuntimeError("must hold the lock to notify") self._lot.repark(self._lock._lot, count=n) - def notify_all(self): + def notify_all(self) -> None: """Wake all tasks that are currently blocked in :meth:`wait`. 
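As the ``notify()``/``notify_all()`` docstrings above state, the underlying lock must be held while notifying, otherwise ``RuntimeError`` is raised. A short sketch of the intended pattern; ``consumer`` is an illustrative name:

    import trio
    import trio.testing


    async def consumer(cond: trio.Condition) -> None:
        async with cond:
            await cond.wait()          # releases the lock while parked
            print("got the notification")


    async def main() -> None:
        cond = trio.Condition()
        async with trio.open_nursery() as nursery:
            nursery.start_soon(consumer, cond)
            await trio.testing.wait_all_tasks_blocked()
            async with cond:           # hold the lock before notifying
                cond.notify()


    trio.run(main)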
Raises: @@ -758,7 +823,7 @@ def notify_all(self): raise RuntimeError("must hold the lock to notify") self._lot.repark_all(self._lock._lot) - def statistics(self): + def statistics(self) -> ConditionStatistics: r"""Return an object containing debugging information. Currently the following fields are defined: @@ -769,6 +834,6 @@ def statistics(self): :class:`Lock`\s :meth:`~Lock.statistics` method. """ - return _ConditionStatistics( + return ConditionStatistics( tasks_waiting=len(self._lot), lock_statistics=self._lock.statistics() ) diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index 3ab0016386..e51bbe31f5 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -492,4 +492,8 @@ def test_classes_are_final(): continue # ... insert other special cases here ... + # don't care about the *Statistics classes + if name.endswith("Statistics"): + continue + assert isinstance(class_, _util.Final) diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 57b307d1d9..9d7d7aa912 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.8317152103559871, + "completenessScore": 0.8764044943820225, "exportedSymbolCounts": { "withAmbiguousType": 1, - "withKnownType": 514, - "withUnknownType": 103 + "withKnownType": 546, + "withUnknownType": 76 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -45,22 +45,13 @@ } ], "otherSymbolCounts": { - "withAmbiguousType": 14, - "withKnownType": 244, - "withUnknownType": 224 + "withAmbiguousType": 8, + "withKnownType": 433, + "withUnknownType": 135 }, "packageName": "trio", "symbols": [ "trio.__deprecated_attributes__", - "trio._abc.AsyncResource.__aenter__", - "trio._abc.AsyncResource.__aexit__", - "trio._abc.AsyncResource.aclose", - "trio._abc.Channel", - "trio._abc.Clock.current_time", - "trio._abc.Clock.deadline_to_sleep_time", - "trio._abc.Clock.start_clock", - "trio._abc.HalfCloseableStream", - "trio._abc.HalfCloseableStream.send_eof", "trio._abc.HostnameResolver.getaddrinfo", "trio._abc.HostnameResolver.getnameinfo", "trio._abc.Instrument.after_io_wait", @@ -72,58 +63,16 @@ "trio._abc.Instrument.task_exited", "trio._abc.Instrument.task_scheduled", "trio._abc.Instrument.task_spawned", - "trio._abc.Listener", "trio._abc.Listener.accept", - "trio._abc.ReceiveChannel", - "trio._abc.ReceiveChannel.__aiter__", - "trio._abc.ReceiveStream", - "trio._abc.ReceiveStream.__aiter__", - "trio._abc.ReceiveStream.__anext__", - "trio._abc.ReceiveStream.receive_some", - "trio._abc.SendChannel", - "trio._abc.SendStream", - "trio._abc.SendStream.send_all", - "trio._abc.SendStream.wait_send_all_might_not_block", "trio._abc.SocketFactory.socket", - "trio._abc.Stream", - "trio._channel.MemoryReceiveChannel", - "trio._channel.MemorySendChannel", "trio._core._entry_queue.TrioToken.run_sync_soon", "trio._core._local.RunVar.__repr__", "trio._core._local.RunVar.get", "trio._core._local.RunVar.reset", "trio._core._local.RunVar.set", - "trio._core._mock_clock.MockClock", - "trio._core._mock_clock.MockClock.__init__", - "trio._core._mock_clock.MockClock.__repr__", - "trio._core._mock_clock.MockClock.autojump_threshold", - "trio._core._mock_clock.MockClock.current_time", - "trio._core._mock_clock.MockClock.deadline_to_sleep_time", "trio._core._mock_clock.MockClock.jump", - "trio._core._mock_clock.MockClock.rate", - "trio._core._mock_clock.MockClock.start_clock", - 
"trio._core._parking_lot.ParkingLot.__bool__", - "trio._core._parking_lot.ParkingLot.__len__", - "trio._core._parking_lot.ParkingLot.repark_all", - "trio._core._parking_lot.ParkingLot.statistics", - "trio._core._parking_lot.ParkingLot.unpark_all", - "trio._core._run.Nursery.__del__", - "trio._core._run.Nursery.__init__", - "trio._core._run.Nursery.child_tasks", - "trio._core._run.Nursery.parent_task", "trio._core._run.Nursery.start", "trio._core._run.Nursery.start_soon", - "trio._core._run.Task.__repr__", - "trio._core._run.Task.child_nurseries", - "trio._core._run.Task.context", - "trio._core._run.Task.coro", - "trio._core._run.Task.custom_sleep_data", - "trio._core._run.Task.eventual_parent_nursery", - "trio._core._run.Task.iter_await_frames", - "trio._core._run.Task.name", - "trio._core._run.Task.parent_nursery", - "trio._core._run._TaskStatusIgnored.__repr__", - "trio._core._run._TaskStatusIgnored.started", "trio._core._unbounded_queue.UnboundedQueue.__aiter__", "trio._core._unbounded_queue.UnboundedQueue.__anext__", "trio._core._unbounded_queue.UnboundedQueue.__repr__", @@ -132,12 +81,9 @@ "trio._core._unbounded_queue.UnboundedQueue.get_batch_nowait", "trio._core._unbounded_queue.UnboundedQueue.qsize", "trio._core._unbounded_queue.UnboundedQueue.statistics", - "trio._dtls.DTLSChannel", "trio._dtls.DTLSChannel.__enter__", - "trio._dtls.DTLSChannel.__exit__", "trio._dtls.DTLSChannel.__init__", "trio._dtls.DTLSChannel.aclose", - "trio._dtls.DTLSChannel.close", "trio._dtls.DTLSChannel.do_handshake", "trio._dtls.DTLSChannel.get_cleartext_mtu", "trio._dtls.DTLSChannel.receive", @@ -146,34 +92,17 @@ "trio._dtls.DTLSChannel.statistics", "trio._dtls.DTLSEndpoint.__del__", "trio._dtls.DTLSEndpoint.__enter__", - "trio._dtls.DTLSEndpoint.__exit__", "trio._dtls.DTLSEndpoint.__init__", - "trio._dtls.DTLSEndpoint.close", "trio._dtls.DTLSEndpoint.connect", "trio._dtls.DTLSEndpoint.incoming_packets_buffer", "trio._dtls.DTLSEndpoint.serve", "trio._dtls.DTLSEndpoint.socket", - "trio._highlevel_generic.StapledStream", - "trio._highlevel_generic.StapledStream.aclose", - "trio._highlevel_generic.StapledStream.receive_some", - "trio._highlevel_generic.StapledStream.receive_stream", - "trio._highlevel_generic.StapledStream.send_all", - "trio._highlevel_generic.StapledStream.send_eof", - "trio._highlevel_generic.StapledStream.send_stream", - "trio._highlevel_generic.StapledStream.wait_send_all_might_not_block", "trio._highlevel_socket.SocketListener", "trio._highlevel_socket.SocketListener.__init__", - "trio._highlevel_socket.SocketListener.accept", - "trio._highlevel_socket.SocketListener.aclose", - "trio._highlevel_socket.SocketStream", "trio._highlevel_socket.SocketStream.__init__", - "trio._highlevel_socket.SocketStream.aclose", "trio._highlevel_socket.SocketStream.getsockopt", - "trio._highlevel_socket.SocketStream.receive_some", "trio._highlevel_socket.SocketStream.send_all", - "trio._highlevel_socket.SocketStream.send_eof", "trio._highlevel_socket.SocketStream.setsockopt", - "trio._highlevel_socket.SocketStream.wait_send_all_might_not_block", "trio._path.AsyncAutoWrapperType.__init__", "trio._path.AsyncAutoWrapperType.generate_forwards", "trio._path.AsyncAutoWrapperType.generate_iter", @@ -188,11 +117,21 @@ "trio._path.Path.__rtruediv__", "trio._path.Path.__truediv__", "trio._path.Path.open", + "trio._socket._SocketType.__getattr__", + "trio._socket._SocketType.accept", + "trio._socket._SocketType.connect", + "trio._socket._SocketType.recv_into", + "trio._socket._SocketType.recvfrom", + 
"trio._socket._SocketType.recvfrom_into", + "trio._socket._SocketType.recvmsg", + "trio._socket._SocketType.recvmsg_into", + "trio._socket._SocketType.send", + "trio._socket._SocketType.sendmsg", + "trio._socket._SocketType.sendto", "trio._ssl.SSLListener", "trio._ssl.SSLListener.__init__", "trio._ssl.SSLListener.accept", "trio._ssl.SSLListener.aclose", - "trio._ssl.SSLStream", "trio._ssl.SSLStream.__dir__", "trio._ssl.SSLStream.__getattr__", "trio._ssl.SSLStream.__init__", @@ -204,7 +143,6 @@ "trio._ssl.SSLStream.transport_stream", "trio._ssl.SSLStream.unwrap", "trio._ssl.SSLStream.wait_send_all_might_not_block", - "trio._subprocess.Process", "trio._subprocess.Process.__aenter__", "trio._subprocess.Process.__init__", "trio._subprocess.Process.__repr__", @@ -219,47 +157,14 @@ "trio._subprocess.Process.send_signal", "trio._subprocess.Process.terminate", "trio._subprocess.Process.wait", - "trio._sync.CapacityLimiter.__init__", - "trio._sync.CapacityLimiter.__repr__", - "trio._sync.CapacityLimiter.available_tokens", - "trio._sync.CapacityLimiter.borrowed_tokens", - "trio._sync.CapacityLimiter.statistics", - "trio._sync.CapacityLimiter.total_tokens", - "trio._sync.Condition.__init__", - "trio._sync.Condition.acquire", - "trio._sync.Condition.acquire_nowait", - "trio._sync.Condition.locked", - "trio._sync.Condition.notify", - "trio._sync.Condition.notify_all", - "trio._sync.Condition.release", - "trio._sync.Condition.statistics", - "trio._sync.Event.is_set", - "trio._sync.Event.statistics", - "trio._sync.Event.wait", - "trio._sync.Lock", - "trio._sync.Semaphore.__init__", - "trio._sync.Semaphore.__repr__", - "trio._sync.Semaphore.max_value", - "trio._sync.Semaphore.statistics", - "trio._sync.Semaphore.value", - "trio._sync.StrictFIFOLock", - "trio._sync._LockImpl.__repr__", - "trio._sync._LockImpl.locked", - "trio._sync._LockImpl.statistics", - "trio._unix_pipes.FdStream", - "trio.aclose_forcefully", - "trio.current_effective_deadline", "trio.current_time", "trio.from_thread.run", "trio.from_thread.run_sync", "trio.lowlevel.add_instrument", "trio.lowlevel.cancel_shielded_checkpoint", - "trio.lowlevel.checkpoint", - "trio.lowlevel.checkpoint_if_cancelled", "trio.lowlevel.current_clock", "trio.lowlevel.current_root_task", "trio.lowlevel.current_statistics", - "trio.lowlevel.current_task", "trio.lowlevel.current_trio_token", "trio.lowlevel.currently_ki_protected", "trio.lowlevel.notify_closing", @@ -294,7 +199,6 @@ "trio.socket.set_custom_socket_factory", "trio.socket.socket", "trio.socket.socketpair", - "trio.testing._memory_streams.MemoryReceiveStream", "trio.testing._memory_streams.MemoryReceiveStream.__init__", "trio.testing._memory_streams.MemoryReceiveStream.aclose", "trio.testing._memory_streams.MemoryReceiveStream.close", @@ -303,7 +207,6 @@ "trio.testing._memory_streams.MemoryReceiveStream.put_eof", "trio.testing._memory_streams.MemoryReceiveStream.receive_some", "trio.testing._memory_streams.MemoryReceiveStream.receive_some_hook", - "trio.testing._memory_streams.MemorySendStream", "trio.testing._memory_streams.MemorySendStream.__init__", "trio.testing._memory_streams.MemorySendStream.aclose", "trio.testing._memory_streams.MemorySendStream.close", diff --git a/trio/_util.py b/trio/_util.py index c21cefe71e..0a0795fc15 100644 --- a/trio/_util.py +++ b/trio/_util.py @@ -9,6 +9,7 @@ import typing as t from abc import ABCMeta from functools import update_wrapper +from types import TracebackType import trio @@ -188,7 +189,12 @@ def __enter__(self): else: self._held = True - def __exit__(self, 
*args): + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: self._held = False diff --git a/trio/lowlevel.py b/trio/lowlevel.py index db8d180181..54f4ef3141 100644 --- a/trio/lowlevel.py +++ b/trio/lowlevel.py @@ -11,6 +11,7 @@ from ._core import ( Abort as Abort, ParkingLot as ParkingLot, + ParkingLotStatistics as ParkingLotStatistics, RaiseCancelT as RaiseCancelT, RunVar as RunVar, Task as Task, diff --git a/trio/testing/_check_streams.py b/trio/testing/_check_streams.py index 33d741e670..401b8ef0c2 100644 --- a/trio/testing/_check_streams.py +++ b/trio/testing/_check_streams.py @@ -1,13 +1,18 @@ # Generic stream tests +from __future__ import annotations import random from contextlib import contextmanager +from typing import TYPE_CHECKING from .. import _core from .._abc import HalfCloseableStream, ReceiveStream, SendStream, Stream from .._highlevel_generic import aclose_forcefully from ._checkpoints import assert_checkpoints +if TYPE_CHECKING: + from types import TracebackType + class _ForceCloseBoth: def __init__(self, both): @@ -16,7 +21,12 @@ def __init__(self, both): async def __aenter__(self): return self._both - async def __aexit__(self, *args): + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: try: await aclose_forcefully(self._both[0]) finally: diff --git a/trio/testing/_fake_net.py b/trio/testing/_fake_net.py index f2d40fb7ff..b3bdfd85c0 100644 --- a/trio/testing/_fake_net.py +++ b/trio/testing/_fake_net.py @@ -6,16 +6,21 @@ # - TCP # - UDP broadcast +from __future__ import annotations + import errno import ipaddress import os -from typing import Optional, Union +from typing import TYPE_CHECKING, Optional, Union import attr import trio from trio._util import Final, NoPublicConstructor +if TYPE_CHECKING: + from types import TracebackType + IPAddress = Union[ipaddress.IPv4Address, ipaddress.IPv6Address] @@ -338,7 +343,12 @@ def setsockopt(self, level, item, value): def __enter__(self): return self - def __exit__(self, *exc_info): + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: self.close() async def send(self, data, flags=0): From 6f187fb37795829773acedbb5c0ee9e146bffb6d Mon Sep 17 00:00:00 2001 From: jakkdl Date: Wed, 12 Jul 2023 13:55:13 +0200 Subject: [PATCH 095/162] move mypy config to pyproject.toml --- mypy.ini | 25 ------------------------- pyproject.toml | 25 +++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 25 deletions(-) delete mode 100644 mypy.ini diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 31eeef1cd0..0000000000 --- a/mypy.ini +++ /dev/null @@ -1,25 +0,0 @@ -[mypy] -# TODO: run mypy against several OS/version combos in CI -# https://mypy.readthedocs.io/en/latest/command_line.html#platform-configuration - -# Be flexible about dependencies that don't have stubs yet (like pytest) -ignore_missing_imports = True - -# Be strict about use of Mypy -warn_unused_ignores = True -warn_unused_configs = True -warn_redundant_casts = True -warn_return_any = True - -# Avoid subtle backsliding -#disallow_any_decorated = True -#disallow_incomplete_defs = True -#disallow_subclassing_any = True - -# Enable gradually / for new modules -check_untyped_defs = False -disallow_untyped_calls = False -disallow_untyped_defs = False - -# DO NOT use `ignore_errors`; it 
doesn't apply -# downstream and users have to deal with them. diff --git a/pyproject.toml b/pyproject.toml index 0f95a0cbc1..cfb4060ee7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,6 +19,31 @@ combine_as_imports = true profile = "black" skip_gitignore = true +[tool.mypy] +python_version = "3.8" + +# Be flexible about dependencies that don't have stubs yet (like pytest) +ignore_missing_imports = true + +# Be strict about use of Mypy +warn_unused_ignores = true +warn_unused_configs = true +warn_redundant_casts = true +warn_return_any = true + +# Avoid subtle backsliding +#disallow_any_decorated = true +#disallow_incomplete_defs = true +#disallow_subclassing_any = true + +# Enable gradually / for new modules +check_untyped_defs = false +disallow_untyped_calls = false +disallow_untyped_defs = false + +# DO NOT use `ignore_errors`; it doesn't apply +# downstream and users have to deal with them. + [tool.pytest.ini_options] addopts = ["--strict-markers", "--strict-config"] faulthandler_timeout = 60 From 1d2fc1565f883e81f356e975d8aeb4ffa76e78a9 Mon Sep 17 00:00:00 2001 From: Joshua Oreman Date: Thu, 13 Jul 2023 22:06:37 -0600 Subject: [PATCH 096/162] Add comment clarifying the value of next_send --- trio/_core/_run.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 39c1e0d48f..1c68658fb5 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -2211,6 +2211,10 @@ def my_done_callback(run_outcome): "Guest runner blocked before system nursery was initialized" ) ) + # next_send should be the return value of + # IOManager.get_events() if no I/O was waiting, which is + # platform-dependent. We don't actually check for I/O during + # this init phase because no one should be expecting any yet. next_send = 0 if sys.platform == "win32" else () else: # pragma: no cover guest_state.unrolled_run_gen.throw( From bf9fe64150d0c1b7aec15eecd0f2fb6a3e7e660c Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Mon, 17 Jul 2023 07:59:14 +0000 Subject: [PATCH 097/162] drop python 3.7 support, drop pypy3.7-3.8, add pypy3.10 (except on windows) (#2668) * drop support for Python 3.7, PyPy3.7 & 3.8. Updates documentation, CI, and code with help of `pyupgrade --py38-plus`. 
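  For reference, a condensed before/after sketch of the kind of mechanical
  rewrite `pyupgrade --py38-plus` performs in the hunks below, e.g. converting
  `str.format` calls to f-strings and dropping the now-redundant parentheses on
  `functools.lru_cache`. Illustrative only: the names (`r1`, `r2`,
  `lookup_symbol`) are borrowed from the touched files, and the bodies are
  placeholders, not trio code.

      import functools

      r1, r2 = 3, 4

      # before
      print("r1 is {}, r2 is {}".format(r1, r2))

      @functools.lru_cache()          # parentheses needed pre-3.8 style
      def lookup_symbol(symbol):
          return symbol

      # after `pyupgrade --py38-plus`
      print(f"r1 is {r1}, r2 is {r2}")

      @functools.lru_cache            # bare decorator, valid since Python 3.8
      def lookup_symbol(symbol):
          return symbol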
--- .github/workflows/ci.yml | 16 ++++---- README.rst | 5 ++- docs/source/index.rst | 2 +- docs/source/reference-core.rst | 10 ++--- docs/source/tutorial.rst | 2 +- newsfragments/2668.removal.rst | 1 + newsfragments/README.rst | 1 + .../how-does-windows-so-reuseaddr-work.py | 26 ++++++++----- notes-to-self/reopen-pipe.py | 10 +++-- notes-to-self/schedule-timing.py | 8 +++- notes-to-self/socketpair-buffering.py | 2 +- notes-to-self/ssl-handshake/ssl-handshake.py | 22 +++++++---- notes-to-self/sslobject.py | 4 +- notes-to-self/thread-closure-bug-demo.py | 8 +++- notes-to-self/thread-dispatch-bench.py | 5 ++- .../time-wait-windows-exclusiveaddruse.py | 8 ++-- notes-to-self/time-wait.py | 7 +++- pyproject.toml | 2 +- setup.py | 6 +-- test-requirements.in | 17 ++------ test-requirements.txt | 6 +-- trio/_channel.py | 4 +- trio/_core/_run.py | 4 +- trio/_core/_tests/test_multierror.py | 4 +- trio/_core/_tests/tutil.py | 39 ++++--------------- trio/_path.py | 4 +- trio/_socket.py | 14 +------ trio/_tests/test_exports.py | 2 +- trio/_tests/test_ssl.py | 31 --------------- trio/_unix_pipes.py | 2 +- trio/_util.py | 1 + trio/socket.py | 4 +- 32 files changed, 116 insertions(+), 161 deletions(-) create mode 100644 newsfragments/2668.removal.rst diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 480398a1f0..40af0960f5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,16 +18,18 @@ jobs: strategy: fail-fast: false matrix: - python: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.8-nightly', 'pypy-3.9-nightly'] + # pypy-3.10 is failing, see https://github.com/python-trio/trio/issues/2678 + python: ['3.8', '3.9', '3.10', 'pypy-3.9-nightly'] #, 'pypy-3.10-nightly'] arch: ['x86', 'x64'] lsp: [''] lsp_extract_file: [''] extra_name: [''] exclude: - - python: 'pypy-3.8-nightly' - arch: 'x86' + # pypy does not release 32-bit binaries - python: 'pypy-3.9-nightly' arch: 'x86' + #- python: 'pypy-3.10-nightly' + # arch: 'x86' include: - python: '3.8' arch: 'x64' @@ -65,8 +67,8 @@ jobs: # and then finally an actual release version. actions/setup-python doesn't # support this for PyPy presently so we get no help there. # - # CPython -> 3.9.0-alpha - 3.9.X - # PyPy -> pypy-3.7 + # 'CPython' -> '3.9.0-alpha - 3.9.X' + # 'PyPy' -> 'pypy-3.9' python-version: ${{ fromJSON(format('["{0}", "{1}"]', format('{0}.0-alpha - {0}.X', matrix.python), matrix.python))[startsWith(matrix.python, 'pypy')] }} architecture: '${{ matrix.arch }}' cache: pip @@ -92,7 +94,7 @@ jobs: strategy: fail-fast: false matrix: - python: ['pypy-3.7', 'pypy-3.8', 'pypy-3.9', '3.7', '3.8', '3.9', '3.10', '3.11', '3.12-dev', 'pypy-3.8-nightly', 'pypy-3.9-nightly'] + python: ['pypy-3.9', 'pypy-3.10', '3.8', '3.9', '3.10', '3.11', '3.12-dev', 'pypy-3.9-nightly', 'pypy-3.10-nightly'] check_formatting: ['0'] extra_name: [''] include: @@ -143,7 +145,7 @@ jobs: strategy: fail-fast: false matrix: - python: ['3.7', '3.8', '3.9', '3.10', 'pypy-3.8-nightly', 'pypy-3.9-nightly'] + python: ['3.8', '3.9', '3.10', 'pypy-3.9-nightly', 'pypy-3.10-nightly'] continue-on-error: >- ${{ ( diff --git a/README.rst b/README.rst index 4e096eddf3..016823e1f5 100644 --- a/README.rst +++ b/README.rst @@ -92,8 +92,9 @@ demonstration of implementing the "Happy Eyeballs" algorithm in an older library versus Trio. **Cool, but will it work on my system?** Probably! 
As long as you have -some kind of Python 3.7-or-better (CPython or the latest PyPy3 are -both fine), and are using Linux, macOS, Windows, or FreeBSD, then Trio +some kind of Python 3.8-or-better (CPython or [currently maintained versions of +PyPy3](https://doc.pypy.org/en/latest/faq.html#which-python-versions-does-pypy-implement) +are both fine), and are using Linux, macOS, Windows, or FreeBSD, then Trio will work. Other environments might work too, but those are the ones we test on. And all of our dependencies are pure Python, except for CFFI on Windows, which has wheels available, so diff --git a/docs/source/index.rst b/docs/source/index.rst index 84d81880af..fc13227c3a 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -45,7 +45,7 @@ Vital statistics: * Supported environments: We test on - - Python: 3.7+ (CPython and PyPy) + - Python: 3.8+ (CPython and PyPy) - Windows, macOS, Linux (glibc and musl), FreeBSD Other environments might also work; give it a try and see. diff --git a/docs/source/reference-core.rst b/docs/source/reference-core.rst index 4f4f4d62b9..141e128026 100644 --- a/docs/source/reference-core.rst +++ b/docs/source/reference-core.rst @@ -974,12 +974,8 @@ work. What we need is something that's *like* a global variable, but that can have different values depending on which request handler is accessing it. -To solve this problem, Python 3.7 added a new module to the standard -library: :mod:`contextvars`. And not only does Trio have built-in -support for :mod:`contextvars`, but if you're using an earlier version -of Python, then Trio makes sure that a backported version of -:mod:`contextvars` is installed. So you can assume :mod:`contextvars` -is there and works regardless of what version of Python you're using. +To solve this problem, Python has a module in the standard +library: :mod:`contextvars`. Here's a toy example demonstrating how to use :mod:`contextvars`: @@ -1009,7 +1005,7 @@ Example output (yours may differ slightly): request 0: Request received finished For more information, read the -`contextvars docs `__. +`contextvars docs `__. .. _synchronization: diff --git a/docs/source/tutorial.rst b/docs/source/tutorial.rst index 19289ca991..0faffd119b 100644 --- a/docs/source/tutorial.rst +++ b/docs/source/tutorial.rst @@ -88,7 +88,7 @@ Okay, ready? Let's get started. Before you begin ---------------- -1. Make sure you're using Python 3.7 or newer. +1. Make sure you're using Python 3.8 or newer. 2. ``python3 -m pip install --upgrade trio`` (or on Windows, maybe ``py -3 -m pip install --upgrade trio`` – `details diff --git a/newsfragments/2668.removal.rst b/newsfragments/2668.removal.rst new file mode 100644 index 0000000000..512f681077 --- /dev/null +++ b/newsfragments/2668.removal.rst @@ -0,0 +1 @@ +Drop support for Python3.7 and PyPy3.7/3.8. diff --git a/newsfragments/README.rst b/newsfragments/README.rst index 349e67eec0..52dc0716bb 100644 --- a/newsfragments/README.rst +++ b/newsfragments/README.rst @@ -14,6 +14,7 @@ Each file should be named like ``..rst``, where deprecated features after an appropriate time, go in the ``deprecated`` category instead) * ``feature``: any new feature that doesn't qualify for ``headline`` +* ``removal``: removing support for old python versions, or other removals with no deprecation period. 
* ``bugfix`` * ``doc`` * ``deprecated`` diff --git a/notes-to-self/how-does-windows-so-reuseaddr-work.py b/notes-to-self/how-does-windows-so-reuseaddr-work.py index 4865ea17b3..d8d60d1d66 100644 --- a/notes-to-self/how-does-windows-so-reuseaddr-work.py +++ b/notes-to-self/how-does-windows-so-reuseaddr-work.py @@ -10,6 +10,7 @@ modes = ["default", "SO_REUSEADDR", "SO_EXCLUSIVEADDRUSE"] bind_types = ["wildcard", "specific"] + def sock(mode): s = socket.socket(family=socket.AF_INET) if mode == "SO_REUSEADDR": @@ -18,6 +19,7 @@ def sock(mode): s.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) return s + def bind(sock, bind_type): if bind_type == "wildcard": sock.bind(("0.0.0.0", 12345)) @@ -26,6 +28,7 @@ def bind(sock, bind_type): else: assert False + def table_entry(mode1, bind_type1, mode2, bind_type2): with sock(mode1) as sock1: bind(sock1, bind_type1) @@ -41,19 +44,22 @@ def table_entry(mode1, bind_type1, mode2, bind_type2): else: return "Success" -print(""" + +print( + """ second bind | """ -+ " | ".join(["%-19s" % mode for mode in modes]) + + " | ".join(["%-19s" % mode for mode in modes]) ) -print(""" """, end='') +print(""" """, end="") for mode in modes: - print(" | " + " | ".join(["%8s" % bind_type for bind_type in bind_types]), end='') + print(" | " + " | ".join(["%8s" % bind_type for bind_type in bind_types]), end="") -print(""" +print( + """ first bind -----------------------------------------------------------------""" -# default | wildcard | INUSE | Success | ACCESS | Success | INUSE | Success + # default | wildcard | INUSE | Success | ACCESS | Success | INUSE | Success ) for i, mode1 in enumerate(modes): @@ -63,6 +69,8 @@ def table_entry(mode1, bind_type1, mode2, bind_type2): for l, bind_type2 in enumerate(bind_types): entry = table_entry(mode1, bind_type1, mode2, bind_type2) row.append(entry) - #print(mode1, bind_type1, mode2, bind_type2, entry) - print("{:>19} | {:>8} | ".format(mode1, bind_type1) - + " | ".join(["%8s" % entry for entry in row])) + # print(mode1, bind_type1, mode2, bind_type2, entry) + print( + f"{mode1:>19} | {bind_type1:>8} | " + + " | ".join(["%8s" % entry for entry in row]) + ) diff --git a/notes-to-self/reopen-pipe.py b/notes-to-self/reopen-pipe.py index 910def397c..5e5b31e41f 100644 --- a/notes-to-self/reopen-pipe.py +++ b/notes-to-self/reopen-pipe.py @@ -3,12 +3,13 @@ import time import tempfile + def check_reopen(r1, w): try: print("Reopening read end") - r2 = os.open("/proc/self/fd/{}".format(r1), os.O_RDONLY) + r2 = os.open(f"/proc/self/fd/{r1}", os.O_RDONLY) - print("r1 is {}, r2 is {}".format(r1, r2)) + print(f"r1 is {r1}, r2 is {r2}") print("checking they both can receive from w...") @@ -36,11 +37,12 @@ def check_reopen(r1, w): def sleep_then_write(): time.sleep(1) os.write(w, b"c") + threading.Thread(target=sleep_then_write, daemon=True).start() assert os.read(r1, 1) == b"c" print("r1 definitely seems to be in blocking mode") except Exception as exc: - print("ERROR: {!r}".format(exc)) + print(f"ERROR: {exc!r}") print("-- testing anonymous pipe --") @@ -63,6 +65,6 @@ def sleep_then_write(): print("-- testing socketpair --") import socket + rs, ws = socket.socketpair() check_reopen(rs.fileno(), ws.fileno()) - diff --git a/notes-to-self/schedule-timing.py b/notes-to-self/schedule-timing.py index c3093066e2..176dcf9220 100644 --- a/notes-to-self/schedule-timing.py +++ b/notes-to-self/schedule-timing.py @@ -4,16 +4,18 @@ LOOPS = 0 RUNNING = True + async def reschedule_loop(depth): if depth == 0: global LOOPS while RUNNING: LOOPS += 1 
await trio.sleep(0) - #await trio.lowlevel.cancel_shielded_checkpoint() + # await trio.lowlevel.cancel_shielded_checkpoint() else: await reschedule_loop(depth - 1) + async def report_loop(): global RUNNING try: @@ -25,13 +27,15 @@ async def report_loop(): end_count = LOOPS loops = end_count - start_count duration = end_time - start_time - print("{} loops/sec".format(loops / duration)) + print(f"{loops / duration} loops/sec") finally: RUNNING = False + async def main(): async with trio.open_nursery() as nursery: nursery.start_soon(reschedule_loop, 10) nursery.start_soon(report_loop) + trio.run(main) diff --git a/notes-to-self/socketpair-buffering.py b/notes-to-self/socketpair-buffering.py index dd3b1ad97d..5e77a709b7 100644 --- a/notes-to-self/socketpair-buffering.py +++ b/notes-to-self/socketpair-buffering.py @@ -32,6 +32,6 @@ except BlockingIOError: pass - print("setsockopt bufsize {}: {}".format(bufsize, i)) + print(f"setsockopt bufsize {bufsize}: {i}") a.close() b.close() diff --git a/notes-to-self/ssl-handshake/ssl-handshake.py b/notes-to-self/ssl-handshake/ssl-handshake.py index 81d875be6a..18a0e1a675 100644 --- a/notes-to-self/ssl-handshake/ssl-handshake.py +++ b/notes-to-self/ssl-handshake/ssl-handshake.py @@ -8,6 +8,7 @@ server_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) server_ctx.load_cert_chain("trio-test-1.pem") + def _ssl_echo_serve_sync(sock): try: wrapped = server_ctx.wrap_socket(sock, server_side=True) @@ -20,16 +21,19 @@ def _ssl_echo_serve_sync(sock): except BrokenPipeError: pass + @contextmanager def echo_server_connection(): client_sock, server_sock = socket.socketpair() with client_sock, server_sock: t = threading.Thread( - target=_ssl_echo_serve_sync, args=(server_sock,), daemon=True) + target=_ssl_echo_serve_sync, args=(server_sock,), daemon=True + ) t.start() yield client_sock + class ManuallyWrappedSocket: def __init__(self, ctx, sock, **kwargs): self.incoming = ssl.MemoryBIO() @@ -82,21 +86,23 @@ def unwrap(self): def wrap_socket_via_wrap_socket(ctx, sock, **kwargs): return ctx.wrap_socket(sock, do_handshake_on_connect=False, **kwargs) + def wrap_socket_via_wrap_bio(ctx, sock, **kwargs): return ManuallyWrappedSocket(ctx, sock, **kwargs) for wrap_socket in [ - wrap_socket_via_wrap_socket, - wrap_socket_via_wrap_bio, + wrap_socket_via_wrap_socket, + wrap_socket_via_wrap_bio, ]: - print("\n--- checking {} ---\n".format(wrap_socket.__name__)) + print(f"\n--- checking {wrap_socket.__name__} ---\n") print("checking with do_handshake + correct hostname...") with echo_server_connection() as client_sock: client_ctx = ssl.create_default_context(cafile="trio-test-CA.pem") wrapped = wrap_socket( - client_ctx, client_sock, server_hostname="trio-test-1.example.org") + client_ctx, client_sock, server_hostname="trio-test-1.example.org" + ) wrapped.do_handshake() wrapped.sendall(b"x") assert wrapped.recv(1) == b"x" @@ -107,7 +113,8 @@ def wrap_socket_via_wrap_bio(ctx, sock, **kwargs): with echo_server_connection() as client_sock: client_ctx = ssl.create_default_context(cafile="trio-test-CA.pem") wrapped = wrap_socket( - client_ctx, client_sock, server_hostname="trio-test-2.example.org") + client_ctx, client_sock, server_hostname="trio-test-2.example.org" + ) try: wrapped.do_handshake() except Exception: @@ -119,7 +126,8 @@ def wrap_socket_via_wrap_bio(ctx, sock, **kwargs): with echo_server_connection() as client_sock: client_ctx = ssl.create_default_context(cafile="trio-test-CA.pem") wrapped = wrap_socket( - client_ctx, client_sock, 
server_hostname="trio-test-2.example.org") + client_ctx, client_sock, server_hostname="trio-test-2.example.org" + ) # We forgot to call do_handshake # But the hostname is wrong so something had better error out... sent = b"x" diff --git a/notes-to-self/sslobject.py b/notes-to-self/sslobject.py index cfac98676e..0692af319c 100644 --- a/notes-to-self/sslobject.py +++ b/notes-to-self/sslobject.py @@ -15,6 +15,7 @@ soutb = ssl.MemoryBIO() sso = server_ctx.wrap_bio(sinb, soutb, server_side=True) + @contextmanager def expect(etype): try: @@ -22,7 +23,8 @@ def expect(etype): except etype: pass else: - raise AssertionError("expected {}".format(etype)) + raise AssertionError(f"expected {etype}") + with expect(ssl.SSLWantReadError): cso.do_handshake() diff --git a/notes-to-self/thread-closure-bug-demo.py b/notes-to-self/thread-closure-bug-demo.py index 514636a1b4..b09a87fe5f 100644 --- a/notes-to-self/thread-closure-bug-demo.py +++ b/notes-to-self/thread-closure-bug-demo.py @@ -8,18 +8,21 @@ COUNT = 100 + def slow_tracefunc(frame, event, arg): # A no-op trace function that sleeps briefly to make us more likely to hit # the race condition. time.sleep(0.01) return slow_tracefunc + def run_with_slow_tracefunc(fn): # settrace() only takes effect when you enter a new frame, so we need this # little dance: sys.settrace(slow_tracefunc) return fn() + def outer(): x = 0 # We hide the done variable inside a list, because we want to use it to @@ -46,13 +49,14 @@ def traced_looper(): t.start() for i in range(COUNT): - print("after {} increments, x is {}".format(i, x)) + print(f"after {i} increments, x is {x}") x += 1 time.sleep(0.01) done[0] = True t.join() - print("Final discrepancy: {} (should be 0)".format(COUNT - x)) + print(f"Final discrepancy: {COUNT - x} (should be 0)") + outer() diff --git a/notes-to-self/thread-dispatch-bench.py b/notes-to-self/thread-dispatch-bench.py index 1625efae17..9afb4bbec8 100644 --- a/notes-to-self/thread-dispatch-bench.py +++ b/notes-to-self/thread-dispatch-bench.py @@ -10,11 +10,13 @@ COUNT = 10000 + def worker(in_q, out_q): while True: job = in_q.get() out_q.put(job()) + def main(): in_q = Queue() out_q = Queue() @@ -28,6 +30,7 @@ def main(): in_q.put(lambda: None) out_q.get() end = time.monotonic() - print("{:.2f} µs/job".format((end - start) / COUNT * 1e6)) + print(f"{(end - start) / COUNT * 1e6:.2f} µs/job") + main() diff --git a/notes-to-self/time-wait-windows-exclusiveaddruse.py b/notes-to-self/time-wait-windows-exclusiveaddruse.py index db3aaad08a..dcb4a27dd0 100644 --- a/notes-to-self/time-wait-windows-exclusiveaddruse.py +++ b/notes-to-self/time-wait-windows-exclusiveaddruse.py @@ -8,15 +8,17 @@ import socket from contextlib import contextmanager + @contextmanager def report_outcome(tagline): try: yield except OSError as exc: - print("{}: failed".format(tagline)) - print(" details: {!r}".format(exc)) + print(f"{tagline}: failed") + print(f" details: {exc!r}") else: - print("{}: succeeded".format(tagline)) + print(f"{tagline}: succeeded") + # Set up initial listening socket lsock = socket.socket() diff --git a/notes-to-self/time-wait.py b/notes-to-self/time-wait.py index e865a94982..08c71b0048 100644 --- a/notes-to-self/time-wait.py +++ b/notes-to-self/time-wait.py @@ -31,6 +31,7 @@ import attr + @attr.s(repr=False) class Options: listen1_early = attr.ib(default=None) @@ -49,9 +50,10 @@ def describe(self): for f in attr.fields(self.__class__): value = getattr(self, f.name) if value is not None: - info.append("{}={}".format(f.name, value)) + 
info.append(f"{f.name}={value}") return "Set/unset: {}".format(", ".join(info)) + def time_wait(options): print(options.describe()) @@ -60,7 +62,7 @@ def time_wait(options): listen0 = socket.socket() listen0.bind(("127.0.0.1", 0)) sockaddr = listen0.getsockname() - #print(" ", sockaddr) + # print(" ", sockaddr) listen0.close() listen1 = socket.socket() @@ -98,6 +100,7 @@ def time_wait(options): else: print(" -> ok") + time_wait(Options()) time_wait(Options(listen1_early=True, server=True, listen2=True)) time_wait(Options(listen1_early=True)) diff --git a/pyproject.toml b/pyproject.toml index cfb4060ee7..5b5e56ff2e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.black] -target-version = ['py37'] +target-version = ['py38'] [tool.flake8] extend-ignore = ['D', 'E', 'W', 'F403', 'F405', 'F821', 'F822'] diff --git a/setup.py b/setup.py index 3f420f6c63..2917f7c12e 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ Vital statistics: * Supported environments: Linux, macOS, or Windows running some kind of Python - 3.7-or-better (either CPython or PyPy3 is fine). \\*BSD and illumos likely + 3.8-or-better (either CPython or PyPy3 is fine). \\*BSD and illumos likely work too, but are not tested. * Install: ``python3 -m pip install -U trio`` (or on Windows, maybe @@ -96,7 +96,7 @@ # This means, just install *everything* you see under trio/, even if it # doesn't look like a source file, so long as it appears in MANIFEST.in: include_package_data=True, - python_requires=">=3.7", + python_requires=">=3.8", keywords=["async", "io", "networking", "trio"], classifiers=[ "Development Status :: 3 - Alpha", @@ -110,11 +110,11 @@ "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: System :: Networking", "Framework :: Trio", ], diff --git a/test-requirements.in b/test-requirements.in index 1e1e23c2b8..03997ad2e7 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -3,28 +3,25 @@ pytest >= 5.0 # for faulthandler in core coverage >= 7.2.5 async_generator >= 1.9 pyright -# ipython 7.x is the last major version supporting Python 3.7 -ipython < 7.35 # for the IPython traceback integration tests +ipython # for the IPython traceback integration tests pyOpenSSL >= 22.0.0 # for the ssl + DTLS tests trustme # for the ssl + DTLS tests pylint # for pylint finding all symbols tests jedi # for jedi code completion tests -cryptography>=36.0.0 # 35.0.0 is transitive but fails +cryptography>=41.0.0 # cryptography<41 segfaults on pypy3.10 # Tools black; implementation_name == "cpython" mypy; implementation_name == "cpython" types-pyOpenSSL; implementation_name == "cpython" -flake8 < 6.0.0 # 6.0.0 drops python 3.7 +flake8 flake8-pyproject astor # code generation pip-tools >= 6.13.0 # https://github.com/python-trio/trio/pull/654#issuecomment-420518745 -# typed_ast is deprecated as of 3.8, and straight up doesn't compile on 3.10-dev as of 2021-12-13 -typed_ast; implementation_name == "cpython" and python_version < "3.8" mypy-extensions; implementation_name == "cpython" -typing-extensions < 4.7.0 +typing-extensions # Trio's own dependencies cffi; os_name == "nt" @@ -34,9 +31,3 @@ idna outcome sniffio exceptiongroup >= 
1.0.0rc9; python_version < "3.11" - -# isort 5.12.0 requires python 3.8 -isort < 5.12.0 - -# cryptography 40.0.2 (and presumably prior) segfaults on PyPy 3.7 -cryptography < 40.0.0 diff --git a/test-requirements.txt b/test-requirements.txt index 6babde688e..17fa0a099a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -28,7 +28,7 @@ click==8.1.3 # pip-tools coverage==7.2.7 # via -r test-requirements.in -cryptography==39.0.2 +cryptography==41.0.1 # via # -r test-requirements.in # pyopenssl @@ -57,9 +57,7 @@ iniconfig==2.0.0 ipython==7.34.0 # via -r test-requirements.in isort==5.11.5 - # via - # -r test-requirements.in - # pylint + # via pylint jedi==0.18.2 # via # -r test-requirements.in diff --git a/trio/_channel.py b/trio/_channel.py index 7c8ff4660d..f77950c4e4 100644 --- a/trio/_channel.py +++ b/trio/_channel.py @@ -209,7 +209,7 @@ def abort_fn(_: RaiseCancelT) -> Abort: # Return type must be stringified or use a TypeVar @enable_ki_protection - def clone(self) -> "MemorySendChannel[SendType]": + def clone(self) -> MemorySendChannel[SendType]: """Clone this send channel object. This returns a new `MemorySendChannel` object, which acts as a @@ -352,7 +352,7 @@ def abort_fn(_: RaiseCancelT) -> Abort: return await trio.lowlevel.wait_task_rescheduled(abort_fn) # type: ignore[no-any-return] @enable_ki_protection - def clone(self) -> "MemoryReceiveChannel[ReceiveType]": + def clone(self) -> MemoryReceiveChannel[ReceiveType]: """Clone this receive channel object. This returns a new `MemoryReceiveChannel` object, which acts as a diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 585dc4aa41..4f90889c5f 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -51,7 +51,7 @@ import contextvars # An unfortunate name collision here with trio._util.Final - from typing_extensions import Final as FinalT + from typing import Final as FinalT DEADLINE_HEAP_MIN_PRUNE_THRESHOLD: FinalT = 1000 @@ -1349,7 +1349,7 @@ def raise_cancel() -> NoReturn: class RunContext(threading.local): - runner: "Runner" + runner: Runner task: Task diff --git a/trio/_core/_tests/test_multierror.py b/trio/_core/_tests/test_multierror.py index 498f4d435b..ca98ebf982 100644 --- a/trio/_core/_tests/test_multierror.py +++ b/trio/_core/_tests/test_multierror.py @@ -475,9 +475,7 @@ def run_script(name, use_ipython=False): def check_simple_excepthook(completed, uses_ipython): assert_match_in_seq( [ - "in = (3, 8) - else "in ", + "in ", "MultiError", "--- 1 ---", "in exc1_fn", diff --git a/trio/_core/_tests/tutil.py b/trio/_core/_tests/tutil.py index f3a21364be..b3aa73fb7d 100644 --- a/trio/_core/_tests/tutil.py +++ b/trio/_core/_tests/tutil.py @@ -4,7 +4,6 @@ import os import socket as stdlib_socket import sys -import threading import warnings from contextlib import closing, contextmanager from typing import TYPE_CHECKING @@ -42,7 +41,7 @@ with s: try: s.bind(("::1", 0)) - except OSError: + except OSError: # pragma: no cover # since support for 3.7 was removed can_bind_ipv6 = False else: can_bind_ipv6 = True @@ -85,37 +84,13 @@ def _noop(*args, **kwargs): pass -if sys.version_info >= (3, 8): - - @contextmanager - def restore_unraisablehook(): - sys.unraisablehook, prev = sys.__unraisablehook__, sys.unraisablehook - try: - yield - finally: - sys.unraisablehook = prev - - @contextmanager - def disable_threading_excepthook(): - if sys.version_info >= (3, 10): - threading.excepthook, prev = threading.__excepthook__, threading.excepthook - else: - threading.excepthook, prev = _noop, threading.excepthook - - try: - 
yield - finally: - threading.excepthook = prev - -else: - - @contextmanager - def restore_unraisablehook(): # pragma: no cover - yield - - @contextmanager - def disable_threading_excepthook(): # pragma: no cover +@contextmanager +def restore_unraisablehook(): + sys.unraisablehook, prev = sys.__unraisablehook__, sys.unraisablehook + try: yield + finally: + sys.unraisablehook = prev # template is like: diff --git a/trio/_path.py b/trio/_path.py index bb81759ecf..67234e223d 100644 --- a/trio/_path.py +++ b/trio/_path.py @@ -246,14 +246,14 @@ async def open(self, *args, **kwargs): is_mount: Any owner: Any - if sys.version_info >= (3, 8) and sys.version_info < (3, 12): - link_to: Any if sys.version_info >= (3, 9): is_relative_to: Any with_stem: Any readlink: Any if sys.version_info >= (3, 10): hardlink_to: Any + if sys.version_info < (3, 12): + link_to: Any if sys.version_info >= (3, 12): is_junction: Any walk: Any diff --git a/trio/_socket.py b/trio/_socket.py index eaf0e04d15..659f844078 100644 --- a/trio/_socket.py +++ b/trio/_socket.py @@ -57,18 +57,6 @@ async def __aexit__( return False -################################################################ -# CONSTANTS -################################################################ - -try: - from socket import IPPROTO_IPV6 -except ImportError: - # Before Python 3.8, Windows is missing IPPROTO_IPV6 - # https://bugs.python.org/issue29515 - if sys.platform == "win32": # pragma: no branch - IPPROTO_IPV6 = 41 - ################################################################ # Overrides ################################################################ @@ -569,7 +557,7 @@ async def wait_writable(self) -> None: async def _resolve_address_nocp(self, address, *, local): if self.family == _stdlib_socket.AF_INET6: ipv6_v6only = self._sock.getsockopt( - IPPROTO_IPV6, _stdlib_socket.IPV6_V6ONLY + _stdlib_socket.IPPROTO_IPV6, _stdlib_socket.IPV6_V6ONLY ) else: ipv6_v6only = False diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index e51bbe31f5..20635b0022 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -267,7 +267,7 @@ def no_hidden(symbols): cache_json = json.loads(cache_file.read()) # skip a bunch of file-system activity (probably can un-memoize?) - @functools.lru_cache() + @functools.lru_cache def lookup_symbol(symbol): topname, *modname, name = symbol.split(".") version = next(cache.glob("3.*/")) diff --git a/trio/_tests/test_ssl.py b/trio/_tests/test_ssl.py index 2534c81260..f91cea8549 100644 --- a/trio/_tests/test_ssl.py +++ b/trio/_tests/test_ssl.py @@ -7,7 +7,6 @@ import threading from contextlib import asynccontextmanager, contextmanager from functools import partial -from typing import TYPE_CHECKING import pytest import trustme @@ -31,10 +30,6 @@ memory_stream_pair, ) -if TYPE_CHECKING: - from _pytest.mark import MarkDecorator - - # We have two different kinds of echo server fixtures we use for testing. The # first is a real server written using the stdlib ssl module and blocking # sockets. It runs in a thread and we talk to it over a real socketpair(), to @@ -63,12 +58,6 @@ TRIO_TEST_1_CERT.configure_cert(SERVER_CTX) -skip_on_broken_openssl: MarkDecorator = pytest.mark.skipif( - sys.version_info < (3, 8) and ssl.OPENSSL_VERSION_INFO[0] > 1, - reason="Python 3.7 does not work with OpenSSL versions higher than 1.X", -) - - # TLS 1.3 has a lot of changes from previous versions. So we want to run tests # with both TLS 1.3, and TLS 1.2. # "tls13" means that we're willing to negotiate TLS 1.3. 
Usually that's @@ -111,22 +100,6 @@ def ssl_echo_serve_sync(sock, *, expect_fail=False): wrapped.unwrap() except exceptions: pass - except ssl.SSLWantWriteError: # pragma: no cover - # Under unclear conditions, CPython sometimes raises - # SSLWantWriteError here. This is a bug (bpo-32219), - # but it's not our bug. Christian Heimes thinks - # it's fixed in 'recent' CPython versions so we fail - # the test for those and ignore it for earlier - # versions. - if ( - sys.implementation.name != "cpython" - or sys.version_info >= (3, 8) - ): - pytest.fail( - "still an issue on recent python versions " - "add a comment to " - "https://bugs.python.org/issue32219" - ) return wrapped.sendall(data) # This is an obscure workaround for an openssl bug. In server mode, in @@ -817,7 +790,6 @@ async def test_send_all_empty_string(client_ctx): await s.aclose() -@skip_on_broken_openssl @pytest.mark.parametrize("https_compatible", [False, True]) async def test_SSLStream_generic(client_ctx, https_compatible): async def stream_maker(): @@ -1033,7 +1005,6 @@ async def test_ssl_bad_shutdown(client_ctx): await server.aclose() -@skip_on_broken_openssl async def test_ssl_bad_shutdown_but_its_ok(client_ctx): client, server = ssl_memory_stream_pair( client_ctx, @@ -1098,7 +1069,6 @@ def close_hook(): assert transport_close_count == 1 -@skip_on_broken_openssl async def test_ssl_https_compatibility_disagreement(client_ctx): client, server = ssl_memory_stream_pair( client_ctx, @@ -1123,7 +1093,6 @@ async def receive_and_expect_error(): nursery.start_soon(receive_and_expect_error) -@skip_on_broken_openssl async def test_https_mode_eof_before_handshake(client_ctx): client, server = ssl_memory_stream_pair( client_ctx, diff --git a/trio/_unix_pipes.py b/trio/_unix_pipes.py index f4158eb27d..716550790e 100644 --- a/trio/_unix_pipes.py +++ b/trio/_unix_pipes.py @@ -10,7 +10,7 @@ from ._util import ConflictDetector, Final if TYPE_CHECKING: - from typing_extensions import Final as FinalType + from typing import Final as FinalType if os.name != "posix": # We raise an error here rather than gating the import in lowlevel.py diff --git a/trio/_util.py b/trio/_util.py index 0a0795fc15..b7b4403115 100644 --- a/trio/_util.py +++ b/trio/_util.py @@ -141,6 +141,7 @@ def _return_value_looks_like_wrong_library(value): # function. So we have to just call it and then check whether the # return value is a coroutine object. # Note: will not be necessary on python>=3.8, see https://bugs.python.org/issue34890 + # TODO: python3.7 support is now dropped, so the above can be addressed. 
if not isinstance(coro, collections.abc.Coroutine): # Give good error for: nursery.start_soon(func_returning_future) if _return_value_looks_like_wrong_library(coro): diff --git a/trio/socket.py b/trio/socket.py index 61ba48ad3b..a9e276c782 100644 --- a/trio/socket.py +++ b/trio/socket.py @@ -79,9 +79,6 @@ except ImportError: pass -# get names used by Trio that we define on our own -from ._socket import IPPROTO_IPV6 as IPPROTO_IPV6 - if _t.TYPE_CHECKING: IP_BIND_ADDRESS_NO_PORT: int else: @@ -297,6 +294,7 @@ IPPROTO_IPCOMP as IPPROTO_IPCOMP, IPPROTO_IPIP as IPPROTO_IPIP, IPPROTO_IPV4 as IPPROTO_IPV4, + IPPROTO_IPV6 as IPPROTO_IPV6, IPPROTO_L2TP as IPPROTO_L2TP, IPPROTO_MAX as IPPROTO_MAX, IPPROTO_MOBILE as IPPROTO_MOBILE, From 315dbed5abf0663b11522022e9c7abbc1df54dc8 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 18 Jul 2023 03:40:19 +0000 Subject: [PATCH 098/162] Bump dependencies from commit bf9fe6 (#2709) * Dependency updates * Fix IPython tests? --------- Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: EXPLOSION --- docs-requirements.txt | 12 ++++---- test-requirements.txt | 44 ++++++++++++++++++---------- trio/_core/_tests/test_multierror.py | 8 ++--- 3 files changed, 38 insertions(+), 26 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 06136fd765..1c1b89e873 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -16,9 +16,9 @@ babel==2.12.1 # via sphinx certifi==2023.5.7 # via requests -charset-normalizer==3.1.0 +charset-normalizer==3.2.0 # via requests -click==8.1.3 +click==8.1.5 # via # click-default-group # towncrier @@ -28,7 +28,7 @@ docutils==0.18.1 # via # sphinx # sphinx-rtd-theme -exceptiongroup==1.1.1 +exceptiongroup==1.1.2 # via -r docs-requirements.in idna==3.4 # via @@ -38,9 +38,9 @@ imagesize==1.4.1 # via sphinx immutables==0.19 # via -r docs-requirements.in -importlib-metadata==6.7.0 +importlib-metadata==6.8.0 # via sphinx -importlib-resources==5.12.0 +importlib-resources==6.0.0 # via towncrier incremental==22.10.0 # via towncrier @@ -97,7 +97,7 @@ towncrier==23.6.0 # via -r docs-requirements.in urllib3==2.0.3 # via requests -zipp==3.15.0 +zipp==3.16.2 # via # importlib-metadata # importlib-resources diff --git a/test-requirements.txt b/test-requirements.txt index 17fa0a099a..939bf7a61e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,8 +6,10 @@ # astor==0.8.1 # via -r test-requirements.in -astroid==2.15.5 +astroid==2.15.6 # via pylint +asttokens==2.2.1 + # via stack-data async-generator==1.10 # via -r test-requirements.in attrs==23.1.0 @@ -16,19 +18,19 @@ attrs==23.1.0 # outcome backcall==0.2.0 # via ipython -black==23.3.0 ; implementation_name == "cpython" +black==23.7.0 ; implementation_name == "cpython" # via -r test-requirements.in build==0.10.0 # via pip-tools cffi==1.15.1 # via cryptography -click==8.1.3 +click==8.1.5 # via # black # pip-tools coverage==7.2.7 # via -r test-requirements.in -cryptography==41.0.1 +cryptography==41.0.2 # via # -r test-requirements.in # pyopenssl @@ -38,11 +40,13 @@ decorator==5.1.1 # via ipython dill==0.3.6 # via pylint -exceptiongroup==1.1.1 ; python_version < "3.11" +exceptiongroup==1.1.2 ; python_version < "3.11" # via # -r test-requirements.in # pytest -flake8==5.0.4 +executing==1.2.0 + # via stack-data +flake8==6.0.0 # via # -r test-requirements.in # flake8-pyproject @@ -54,9 +58,9 @@ idna==3.4 # trustme iniconfig==2.0.0 # via pytest 
-ipython==7.34.0 +ipython==8.12.2 # via -r test-requirements.in -isort==5.11.5 +isort==5.12.0 # via pylint jedi==0.18.2 # via @@ -94,23 +98,25 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==6.14.0 +pip-tools==7.0.0 # via -r test-requirements.in -platformdirs==3.8.0 +platformdirs==3.9.1 # via # black # pylint pluggy==1.2.0 # via pytest -prompt-toolkit==3.0.38 +prompt-toolkit==3.0.39 # via ipython ptyprocess==0.7.0 # via pexpect -pycodestyle==2.9.1 +pure-eval==0.2.2 + # via stack-data +pycodestyle==2.10.0 # via flake8 pycparser==2.21 # via cffi -pyflakes==2.5.0 +pyflakes==3.0.1 # via flake8 pygments==2.15.1 # via ipython @@ -120,14 +126,18 @@ pyopenssl==23.2.0 # via -r test-requirements.in pyproject-hooks==1.0.0 # via build -pyright==1.1.316 +pyright==1.1.317 # via -r test-requirements.in pytest==7.4.0 # via -r test-requirements.in +six==1.16.0 + # via asttokens sniffio==1.3.0 # via -r test-requirements.in sortedcontainers==2.4.0 # via -r test-requirements.in +stack-data==0.6.2 + # via ipython tomli==2.0.1 # via # black @@ -136,6 +146,7 @@ tomli==2.0.1 # mypy # pip-tools # pylint + # pyproject-hooks # pytest tomlkit==0.11.8 # via pylint @@ -143,15 +154,16 @@ traitlets==5.9.0 # via # ipython # matplotlib-inline -trustme==1.0.0 +trustme==1.1.0 # via -r test-requirements.in types-pyopenssl==23.2.0.1 ; implementation_name == "cpython" # via -r test-requirements.in -typing-extensions==4.6.3 +typing-extensions==4.7.1 # via # -r test-requirements.in # astroid # black + # ipython # mypy # pylint wcwidth==0.2.6 diff --git a/trio/_core/_tests/test_multierror.py b/trio/_core/_tests/test_multierror.py index ca98ebf982..7a8bd2f9a8 100644 --- a/trio/_core/_tests/test_multierror.py +++ b/trio/_core/_tests/test_multierror.py @@ -472,10 +472,10 @@ def run_script(name, use_ipython=False): return completed -def check_simple_excepthook(completed, uses_ipython): +def check_simple_excepthook(completed): assert_match_in_seq( [ - "in ", + "in ", "MultiError", "--- 1 ---", "in exc1_fn", @@ -502,14 +502,14 @@ def check_simple_excepthook(completed, uses_ipython): @need_ipython def test_ipython_exc_handler(): completed = run_script("simple_excepthook.py", use_ipython=True) - check_simple_excepthook(completed, True) + check_simple_excepthook(completed) @slow @need_ipython def test_ipython_imported_but_unused(): completed = run_script("simple_excepthook_IPython.py") - check_simple_excepthook(completed, False) + check_simple_excepthook(completed) @slow From a451bc454bf76f646e0f13c7f48a4fdd7df626e3 Mon Sep 17 00:00:00 2001 From: "Kulothungan U.G" Date: Wed, 19 Jul 2023 21:14:51 +0530 Subject: [PATCH 099/162] Update tutorial.rst --- docs/source/tutorial.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/tutorial.rst b/docs/source/tutorial.rst index 0faffd119b..0584446fb7 100644 --- a/docs/source/tutorial.rst +++ b/docs/source/tutorial.rst @@ -436,15 +436,15 @@ Now that we understand ``async with``, let's look at ``parent`` again: :end-at: all done! There are only 4 lines of code that really do anything here. On line -17, we use :func:`trio.open_nursery` to get a "nursery" object, and +20, we use :func:`trio.open_nursery` to get a "nursery" object, and then inside the ``async with`` block we call ``nursery.start_soon`` twice, -on lines 19 and 22. There are actually two ways to call an async +on lines 22 and 25. 
There are actually two ways to call an async function: the first one is the one we already saw, using ``await async_fn()``; the new one is ``nursery.start_soon(async_fn)``: it asks Trio to start running this async function, *but then returns immediately without waiting for the function to finish*. So after our two calls to ``nursery.start_soon``, ``child1`` and ``child2`` are now running in the -background. And then at line 25, the commented line, we hit the end of +background. And then at line 28, the commented line, we hit the end of the ``async with`` block, and the nursery's ``__aexit__`` function runs. What this does is force ``parent`` to stop here and wait for all the children in the nursery to exit. This is why you have to use From 47c1850de82937b24d01438ba5dbadbe83641326 Mon Sep 17 00:00:00 2001 From: Sam McCandlish <30874603+samsamoa@users.noreply.github.com> Date: Wed, 19 Jul 2023 11:54:22 -0700 Subject: [PATCH 100/162] Fix typo in reference-core.rst Normally I wouldn't make such a trivial PR, but this is such an excellent quote that I think it deserves to be fixed. --- docs/source/reference-core.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/reference-core.rst b/docs/source/reference-core.rst index 141e128026..f571d23294 100644 --- a/docs/source/reference-core.rst +++ b/docs/source/reference-core.rst @@ -693,7 +693,7 @@ Errors in multiple child tasks ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Normally, in Python, only one thing happens at a time, which means -that only one thing can wrong at a time. Trio has no such +that only one thing can go wrong at a time. Trio has no such limitation. Consider code like:: async def broken1(): From 11fa77d88d14d2577bec94e36c56391a767e4388 Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Thu, 20 Jul 2023 19:33:52 +0900 Subject: [PATCH 101/162] Make sure to make GitHub releases (#2710) * Improve documentation to make sure to make GitHub releases --- docs/source/releasing.rst | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/docs/source/releasing.rst b/docs/source/releasing.rst index 27cee864c0..0fe51370d5 100644 --- a/docs/source/releasing.rst +++ b/docs/source/releasing.rst @@ -29,7 +29,7 @@ Things to do for releasing: - review history change - - ``git rm`` changes + - ``git rm`` the now outdated newfragments + commit @@ -53,4 +53,10 @@ Things to do for releasing: * merge the release pull request +* make a GitHub release (go to the tag and press "Create release from tag") + + + paste in the new content in ``history.rst`` and convert it to markdown: turn the parts under section into ``---``, update links to just be the links, and whatever else is necessary. + + + include anything else that might be pertinent, like a link to the commits between the latest and current release. + * announce on gitter From 00816bf53330c5b4cf65d1229f5248d6ae86ba88 Mon Sep 17 00:00:00 2001 From: jakkdl Date: Thu, 20 Jul 2023 13:01:38 +0200 Subject: [PATCH 102/162] update emphasized lines --- docs/source/reference-core.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/reference-core.rst b/docs/source/reference-core.rst index 141e128026..b434a13273 100644 --- a/docs/source/reference-core.rst +++ b/docs/source/reference-core.rst @@ -1167,7 +1167,7 @@ the previous version, and then exits cleanly. The only change is the addition of ``async with`` blocks inside the producer and consumer: .. 
literalinclude:: reference-core/channels-shutdown.py - :emphasize-lines: 10,15 + :emphasize-lines: 11,17 The really important thing here is the producer's ``async with`` . When the producer exits, this closes the ``send_channel``, and that @@ -1246,7 +1246,7 @@ Fortunately, there's a better way! Here's a fixed version of our program above: .. literalinclude:: reference-core/channels-mpmc-fixed.py - :emphasize-lines: 7, 9, 10, 12, 13 + :emphasize-lines: 8, 10, 11, 13, 14 This example demonstrates using the `MemorySendChannel.clone` and `MemoryReceiveChannel.clone` methods. What these do is create copies From 748adffac8c2c9a701d9f5c2b3b4c01d5f7b8b7a Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Fri, 21 Jul 2023 10:27:37 +0000 Subject: [PATCH 103/162] typecheck trio/_abc.py (#2703) * typecheck trio/_abc.py --- docs/source/conf.py | 3 ++ pyproject.toml | 7 +++++ trio/_abc.py | 54 ++++++++++++++++++++++++++--------- trio/_tests/verify_types.json | 26 ++++------------- 4 files changed, 56 insertions(+), 34 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 68a5a22a81..cc9f63c4c9 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -63,6 +63,9 @@ ("py:obj", "trio._abc.T"), ("py:obj", "trio._abc.T_resource"), ("py:class", "types.FrameType"), + # these are not defined in https://docs.python.org/3/objects.inv + ("py:class", "socket.AddressFamily"), + ("py:class", "socket.SocketKind"), ] autodoc_inherit_docstrings = False default_role = "obj" diff --git a/pyproject.toml b/pyproject.toml index 5b5e56ff2e..280ba165b4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,13 @@ disallow_untyped_defs = false # DO NOT use `ignore_errors`; it doesn't apply # downstream and users have to deal with them. +[[tool.mypy.overrides]] +module = [ + "trio._abc" +] +disallow_incomplete_defs = true +disallow_untyped_defs = true + [tool.pytest.ini_options] addopts = ["--strict-markers", "--strict-config"] faulthandler_timeout = 60 diff --git a/trio/_abc.py b/trio/_abc.py index 2a1721db13..59454b794c 100644 --- a/trio/_abc.py +++ b/trio/_abc.py @@ -1,5 +1,6 @@ from __future__ import annotations +import socket from abc import ABCMeta, abstractmethod from typing import TYPE_CHECKING, Generic, TypeVar @@ -10,6 +11,10 @@ from typing_extensions import Self + # both of these introduce circular imports if outside a TYPE_CHECKING guard + from ._socket import _SocketType + from .lowlevel import Task + # We use ABCMeta instead of ABC, plus set __slots__=(), so as not to force a # __dict__ onto subclasses. @@ -73,13 +78,13 @@ class Instrument(metaclass=ABCMeta): __slots__ = () - def before_run(self): + def before_run(self) -> None: """Called at the beginning of :func:`trio.run`.""" - def after_run(self): + def after_run(self) -> None: """Called just before :func:`trio.run` returns.""" - def task_spawned(self, task): + def task_spawned(self, task: Task) -> None: """Called when the given task is created. Args: @@ -87,7 +92,7 @@ def task_spawned(self, task): """ - def task_scheduled(self, task): + def task_scheduled(self, task: Task) -> None: """Called when the given task becomes runnable. It may still be some time before it actually runs, if there are other @@ -98,7 +103,7 @@ def task_scheduled(self, task): """ - def before_task_step(self, task): + def before_task_step(self, task: Task) -> None: """Called immediately before we resume running the given task. 
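Because this patch pins down the ``Instrument`` hook signatures, a short usage sketch may help show what the annotations describe; the ``Tracer`` class here is hypothetical, not part of the patch::

    import trio
    from trio.lowlevel import Task

    class Tracer(trio.abc.Instrument):
        def task_spawned(self, task: Task) -> None:
            print("spawned:", task.name)

        def before_io_wait(self, timeout: float) -> None:
            print("about to wait for I/O, timeout =", timeout)

    async def main() -> None:
        await trio.sleep(0.1)

    trio.run(main, instruments=[Tracer()])

With the hooks annotated, a type checker can flag an annotated subclass whose signatures drift from the ones defined here. The remaining hook annotations continue below.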
Args: @@ -106,7 +111,7 @@ def before_task_step(self, task): """ - def after_task_step(self, task): + def after_task_step(self, task: Task) -> None: """Called when we return to the main run loop after a task has yielded. Args: @@ -114,7 +119,7 @@ def after_task_step(self, task): """ - def task_exited(self, task): + def task_exited(self, task: Task) -> None: """Called when the given task exits. Args: @@ -122,7 +127,7 @@ def task_exited(self, task): """ - def before_io_wait(self, timeout): + def before_io_wait(self, timeout: float) -> None: """Called before blocking to wait for I/O readiness. Args: @@ -130,7 +135,7 @@ def before_io_wait(self, timeout): """ - def after_io_wait(self, timeout): + def after_io_wait(self, timeout: float) -> None: """Called after handling pending I/O. Args: @@ -152,7 +157,23 @@ class HostnameResolver(metaclass=ABCMeta): __slots__ = () @abstractmethod - async def getaddrinfo(self, host, port, family=0, type=0, proto=0, flags=0): + async def getaddrinfo( + self, + host: bytes | str | None, + port: bytes | str | int | None, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + socket.AddressFamily, + socket.SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: """A custom implementation of :func:`~trio.socket.getaddrinfo`. Called by :func:`trio.socket.getaddrinfo`. @@ -169,7 +190,9 @@ async def getaddrinfo(self, host, port, family=0, type=0, proto=0, flags=0): """ @abstractmethod - async def getnameinfo(self, sockaddr, flags): + async def getnameinfo( + self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int + ) -> tuple[str, str]: """A custom implementation of :func:`~trio.socket.getnameinfo`. Called by :func:`trio.socket.getnameinfo`. @@ -186,7 +209,12 @@ class SocketFactory(metaclass=ABCMeta): """ @abstractmethod - def socket(self, family=None, type=None, proto=None): + def socket( + self, + family: socket.AddressFamily | int | None = None, + type: socket.SocketKind | int | None = None, + proto: int | None = None, + ) -> _SocketType: """Create and return a socket object. Your socket object must inherit from :class:`trio.socket.SocketType`, @@ -537,7 +565,7 @@ class Listener(AsyncResource, Generic[T_resource]): __slots__ = () @abstractmethod - async def accept(self): + async def accept(self) -> AsyncResource: """Wait until an incoming connection arrives, and then return it. 
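As an aside on the ``getaddrinfo`` annotation above: the five-element tuples it describes have the same shape as the entries returned by ``trio.socket.getaddrinfo`` itself. A small sketch (``example.org`` is a placeholder and the call needs network access)::

    import trio

    async def main() -> None:
        infos = await trio.socket.getaddrinfo(
            "example.org", 443, type=trio.socket.SOCK_STREAM
        )
        for family, type_, proto, canonname, sockaddr in infos:
            print(family, sockaddr)

    trio.run(main)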
Returns: diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 9d7d7aa912..c506eb6d8f 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.8764044943820225, + "completenessScore": 0.884430176565008, "exportedSymbolCounts": { "withAmbiguousType": 1, - "withKnownType": 546, - "withUnknownType": 76 + "withKnownType": 551, + "withUnknownType": 71 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -46,24 +46,12 @@ ], "otherSymbolCounts": { "withAmbiguousType": 8, - "withKnownType": 433, - "withUnknownType": 135 + "withKnownType": 448, + "withUnknownType": 120 }, "packageName": "trio", "symbols": [ "trio.__deprecated_attributes__", - "trio._abc.HostnameResolver.getaddrinfo", - "trio._abc.HostnameResolver.getnameinfo", - "trio._abc.Instrument.after_io_wait", - "trio._abc.Instrument.after_run", - "trio._abc.Instrument.after_task_step", - "trio._abc.Instrument.before_io_wait", - "trio._abc.Instrument.before_run", - "trio._abc.Instrument.before_task_step", - "trio._abc.Instrument.task_exited", - "trio._abc.Instrument.task_scheduled", - "trio._abc.Instrument.task_spawned", - "trio._abc.Listener.accept", "trio._abc.SocketFactory.socket", "trio._core._entry_queue.TrioToken.run_sync_soon", "trio._core._local.RunVar.__repr__", @@ -97,7 +85,6 @@ "trio._dtls.DTLSEndpoint.incoming_packets_buffer", "trio._dtls.DTLSEndpoint.serve", "trio._dtls.DTLSEndpoint.socket", - "trio._highlevel_socket.SocketListener", "trio._highlevel_socket.SocketListener.__init__", "trio._highlevel_socket.SocketStream.__init__", "trio._highlevel_socket.SocketStream.getsockopt", @@ -128,7 +115,6 @@ "trio._socket._SocketType.send", "trio._socket._SocketType.sendmsg", "trio._socket._SocketType.sendto", - "trio._ssl.SSLListener", "trio._ssl.SSLListener.__init__", "trio._ssl.SSLListener.accept", "trio._ssl.SSLListener.aclose", @@ -160,7 +146,6 @@ "trio.current_time", "trio.from_thread.run", "trio.from_thread.run_sync", - "trio.lowlevel.add_instrument", "trio.lowlevel.cancel_shielded_checkpoint", "trio.lowlevel.current_clock", "trio.lowlevel.current_root_task", @@ -171,7 +156,6 @@ "trio.lowlevel.open_process", "trio.lowlevel.permanently_detach_coroutine_object", "trio.lowlevel.reattach_detached_coroutine_object", - "trio.lowlevel.remove_instrument", "trio.lowlevel.reschedule", "trio.lowlevel.spawn_system_task", "trio.lowlevel.start_guest_run", From a9a9170a0142ccf865ceaff10bd3ac0b4f30911e Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Fri, 21 Jul 2023 11:06:23 +0000 Subject: [PATCH 104/162] set merge strategy for verify_types.json in .gitattributes (#2713) --- .gitattributes | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitattributes b/.gitattributes index 991065e069..7fbcb4fe2d 100644 --- a/.gitattributes +++ b/.gitattributes @@ -2,3 +2,5 @@ trio/_core/_generated* linguist-generated=true # Treat generated files as binary in git diff trio/_core/_generated* -diff +# don't merge the generated json file, let the user (script) handle it +trio/_tests/verify_types.json merge=binary From 15ddd50afb7499170271885df23c1fe5d8ee0771 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Wed, 26 Jul 2023 11:26:10 +0000 Subject: [PATCH 105/162] typecheck trio/_dtls.py (#2704) * typecheck trio/_dtls.py --- docs-requirements.in | 3 + docs-requirements.txt | 8 ++ docs/source/conf.py | 4 + 
docs/source/reference-core.rst | 2 + docs/source/reference-io.rst | 3 + pyproject.toml | 4 + test-requirements.in | 2 +- trio/__init__.py | 7 +- trio/_channel.py | 10 +- trio/_core/__init__.py | 1 + trio/_core/_run.py | 8 +- trio/_dtls.py | 221 ++++++++++++++++++++------------- trio/_socket.py | 9 +- trio/_tests/verify_types.json | 27 ++-- 14 files changed, 194 insertions(+), 115 deletions(-) diff --git a/docs-requirements.in b/docs-requirements.in index 98d5030bc5..d6214ec1d0 100644 --- a/docs-requirements.in +++ b/docs-requirements.in @@ -19,3 +19,6 @@ exceptiongroup >= 1.0.0rc9 # See note in test-requirements.in immutables >= 0.6 + +# types used in annotations +pyOpenSSL diff --git a/docs-requirements.txt b/docs-requirements.txt index 1c1b89e873..9f7794f2e8 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -16,6 +16,8 @@ babel==2.12.1 # via sphinx certifi==2023.5.7 # via requests +cffi==1.15.1 + # via cryptography charset-normalizer==3.2.0 # via requests click==8.1.5 @@ -24,6 +26,8 @@ click==8.1.5 # towncrier click-default-group==1.2.2 # via towncrier +cryptography==41.0.2 + # via pyopenssl docutils==0.18.1 # via # sphinx @@ -55,8 +59,12 @@ outcome==1.2.0 # via -r docs-requirements.in packaging==23.1 # via sphinx +pycparser==2.21 + # via cffi pygments==2.15.1 # via sphinx +pyopenssl==23.2.0 + # via -r docs-requirements.in pytz==2023.3 # via babel requests==2.31.0 diff --git a/docs/source/conf.py b/docs/source/conf.py index cc9f63c4c9..91ce7d884c 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -63,6 +63,10 @@ ("py:obj", "trio._abc.T"), ("py:obj", "trio._abc.T_resource"), ("py:class", "types.FrameType"), + # TODO: figure out if you can link this to SSL + ("py:class", "Context"), + # TODO: temporary type + ("py:class", "_SocketType"), # these are not defined in https://docs.python.org/3/objects.inv ("py:class", "socket.AddressFamily"), ("py:class", "socket.SocketKind"), diff --git a/docs/source/reference-core.rst b/docs/source/reference-core.rst index 26554af558..980a3106e5 100644 --- a/docs/source/reference-core.rst +++ b/docs/source/reference-core.rst @@ -922,6 +922,8 @@ The nursery API See :meth:`~Nursery.start`. +.. autoclass:: TaskStatus + :members: .. _task-local-storage: diff --git a/docs/source/reference-io.rst b/docs/source/reference-io.rst index a3291ef2ae..9ad11b2c5a 100644 --- a/docs/source/reference-io.rst +++ b/docs/source/reference-io.rst @@ -304,6 +304,9 @@ unfortunately that's not yet possible. .. automethod:: statistics +.. autoclass:: DTLSChannelStatistics + :members: + .. 
module:: trio.socket Low-level networking with :mod:`trio.socket` diff --git a/pyproject.toml b/pyproject.toml index 280ba165b4..3b14a075da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,10 +46,14 @@ disallow_untyped_defs = false [[tool.mypy.overrides]] module = [ + "trio._dtls", "trio._abc" ] disallow_incomplete_defs = true disallow_untyped_defs = true +disallow_any_generics = true +disallow_any_decorated = true +disallow_subclassing_any = true [tool.pytest.ini_options] addopts = ["--strict-markers", "--strict-config"] diff --git a/test-requirements.in b/test-requirements.in index 03997ad2e7..23f128f99a 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -13,7 +13,7 @@ cryptography>=41.0.0 # cryptography<41 segfaults on pypy3.10 # Tools black; implementation_name == "cpython" mypy; implementation_name == "cpython" -types-pyOpenSSL; implementation_name == "cpython" +types-pyOpenSSL; implementation_name == "cpython" # and annotations flake8 flake8-pyproject astor # code generation diff --git a/trio/__init__.py b/trio/__init__.py index 2b8810504b..ac0687f529 100644 --- a/trio/__init__.py +++ b/trio/__init__.py @@ -34,6 +34,7 @@ EndOfChannel as EndOfChannel, Nursery as Nursery, RunFinishedError as RunFinishedError, + TaskStatus as TaskStatus, TrioInternalError as TrioInternalError, WouldBlock as WouldBlock, current_effective_deadline as current_effective_deadline, @@ -46,7 +47,11 @@ NonBaseMultiError as _NonBaseMultiError, ) from ._deprecate import TrioDeprecationWarning as TrioDeprecationWarning -from ._dtls import DTLSChannel as DTLSChannel, DTLSEndpoint as DTLSEndpoint +from ._dtls import ( + DTLSChannel as DTLSChannel, + DTLSChannelStatistics as DTLSChannelStatistics, + DTLSEndpoint as DTLSEndpoint, +) from ._file_io import open_file as open_file, wrap_file as wrap_file from ._highlevel_generic import ( StapledStream as StapledStream, diff --git a/trio/_channel.py b/trio/_channel.py index f77950c4e4..c8d27695b8 100644 --- a/trio/_channel.py +++ b/trio/_channel.py @@ -20,7 +20,7 @@ def _open_memory_channel( - max_buffer_size: int, + max_buffer_size: int | float, ) -> tuple[MemorySendChannel[T], MemoryReceiveChannel[T]]: """Open a channel for passing objects between tasks within a process. @@ -92,11 +92,11 @@ def _open_memory_channel( # Need to use Tuple instead of tuple due to CI check running on 3.8 class open_memory_channel(Tuple[MemorySendChannel[T], MemoryReceiveChannel[T]]): def __new__( # type: ignore[misc] # "must return a subtype" - cls, max_buffer_size: int + cls, max_buffer_size: int | float ) -> tuple[MemorySendChannel[T], MemoryReceiveChannel[T]]: return _open_memory_channel(max_buffer_size) - def __init__(self, max_buffer_size: int): + def __init__(self, max_buffer_size: int | float): ... 
else: @@ -108,7 +108,7 @@ def __init__(self, max_buffer_size: int): @attr.s(frozen=True, slots=True) class MemoryChannelStats: current_buffer_used: int = attr.ib() - max_buffer_size: int = attr.ib() + max_buffer_size: int | float = attr.ib() open_send_channels: int = attr.ib() open_receive_channels: int = attr.ib() tasks_waiting_send: int = attr.ib() @@ -117,7 +117,7 @@ class MemoryChannelStats: @attr.s(slots=True) class MemoryChannelState(Generic[T]): - max_buffer_size: int = attr.ib() + max_buffer_size: int | float = attr.ib() data: deque[T] = attr.ib(factory=deque) # Counts of open endpoints using this state open_send_channels: int = attr.ib(default=0) diff --git a/trio/_core/__init__.py b/trio/_core/__init__.py index abd58245e3..aa898fffe0 100644 --- a/trio/_core/__init__.py +++ b/trio/_core/__init__.py @@ -28,6 +28,7 @@ CancelScope, Nursery, Task, + TaskStatus, add_instrument, checkpoint, checkpoint_if_cancelled, diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 4f90889c5f..ce8feb2827 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -783,7 +783,7 @@ def cancel_called(self) -> bool: # This code needs to be read alongside the code from Nursery.start to make # sense. @attr.s(eq=False, hash=False, repr=False) -class _TaskStatus: +class TaskStatus(metaclass=Final): _old_nursery = attr.ib() _new_nursery = attr.ib() _called_started = attr.ib(default=False) @@ -1137,16 +1137,16 @@ async def async_fn(arg1, arg2, *, task_status=trio.TASK_STATUS_IGNORED): try: self._pending_starts += 1 async with open_nursery() as old_nursery: - task_status = _TaskStatus(old_nursery, self) + task_status = TaskStatus(old_nursery, self) thunk = functools.partial(async_fn, task_status=task_status) task = GLOBAL_RUN_CONTEXT.runner.spawn_impl( thunk, args, old_nursery, name ) task._eventual_parent_nursery = self - # Wait for either _TaskStatus.started or an exception to + # Wait for either TaskStatus.started or an exception to # cancel this nursery: # If we get here, then the child either got reparented or exited - # normally. The complicated logic is all in _TaskStatus.started(). + # normally. The complicated logic is all in TaskStatus.started(). # (Any exceptions propagate directly out of the above.) 
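Since ``TaskStatus`` is now public, the ``started()`` handshake that this code implements can also be spelled out in user code. A minimal sketch, with a hypothetical ``listener`` task::

    import trio

    async def listener(
        *, task_status: trio.TaskStatus = trio.TASK_STATUS_IGNORED
    ) -> None:
        # ... bind sockets, allocate resources ...
        task_status.started()  # unblocks the `await nursery.start(...)` below
        await trio.sleep_forever()

    async def main() -> None:
        async with trio.open_nursery() as nursery:
            await nursery.start(listener)
            print("listener finished starting up")
            nursery.cancel_scope.cancel()

    trio.run(main)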
if not task_status._called_started: raise RuntimeError("child exited without calling task_status.started()") diff --git a/trio/_dtls.py b/trio/_dtls.py index 722a9499f8..8675cb75b6 100644 --- a/trio/_dtls.py +++ b/trio/_dtls.py @@ -16,34 +16,53 @@ import warnings import weakref from itertools import count -from typing import TYPE_CHECKING +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Generic, + Iterable, + Iterator, + TypeVar, + Union, +) +from weakref import ReferenceType, WeakValueDictionary import attr +from OpenSSL import SSL import trio -from trio._util import Final, NoPublicConstructor + +from ._util import Final, NoPublicConstructor if TYPE_CHECKING: from types import TracebackType + from OpenSSL.SSL import Context + from typing_extensions import Self, TypeAlias + + from ._core._run import TaskStatus + from ._socket import Address, _SocketType + MAX_UDP_PACKET_SIZE = 65527 -def packet_header_overhead(sock): +def packet_header_overhead(sock: _SocketType) -> int: if sock.family == trio.socket.AF_INET: return 28 else: return 48 -def worst_case_mtu(sock): +def worst_case_mtu(sock: _SocketType) -> int: if sock.family == trio.socket.AF_INET: return 576 - packet_header_overhead(sock) else: return 1280 - packet_header_overhead(sock) -def best_guess_mtu(sock): +def best_guess_mtu(sock: _SocketType) -> int: return 1500 - packet_header_overhead(sock) @@ -105,14 +124,14 @@ class BadPacket(Exception): # ChangeCipherSpec is used during the handshake but has its own ContentType. # # Cannot fail. -def part_of_handshake_untrusted(packet): +def part_of_handshake_untrusted(packet: bytes) -> bool: # If the packet is too short, then slicing will successfully return a # short string, which will necessarily fail to match. return packet[3:5] == b"\x00\x00" # Cannot fail -def is_client_hello_untrusted(packet): +def is_client_hello_untrusted(packet: bytes) -> bool: try: return ( packet[0] == ContentType.handshake @@ -147,7 +166,7 @@ class Record: payload: bytes = attr.ib(repr=to_hex) -def records_untrusted(packet): +def records_untrusted(packet: bytes) -> Iterator[Record]: i = 0 while i < len(packet): try: @@ -165,7 +184,7 @@ def records_untrusted(packet): yield Record(ct, version, epoch_seqno, payload) -def encode_record(record): +def encode_record(record: Record) -> bytes: header = RECORD_HEADER.pack( record.content_type, record.version, @@ -194,7 +213,7 @@ class HandshakeFragment: frag: bytes = attr.ib(repr=to_hex) -def decode_handshake_fragment_untrusted(payload): +def decode_handshake_fragment_untrusted(payload: bytes) -> HandshakeFragment: # Raises BadPacket if decoding fails try: ( @@ -224,7 +243,7 @@ def decode_handshake_fragment_untrusted(payload): ) -def encode_handshake_fragment(hsf): +def encode_handshake_fragment(hsf: HandshakeFragment) -> bytes: hs_header = HANDSHAKE_MESSAGE_HEADER.pack( hsf.msg_type, hsf.msg_len.to_bytes(3, "big"), @@ -235,7 +254,7 @@ def encode_handshake_fragment(hsf): return hs_header + hsf.frag -def decode_client_hello_untrusted(packet): +def decode_client_hello_untrusted(packet: bytes) -> tuple[int, bytes, bytes]: # Raises BadPacket if parsing fails # Returns (record epoch_seqno, cookie from the packet, data that should be # hashed into cookie) @@ -331,12 +350,19 @@ class OpaqueHandshakeMessage: record: Record +_AnyHandshakeMessage: TypeAlias = Union[ + HandshakeMessage, PseudoHandshakeMessage, OpaqueHandshakeMessage +] + + # This takes a raw outgoing handshake volley that openssl generated, and # reconstructs the handshake messages inside it, so 
that we can repack them # into records while retransmitting. So the data ought to be well-behaved -- # it's not coming from the network. -def decode_volley_trusted(volley): - messages = [] +def decode_volley_trusted( + volley: bytes, +) -> list[_AnyHandshakeMessage]: + messages: list[_AnyHandshakeMessage] = [] messages_by_seq = {} for record in records_untrusted(volley): # ChangeCipherSpec isn't a handshake message, so it can't be fragmented. @@ -380,13 +406,17 @@ def decode_volley_trusted(volley): class RecordEncoder: - def __init__(self): + def __init__(self) -> None: self._record_seq = count() - def set_first_record_number(self, n): + def set_first_record_number(self, n: int) -> None: self._record_seq = count(n) - def encode_volley(self, messages, mtu): + def encode_volley( + self, + messages: Iterable[_AnyHandshakeMessage], + mtu: int, + ) -> list[bytearray]: packets = [] packet = bytearray() for message in messages: @@ -518,13 +548,13 @@ def encode_volley(self, messages, mtu): COOKIE_LENGTH = 32 -def _current_cookie_tick(): +def _current_cookie_tick() -> int: return int(trio.current_time() / COOKIE_REFRESH_INTERVAL) # Simple deterministic and invertible serializer -- i.e., a useful tool for converting # structured data into something we can cryptographically sign. -def _signable(*fields): +def _signable(*fields: bytes) -> bytes: out = [] for field in fields: out.append(struct.pack("!Q", len(field))) @@ -532,7 +562,9 @@ def _signable(*fields): return b"".join(out) -def _make_cookie(key, salt, tick, address, client_hello_bits): +def _make_cookie( + key: bytes, salt: bytes, tick: int, address: Address, client_hello_bits: bytes +) -> bytes: assert len(salt) == SALT_BYTES assert len(key) == KEY_BYTES @@ -548,7 +580,9 @@ def _make_cookie(key, salt, tick, address, client_hello_bits): return (salt + hmac.digest(key, signable_data, COOKIE_HASH))[:COOKIE_LENGTH] -def valid_cookie(key, cookie, address, client_hello_bits): +def valid_cookie( + key: bytes, cookie: bytes, address: Address, client_hello_bits: bytes +) -> bool: if len(cookie) > SALT_BYTES: salt = cookie[:SALT_BYTES] @@ -568,7 +602,9 @@ def valid_cookie(key, cookie, address, client_hello_bits): return False -def challenge_for(key, address, epoch_seqno, client_hello_bits): +def challenge_for( + key: bytes, address: Address, epoch_seqno: int, client_hello_bits: bytes +) -> bytes: salt = os.urandom(SALT_BYTES) tick = _current_cookie_tick() cookie = _make_cookie(key, salt, tick, address, client_hello_bits) @@ -608,12 +644,15 @@ def challenge_for(key, address, epoch_seqno, client_hello_bits): return packet -class _Queue: - def __init__(self, incoming_packets_buffer): - self.s, self.r = trio.open_memory_channel(incoming_packets_buffer) +_T = TypeVar("_T") -def _read_loop(read_fn): +class _Queue(Generic[_T]): + def __init__(self, incoming_packets_buffer: int | float): + self.s, self.r = trio.open_memory_channel[_T](incoming_packets_buffer) + + +def _read_loop(read_fn: Callable[[int], bytes]) -> bytes: chunks = [] while True: try: @@ -624,7 +663,9 @@ def _read_loop(read_fn): return b"".join(chunks) -async def handle_client_hello_untrusted(endpoint, address, packet): +async def handle_client_hello_untrusted( + endpoint: DTLSEndpoint, address: Address, packet: bytes +) -> None: if endpoint._listening_context is None: return @@ -697,7 +738,9 @@ async def handle_client_hello_untrusted(endpoint, address, packet): endpoint._incoming_connections_q.s.send_nowait(stream) -async def dtls_receive_loop(endpoint_ref, sock): +async def dtls_receive_loop( + 
endpoint_ref: ReferenceType[DTLSEndpoint], sock: _SocketType +) -> None: try: while True: try: @@ -732,7 +775,8 @@ async def dtls_receive_loop(endpoint_ref, sock): await stream._resend_final_volley() else: try: - stream._q.s.send_nowait(packet) + # mypy for some reason cannot determine type of _q + stream._q.s.send_nowait(packet) # type:ignore[has-type] except trio.WouldBlock: stream._packets_dropped_in_trio += 1 else: @@ -754,6 +798,17 @@ async def dtls_receive_loop(endpoint_ref, sock): @attr.frozen class DTLSChannelStatistics: + """Currently this has only one attribute: + + - ``incoming_packets_dropped_in_trio`` (``int``): Gives a count of the number of + incoming packets from this peer that Trio successfully received from the + network, but then got dropped because the internal channel buffer was full. If + this is non-zero, then you might want to call ``receive`` more often, or use a + larger ``incoming_packets_buffer``, or just not worry about it because your + UDP-based protocol should be able to handle the occasional lost packet, right? + + """ + incoming_packets_dropped_in_trio: int @@ -773,7 +828,7 @@ class DTLSChannel(trio.abc.Channel[bytes], metaclass=NoPublicConstructor): """ - def __init__(self, endpoint, peer_address, ctx): + def __init__(self, endpoint: DTLSEndpoint, peer_address: Address, ctx: Context): self.endpoint = endpoint self.peer_address = peer_address self._packets_dropped_in_trio = 0 @@ -784,25 +839,32 @@ def __init__(self, endpoint, peer_address, ctx): # OP_NO_RENEGOTIATION disables renegotiation, which is too complex for us to # support and isn't useful anyway -- especially for DTLS where it's equivalent # to just performing a new handshake. - ctx.set_options(SSL.OP_NO_QUERY_MTU | SSL.OP_NO_RENEGOTIATION) + ctx.set_options( + ( + SSL.OP_NO_QUERY_MTU + | SSL.OP_NO_RENEGOTIATION # type: ignore[attr-defined] + ) + ) self._ssl = SSL.Connection(ctx) - self._handshake_mtu = None + self._handshake_mtu = 0 # This calls self._ssl.set_ciphertext_mtu, which is important, because if you # don't call it then openssl doesn't work. self.set_ciphertext_mtu(best_guess_mtu(self.endpoint.socket)) self._replaced = False self._closed = False - self._q = _Queue(endpoint.incoming_packets_buffer) + self._q = _Queue[bytes](endpoint.incoming_packets_buffer) self._handshake_lock = trio.Lock() - self._record_encoder = RecordEncoder() + self._record_encoder: RecordEncoder = RecordEncoder() + + self._final_volley: list[_AnyHandshakeMessage] = [] - def _set_replaced(self): + def _set_replaced(self) -> None: self._replaced = True # Any packets we already received could maybe possibly still be processed, but # there are no more coming. So we close this on the sender side. self._q.s.close() - def _check_replaced(self): + def _check_replaced(self) -> None: if self._replaced: raise trio.BrokenResourceError( "peer tore down this connection to start a new one" @@ -836,7 +898,7 @@ def close(self) -> None: # ClosedResourceError self._q.r.close() - def __enter__(self): + def __enter__(self) -> Self: return self def __exit__( @@ -847,7 +909,7 @@ def __exit__( ) -> None: return self.close() - async def aclose(self): + async def aclose(self) -> None: """Close this connection, but asynchronously. This is included to satisfy the `trio.abc.Channel` contract. 
It's @@ -857,7 +919,7 @@ async def aclose(self): self.close() await trio.lowlevel.checkpoint() - async def _send_volley(self, volley_messages): + async def _send_volley(self, volley_messages: list[_AnyHandshakeMessage]) -> None: packets = self._record_encoder.encode_volley( volley_messages, self._handshake_mtu ) @@ -865,10 +927,10 @@ async def _send_volley(self, volley_messages): async with self.endpoint._send_lock: await self.endpoint.socket.sendto(packet, self.peer_address) - async def _resend_final_volley(self): + async def _resend_final_volley(self) -> None: await self._send_volley(self._final_volley) - async def do_handshake(self, *, initial_retransmit_timeout=1.0): + async def do_handshake(self, *, initial_retransmit_timeout: float = 1.0) -> None: """Perform the handshake. Calling this is optional – if you don't, then it will be automatically called @@ -901,16 +963,17 @@ async def do_handshake(self, *, initial_retransmit_timeout=1.0): return timeout = initial_retransmit_timeout - volley_messages = [] + volley_messages: list[_AnyHandshakeMessage] = [] volley_failed_sends = 0 - def read_volley(): + def read_volley() -> list[_AnyHandshakeMessage]: volley_bytes = _read_loop(self._ssl.bio_read) new_volley_messages = decode_volley_trusted(volley_bytes) if ( new_volley_messages and volley_messages and isinstance(new_volley_messages[0], HandshakeMessage) + and isinstance(volley_messages[0], HandshakeMessage) and new_volley_messages[0].msg_seq == volley_messages[0].msg_seq ): # openssl decided to retransmit; discard because we handle @@ -998,7 +1061,7 @@ def read_volley(): self._handshake_mtu, worst_case_mtu(self.endpoint.socket) ) - async def send(self, data): + async def send(self, data: bytes) -> None: """Send a packet of data, securely.""" if self._closed: @@ -1014,7 +1077,7 @@ async def send(self, data): _read_loop(self._ssl.bio_read), self.peer_address ) - async def receive(self): + async def receive(self) -> bytes: """Fetch the next packet of data from this connection's peer, waiting if necessary. @@ -1040,7 +1103,7 @@ async def receive(self): if cleartext: return cleartext - def set_ciphertext_mtu(self, new_mtu): + def set_ciphertext_mtu(self, new_mtu: int) -> None: """Tells Trio the `largest amount of data that can be sent in a single packet to this peer `__. @@ -1075,7 +1138,7 @@ def set_ciphertext_mtu(self, new_mtu): self._handshake_mtu = new_mtu self._ssl.set_ciphertext_mtu(new_mtu) - def get_cleartext_mtu(self): + def get_cleartext_mtu(self) -> int: """Returns the largest number of bytes that you can pass in a single call to `send` while still fitting within the network-level MTU. @@ -1084,21 +1147,10 @@ def get_cleartext_mtu(self): """ if not self._did_handshake: raise trio.NeedHandshakeError - return self._ssl.get_cleartext_mtu() - - def statistics(self): - """Returns an object with statistics about this connection. - - Currently this has only one attribute: - - - ``incoming_packets_dropped_in_trio`` (``int``): Gives a count of the number of - incoming packets from this peer that Trio successfully received from the - network, but then got dropped because the internal channel buffer was full. If - this is non-zero, then you might want to call ``receive`` more often, or use a - larger ``incoming_packets_buffer``, or just not worry about it because your - UDP-based protocol should be able to handle the occasional lost packet, right? 
+ return self._ssl.get_cleartext_mtu() # type: ignore[no-any-return] - """ + def statistics(self) -> DTLSChannelStatistics: + """Returns a `DTLSChannelStatistics` object with statistics about this connection.""" return DTLSChannelStatistics(self._packets_dropped_in_trio) @@ -1126,18 +1178,18 @@ class DTLSEndpoint(metaclass=Final): """ - def __init__(self, socket, *, incoming_packets_buffer=10): + def __init__(self, socket: _SocketType, *, incoming_packets_buffer: int = 10): # We do this lazily on first construction, so only people who actually use DTLS # have to install PyOpenSSL. global SSL from OpenSSL import SSL - # TODO: create a `self._initialized` for `__del__`, so self.socket can be typed - # as trio.socket.SocketType and `is not None` checks can be removed. - self.socket = None # for __del__, in case the next line raises + # for __del__, in case the next line raises + self._initialized: bool = False if socket.type != trio.socket.SOCK_DGRAM: raise ValueError("DTLS requires a SOCK_DGRAM socket") - self.socket = socket + self._initialized = True + self.socket: _SocketType = socket self.incoming_packets_buffer = incoming_packets_buffer self._token = trio.lowlevel.current_trio_token() @@ -1146,15 +1198,15 @@ def __init__(self, socket, *, incoming_packets_buffer=10): # as a peer provides a valid cookie, we can immediately tear down the # old connection. # {remote address: DTLSChannel} - self._streams = weakref.WeakValueDictionary() - self._listening_context = None - self._listening_key = None - self._incoming_connections_q = _Queue(float("inf")) + self._streams: WeakValueDictionary[Address, DTLSChannel] = WeakValueDictionary() + self._listening_context: Context | None = None + self._listening_key: bytes | None = None + self._incoming_connections_q = _Queue[DTLSChannel](float("inf")) self._send_lock = trio.Lock() self._closed = False self._receive_loop_spawned = False - def _ensure_receive_loop(self): + def _ensure_receive_loop(self) -> None: # We have to spawn this lazily, because on Windows it will immediately error out # if the socket isn't already bound -- which for clients might not happen until # after we send our first packet. @@ -1164,9 +1216,9 @@ def _ensure_receive_loop(self): ) self._receive_loop_spawned = True - def __del__(self): + def __del__(self) -> None: # Do nothing if this object was never fully constructed - if self.socket is None: + if not self._initialized: return # Close the socket in Trio context (if our Trio context still exists), so that # the background task gets notified about the closure and can exit. @@ -1186,17 +1238,13 @@ def close(self) -> None: This object can also be used as a context manager. """ - # Do nothing if this object was never fully constructed - if self.socket is None: # pragma: no cover - return - self._closed = True self.socket.close() for stream in list(self._streams.values()): stream.close() self._incoming_connections_q.s.close() - def __enter__(self): + def __enter__(self) -> Self: return self def __exit__( @@ -1207,13 +1255,20 @@ def __exit__( ) -> None: return self.close() - def _check_closed(self): + def _check_closed(self) -> None: if self._closed: raise trio.ClosedResourceError + # async_fn cannot be typed with ParamSpec, since we don't accept + # kwargs. Can be typed with TypeVarTuple once it's fully supported + # in mypy. 
async def serve( - self, ssl_context, async_fn, *args, task_status=trio.TASK_STATUS_IGNORED - ): + self, + ssl_context: Context, + async_fn: Callable[..., Awaitable[object]], + *args: Any, + task_status: TaskStatus = trio.TASK_STATUS_IGNORED, # type: ignore[has-type] + ) -> None: """Listen for incoming connections, and spawn a handler for each using an internal nursery. @@ -1257,7 +1312,7 @@ async def handler(dtls_channel): self._listening_context = ssl_context task_status.started() - async def handler_wrapper(stream): + async def handler_wrapper(stream: DTLSChannel) -> None: with stream: await async_fn(stream, *args) @@ -1267,7 +1322,7 @@ async def handler_wrapper(stream): finally: self._listening_context = None - def connect(self, address, ssl_context): + def connect(self, address: tuple[str, int], ssl_context: Context) -> DTLSChannel: """Initiate an outgoing DTLS connection. Notice that this is a synchronous method. That's because it doesn't actually diff --git a/trio/_socket.py b/trio/_socket.py index 659f844078..26b03fc3e0 100644 --- a/trio/_socket.py +++ b/trio/_socket.py @@ -5,7 +5,7 @@ import socket as _stdlib_socket import sys from functools import wraps as _wraps -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Tuple, Union import idna as _idna @@ -17,7 +17,12 @@ from collections.abc import Iterable from types import TracebackType - from typing_extensions import Self + from typing_extensions import Self, TypeAlias + +# must use old-style typing because it's evaluated at runtime +Address: TypeAlias = Union[ + str, bytes, Tuple[str, int], Tuple[str, int, int], Tuple[str, int, int, int] +] # Usage: diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index c506eb6d8f..ba26a34e9f 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.884430176565008, + "completenessScore": 0.8832, "exportedSymbolCounts": { "withAmbiguousType": 1, - "withKnownType": 551, - "withUnknownType": 71 + "withKnownType": 552, + "withUnknownType": 72 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -45,9 +45,9 @@ } ], "otherSymbolCounts": { - "withAmbiguousType": 8, - "withKnownType": 448, - "withUnknownType": 120 + "withAmbiguousType": 6, + "withKnownType": 475, + "withUnknownType": 114 }, "packageName": "trio", "symbols": [ @@ -61,6 +61,8 @@ "trio._core._mock_clock.MockClock.jump", "trio._core._run.Nursery.start", "trio._core._run.Nursery.start_soon", + "trio._core._run.TaskStatus.__repr__", + "trio._core._run.TaskStatus.started", "trio._core._unbounded_queue.UnboundedQueue.__aiter__", "trio._core._unbounded_queue.UnboundedQueue.__anext__", "trio._core._unbounded_queue.UnboundedQueue.__repr__", @@ -69,22 +71,9 @@ "trio._core._unbounded_queue.UnboundedQueue.get_batch_nowait", "trio._core._unbounded_queue.UnboundedQueue.qsize", "trio._core._unbounded_queue.UnboundedQueue.statistics", - "trio._dtls.DTLSChannel.__enter__", "trio._dtls.DTLSChannel.__init__", - "trio._dtls.DTLSChannel.aclose", - "trio._dtls.DTLSChannel.do_handshake", - "trio._dtls.DTLSChannel.get_cleartext_mtu", - "trio._dtls.DTLSChannel.receive", - "trio._dtls.DTLSChannel.send", - "trio._dtls.DTLSChannel.set_ciphertext_mtu", - "trio._dtls.DTLSChannel.statistics", - "trio._dtls.DTLSEndpoint.__del__", - "trio._dtls.DTLSEndpoint.__enter__", "trio._dtls.DTLSEndpoint.__init__", - "trio._dtls.DTLSEndpoint.connect", - "trio._dtls.DTLSEndpoint.incoming_packets_buffer", 
"trio._dtls.DTLSEndpoint.serve", - "trio._dtls.DTLSEndpoint.socket", "trio._highlevel_socket.SocketListener.__init__", "trio._highlevel_socket.SocketStream.__init__", "trio._highlevel_socket.SocketStream.getsockopt", From c3e790b9e6968d5e07cd2fce822de9af12492499 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 26 Jul 2023 11:27:40 +0000 Subject: [PATCH 106/162] Bump certifi from 2023.5.7 to 2023.7.22 Bumps [certifi](https://github.com/certifi/python-certifi) from 2023.5.7 to 2023.7.22. - [Commits](https://github.com/certifi/python-certifi/compare/2023.05.07...2023.07.22) --- updated-dependencies: - dependency-name: certifi dependency-type: indirect ... Signed-off-by: dependabot[bot] --- docs-requirements.txt | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 9f7794f2e8..ce7eb68715 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -14,7 +14,7 @@ attrs==23.1.0 # outcome babel==2.12.1 # via sphinx -certifi==2023.5.7 +certifi==2023.7.22 # via requests cffi==1.15.1 # via cryptography @@ -42,10 +42,6 @@ imagesize==1.4.1 # via sphinx immutables==0.19 # via -r docs-requirements.in -importlib-metadata==6.8.0 - # via sphinx -importlib-resources==6.0.0 - # via towncrier incremental==22.10.0 # via towncrier jinja2==3.0.3 @@ -65,8 +61,6 @@ pygments==2.15.1 # via sphinx pyopenssl==23.2.0 # via -r docs-requirements.in -pytz==2023.3 - # via babel requests==2.31.0 # via sphinx sniffio==1.3.0 @@ -99,13 +93,7 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-trio==1.1.2 # via -r docs-requirements.in -tomli==2.0.1 - # via towncrier towncrier==23.6.0 # via -r docs-requirements.in urllib3==2.0.3 # via requests -zipp==3.16.2 - # via - # importlib-metadata - # importlib-resources From db89701352a2827fb64cebb9073936d83737a360 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Wed, 26 Jul 2023 12:04:24 +0000 Subject: [PATCH 107/162] add codespell to CI (#2698) --- check.sh | 2 ++ pyproject.toml | 3 +++ test-requirements.in | 1 + test-requirements.txt | 2 ++ trio/_tests/test_exports.py | 2 +- 5 files changed, 9 insertions(+), 1 deletion(-) diff --git a/check.sh b/check.sh index 8912f709e7..a0efa531b6 100755 --- a/check.sh +++ b/check.sh @@ -41,6 +41,8 @@ if git status --porcelain | grep -q "requirements.txt"; then EXIT_STATUS=1 fi +codespell || EXIT_STATUS=$? + python trio/_tests/check_type_completeness.py --overwrite-file || EXIT_STATUS=$? if git status --porcelain trio/_tests/verify_types.json | grep -q "M"; then echo "Type completeness changed, please update!" 
diff --git a/pyproject.toml b/pyproject.toml index 3b14a075da..264345d8c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,9 @@ [tool.black] target-version = ['py38'] +[tool.codespell] +ignore-words-list = 'astroid,crasher,asend' + [tool.flake8] extend-ignore = ['D', 'E', 'W', 'F403', 'F405', 'F821', 'F822'] per-file-ignores = [ diff --git a/test-requirements.in b/test-requirements.in index 23f128f99a..1911b1bf11 100644 --- a/test-requirements.in +++ b/test-requirements.in @@ -18,6 +18,7 @@ flake8 flake8-pyproject astor # code generation pip-tools >= 6.13.0 +codespell # https://github.com/python-trio/trio/pull/654#issuecomment-420518745 mypy-extensions; implementation_name == "cpython" diff --git a/test-requirements.txt b/test-requirements.txt index 939bf7a61e..7e0d86e62e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -28,6 +28,8 @@ click==8.1.5 # via # black # pip-tools +codespell==2.2.5 + # via -r test-requirements.in coverage==7.2.7 # via -r test-requirements.in cryptography==41.0.2 diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index 20635b0022..b5d0a44088 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -175,7 +175,7 @@ def no_underscores(symbols): if modname == "trio": static_names.add("testing") - # these are hidden behind `if sys.plaftorm != "win32" or not TYPE_CHECKING` + # these are hidden behind `if sys.platform != "win32" or not TYPE_CHECKING` # so presumably pyright is parsing that if statement, in which case we don't # care about them being missing. if modname == "trio.socket" and sys.platform == "win32": From e5454843d04830a062e7a1ea0c84a03fba7278d4 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Thu, 27 Jul 2023 16:02:49 +0000 Subject: [PATCH 108/162] Partially revert "Bump certifi from 2023.5.7 to 2023.7.22" (#2716) * Partially revert "Bump certifi from 2023.5.7 to 2023.7.22", restoring several lines that were removed from docs-requirements.txt --- docs-requirements.txt | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs-requirements.txt b/docs-requirements.txt index ce7eb68715..fabf3e901a 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -42,6 +42,10 @@ imagesize==1.4.1 # via sphinx immutables==0.19 # via -r docs-requirements.in +importlib-metadata==6.8.0 + # via sphinx +importlib-resources==6.0.0 + # via towncrier incremental==22.10.0 # via towncrier jinja2==3.0.3 @@ -61,6 +65,8 @@ pygments==2.15.1 # via sphinx pyopenssl==23.2.0 # via -r docs-requirements.in +pytz==2023.3 + # via babel requests==2.31.0 # via sphinx sniffio==1.3.0 @@ -93,7 +99,13 @@ sphinxcontrib-serializinghtml==1.1.5 # via sphinx sphinxcontrib-trio==1.1.2 # via -r docs-requirements.in +tomli==2.0.1 + # via towncrier towncrier==23.6.0 # via -r docs-requirements.in urllib3==2.0.3 # via requests +zipp==3.16.2 + # via + # importlib-metadata + # importlib-resources From 2fd005b1cd9014c11b47da6baff59d82171b34d7 Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Fri, 28 Jul 2023 21:52:59 +1000 Subject: [PATCH 109/162] Define types for `AsyncIOWrapper` and `trio.Path` (#2706) * Add types to directly defined objects in _file_io * Implement methods on AsyncIOWrapper using a pile of self-type-properties. 
* Add types for open_file() * Suppress the duplicate type hints for trio.open_file() * Add types to Path wrapper class * Overloads are never executed, so they should be ignored by coverage * Type trio._util.async_wraps * Add mostly-any types for internal _path functions These are way too dynamic to properly type. * Disallow untyped definitions in _path and _file_io --------- Co-authored-by: EXPLOSION --- .coveragerc | 1 + docs/source/conf.py | 2 + docs/source/reference-io.rst | 6 +- pyproject.toml | 6 + trio/_file_io.py | 391 ++++++++++++++++++++++++++++++---- trio/_path.py | 356 +++++++++++++++++++++++-------- trio/_tests/test_file_io.py | 45 +++- trio/_tests/verify_types.json | 30 +-- trio/_util.py | 11 +- 9 files changed, 690 insertions(+), 158 deletions(-) diff --git a/.coveragerc b/.coveragerc index 98f923bd8e..d577aa8adf 100644 --- a/.coveragerc +++ b/.coveragerc @@ -21,6 +21,7 @@ exclude_lines = abc.abstractmethod if TYPE_CHECKING: if _t.TYPE_CHECKING: + @overload partial_branches = pragma: no branch diff --git a/docs/source/conf.py b/docs/source/conf.py index 91ce7d884c..650688717a 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -63,6 +63,8 @@ ("py:obj", "trio._abc.T"), ("py:obj", "trio._abc.T_resource"), ("py:class", "types.FrameType"), + ("py:class", "P.args"), + ("py:class", "P.kwargs"), # TODO: figure out if you can link this to SSL ("py:class", "Context"), # TODO: temporary type diff --git a/docs/source/reference-io.rst b/docs/source/reference-io.rst index 9ad11b2c5a..85969174aa 100644 --- a/docs/source/reference-io.rst +++ b/docs/source/reference-io.rst @@ -637,9 +637,11 @@ Asynchronous path objects Asynchronous file objects ~~~~~~~~~~~~~~~~~~~~~~~~~ -.. autofunction:: open_file +.. Suppress type annotations here, they refer to lots of internal types. + The normal Python docs go into better detail. +.. autofunction:: open_file(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=None, opener=None) -.. autofunction:: wrap_file +.. autofunction:: wrap_file(file) .. interface:: Asynchronous file interface diff --git a/pyproject.toml b/pyproject.toml index 264345d8c5..d479442c7a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,12 @@ disallow_untyped_defs = false # DO NOT use `ignore_errors`; it doesn't apply # downstream and users have to deal with them. 
+[[tool.mypy.overrides]] +module = [ + "trio._path", + "trio._file_io", +] +disallow_untyped_defs = true [[tool.mypy.overrides]] module = [ diff --git a/trio/_file_io.py b/trio/_file_io.py index 9f7d81adef..6b79ae25b5 100644 --- a/trio/_file_io.py +++ b/trio/_file_io.py @@ -1,13 +1,39 @@ +from __future__ import annotations + import io from functools import partial +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + BinaryIO, + Callable, + Generic, + Iterable, + TypeVar, + Union, + overload, +) import trio from ._util import async_wraps from .abc import AsyncResource +if TYPE_CHECKING: + from _typeshed import ( + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + StrOrBytesPath, + ) + from typing_extensions import Literal + # This list is also in the docs, make sure to keep them in sync -_FILE_SYNC_ATTRS = { +_FILE_SYNC_ATTRS: set[str] = { "closed", "encoding", "errors", @@ -29,7 +55,7 @@ } # This list is also in the docs, make sure to keep them in sync -_FILE_ASYNC_METHODS = { +_FILE_ASYNC_METHODS: set[str] = { "flush", "read", "read1", @@ -48,59 +74,201 @@ } -class AsyncIOWrapper(AsyncResource): +FileT = TypeVar("FileT") +FileT_co = TypeVar("FileT_co", covariant=True) +T = TypeVar("T") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) +AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True) +AnyStr_contra = TypeVar("AnyStr_contra", str, bytes, contravariant=True) + +# This is a little complicated. IO objects have a lot of methods, and which are available on +# different types varies wildly. We want to match the interface of whatever file we're wrapping. +# This pile of protocols each has one sync method/property, meaning they're going to be compatible +# with a file class that supports that method/property. The ones parameterized with AnyStr take +# either str or bytes depending. + +# The wrapper is then a generic class, where the typevar is set to the type of the sync file we're +# wrapping. For generics, adding a type to self has a special meaning - properties/methods can be +# conditional - it's only valid to call them if the object you're accessing them on is compatible +# with that type hint. By using the protocols, the type checker will be checking to see if the +# wrapped type has that method, and only allow the methods that do to be called. We can then alter +# the signature however it needs to match runtime behaviour. +# More info: https://mypy.readthedocs.io/en/stable/more_types.html#advanced-uses-of-self-types +if TYPE_CHECKING: + from typing_extensions import Buffer, Protocol + + # fmt: off + + class _HasClosed(Protocol): + @property + def closed(self) -> bool: ... + + class _HasEncoding(Protocol): + @property + def encoding(self) -> str: ... + + class _HasErrors(Protocol): + @property + def errors(self) -> str | None: ... + + class _HasFileNo(Protocol): + def fileno(self) -> int: ... + + class _HasIsATTY(Protocol): + def isatty(self) -> bool: ... + + class _HasNewlines(Protocol[T_co]): + # Type varies here - documented to be None, tuple of strings, strings. Typeshed uses Any. + @property + def newlines(self) -> T_co: ... + + class _HasReadable(Protocol): + def readable(self) -> bool: ... + + class _HasSeekable(Protocol): + def seekable(self) -> bool: ... + + class _HasWritable(Protocol): + def writable(self) -> bool: ... + + class _HasBuffer(Protocol): + @property + def buffer(self) -> BinaryIO: ... 
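The comment above describes how these one-method protocols make ``AsyncIOWrapper``'s methods conditional on what the wrapped file supports. Distilled to a toy example (``Wrapper``, ``_CanQuack``, ``Duck`` and ``Rock`` are illustrative names, not trio code)::

    from __future__ import annotations

    from typing import Generic, Protocol, TypeVar

    T_co = TypeVar("T_co", covariant=True)

    class _CanQuack(Protocol):
        def quack(self) -> str: ...

    class Wrapper(Generic[T_co]):
        def __init__(self, wrapped: T_co) -> None:
            self._wrapped = wrapped

        # only valid to call when the wrapped object provides .quack()
        def quack(self: Wrapper[_CanQuack]) -> str:
            return self._wrapped.quack()

    class Duck:
        def quack(self) -> str:
            return "quack"

    class Rock:
        pass

    print(Wrapper(Duck()).quack())  # fine at runtime and for the type checker
    # Wrapper(Rock()).quack()       # a type checker rejects this call (invalid self type)

The protocol definitions for the real wrapper continue below.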
+ + class _HasRaw(Protocol): + @property + def raw(self) -> io.RawIOBase: ... + + class _HasLineBuffering(Protocol): + @property + def line_buffering(self) -> bool: ... + + class _HasCloseFD(Protocol): + @property + def closefd(self) -> bool: ... + + class _HasName(Protocol): + @property + def name(self) -> str: ... + + class _HasMode(Protocol): + @property + def mode(self) -> str: ... + + class _CanGetValue(Protocol[AnyStr_co]): + def getvalue(self) -> AnyStr_co: ... + + class _CanGetBuffer(Protocol): + def getbuffer(self) -> memoryview: ... + + class _CanFlush(Protocol): + def flush(self) -> None: ... + + class _CanRead(Protocol[AnyStr_co]): + def read(self, size: int | None = ..., /) -> AnyStr_co: ... + + class _CanRead1(Protocol): + def read1(self, size: int | None = ..., /) -> bytes: ... + + class _CanReadAll(Protocol[AnyStr_co]): + def readall(self) -> AnyStr_co: ... + + class _CanReadInto(Protocol): + def readinto(self, buf: Buffer, /) -> int | None: ... + + class _CanReadInto1(Protocol): + def readinto1(self, buffer: Buffer, /) -> int: ... + + class _CanReadLine(Protocol[AnyStr_co]): + def readline(self, size: int = ..., /) -> AnyStr_co: ... + + class _CanReadLines(Protocol[AnyStr]): + def readlines(self, hint: int = ...) -> list[AnyStr]: ... + + class _CanSeek(Protocol): + def seek(self, target: int, whence: int = 0, /) -> int: ... + + class _CanTell(Protocol): + def tell(self) -> int: ... + + class _CanTruncate(Protocol): + def truncate(self, size: int | None = ..., /) -> int: ... + + class _CanWrite(Protocol[AnyStr_contra]): + def write(self, data: AnyStr_contra, /) -> int: ... + + class _CanWriteLines(Protocol[T_contra]): + # The lines parameter varies for bytes/str, so use a typevar to make the async match. + def writelines(self, lines: Iterable[T_contra], /) -> None: ... + + class _CanPeek(Protocol[AnyStr_co]): + def peek(self, size: int = 0, /) -> AnyStr_co: ... + + class _CanDetach(Protocol[T_co]): + # The T typevar will be the unbuffered/binary file this file wraps. + def detach(self) -> T_co: ... + + class _CanClose(Protocol): + def close(self) -> None: ... + + +# FileT needs to be covariant for the protocol trick to work - the real IO types are effectively a +# subtype of the protocols. +class AsyncIOWrapper(AsyncResource, Generic[FileT_co]): """A generic :class:`~io.IOBase` wrapper that implements the :term:`asynchronous file object` interface. Wrapped methods that could block are executed in :meth:`trio.to_thread.run_sync`. - All properties and methods defined in in :mod:`~io` are exposed by this + All properties and methods defined in :mod:`~io` are exposed by this wrapper, if they exist in the wrapped file object. 
- """ - def __init__(self, file): + def __init__(self, file: FileT_co) -> None: self._wrapped = file @property - def wrapped(self): + def wrapped(self) -> FileT_co: """object: A reference to the wrapped file object""" return self._wrapped - def __getattr__(self, name): - if name in _FILE_SYNC_ATTRS: - return getattr(self._wrapped, name) - if name in _FILE_ASYNC_METHODS: - meth = getattr(self._wrapped, name) + if not TYPE_CHECKING: - @async_wraps(self.__class__, self._wrapped.__class__, name) - async def wrapper(*args, **kwargs): - func = partial(meth, *args, **kwargs) - return await trio.to_thread.run_sync(func) + def __getattr__(self, name: str) -> object: + if name in _FILE_SYNC_ATTRS: + return getattr(self._wrapped, name) + if name in _FILE_ASYNC_METHODS: + meth = getattr(self._wrapped, name) - # cache the generated method - setattr(self, name, wrapper) - return wrapper + @async_wraps(self.__class__, self._wrapped.__class__, name) + async def wrapper(*args, **kwargs): + func = partial(meth, *args, **kwargs) + return await trio.to_thread.run_sync(func) - raise AttributeError(name) + # cache the generated method + setattr(self, name, wrapper) + return wrapper - def __dir__(self): + raise AttributeError(name) + + def __dir__(self) -> Iterable[str]: attrs = set(super().__dir__()) attrs.update(a for a in _FILE_SYNC_ATTRS if hasattr(self.wrapped, a)) attrs.update(a for a in _FILE_ASYNC_METHODS if hasattr(self.wrapped, a)) return attrs - def __aiter__(self): + def __aiter__(self) -> AsyncIOWrapper[FileT_co]: return self - async def __anext__(self): + async def __anext__(self: AsyncIOWrapper[_CanReadLine[AnyStr]]) -> AnyStr: line = await self.readline() if line: return line else: raise StopAsyncIteration - async def detach(self): + async def detach(self: AsyncIOWrapper[_CanDetach[T]]) -> AsyncIOWrapper[T]: """Like :meth:`io.BufferedIOBase.detach`, but async. This also re-wraps the result in a new :term:`asynchronous file object` @@ -111,7 +279,7 @@ async def detach(self): raw = await trio.to_thread.run_sync(self._wrapped.detach) return wrap_file(raw) - async def aclose(self): + async def aclose(self: AsyncIOWrapper[_CanClose]) -> None: """Like :meth:`io.IOBase.close`, but async. This is also shielded from cancellation; if a cancellation scope is @@ -125,18 +293,167 @@ async def aclose(self): await trio.lowlevel.checkpoint_if_cancelled() + if TYPE_CHECKING: + # fmt: off + # Based on typing.IO and io stubs. + @property + def closed(self: AsyncIOWrapper[_HasClosed]) -> bool: ... + @property + def encoding(self: AsyncIOWrapper[_HasEncoding]) -> str: ... + @property + def errors(self: AsyncIOWrapper[_HasErrors]) -> str | None: ... + @property + def newlines(self: AsyncIOWrapper[_HasNewlines[T]]) -> T: ... + @property + def buffer(self: AsyncIOWrapper[_HasBuffer]) -> BinaryIO: ... + @property + def raw(self: AsyncIOWrapper[_HasRaw]) -> io.RawIOBase: ... + @property + def line_buffering(self: AsyncIOWrapper[_HasLineBuffering]) -> int: ... + @property + def closefd(self: AsyncIOWrapper[_HasCloseFD]) -> bool: ... + @property + def name(self: AsyncIOWrapper[_HasName]) -> str: ... + @property + def mode(self: AsyncIOWrapper[_HasMode]) -> str: ... + + def fileno(self: AsyncIOWrapper[_HasFileNo]) -> int: ... + def isatty(self: AsyncIOWrapper[_HasIsATTY]) -> bool: ... + def readable(self: AsyncIOWrapper[_HasReadable]) -> bool: ... + def seekable(self: AsyncIOWrapper[_HasSeekable]) -> bool: ... + def writable(self: AsyncIOWrapper[_HasWritable]) -> bool: ... 
+ def getvalue(self: AsyncIOWrapper[_CanGetValue[AnyStr]]) -> AnyStr: ... + def getbuffer(self: AsyncIOWrapper[_CanGetBuffer]) -> memoryview: ... + async def flush(self: AsyncIOWrapper[_CanFlush]) -> None: ... + async def read(self: AsyncIOWrapper[_CanRead[AnyStr]], size: int | None = -1, /) -> AnyStr: ... + async def read1(self: AsyncIOWrapper[_CanRead1], size: int | None = -1, /) -> bytes: ... + async def readall(self: AsyncIOWrapper[_CanReadAll[AnyStr]]) -> AnyStr: ... + async def readinto(self: AsyncIOWrapper[_CanReadInto], buf: Buffer, /) -> int | None: ... + async def readline(self: AsyncIOWrapper[_CanReadLine[AnyStr]], size: int = -1, /) -> AnyStr: ... + async def readlines(self: AsyncIOWrapper[_CanReadLines[AnyStr]]) -> list[AnyStr]: ... + async def seek(self: AsyncIOWrapper[_CanSeek], target: int, whence: int = 0, /) -> int: ... + async def tell(self: AsyncIOWrapper[_CanTell]) -> int: ... + async def truncate(self: AsyncIOWrapper[_CanTruncate], size: int | None = None, /) -> int: ... + async def write(self: AsyncIOWrapper[_CanWrite[AnyStr]], data: AnyStr, /) -> int: ... + async def writelines(self: AsyncIOWrapper[_CanWriteLines[T]], lines: Iterable[T], /) -> None: ... + async def readinto1(self: AsyncIOWrapper[_CanReadInto1], buffer: Buffer, /) -> int: ... + async def peek(self: AsyncIOWrapper[_CanPeek[AnyStr]], size: int = 0, /) -> AnyStr: ... + + +# Type hints are copied from builtin open. +_OpenFile = Union["StrOrBytesPath", int] +_Opener = Callable[[str, int], int] + + +@overload +async def open_file( + file: _OpenFile, + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> AsyncIOWrapper[io.TextIOWrapper]: + ... + + +@overload +async def open_file( + file: _OpenFile, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> AsyncIOWrapper[io.FileIO]: + ... + + +@overload +async def open_file( + file: _OpenFile, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> AsyncIOWrapper[io.BufferedRandom]: + ... + + +@overload +async def open_file( + file: _OpenFile, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> AsyncIOWrapper[io.BufferedWriter]: + ... + + +@overload +async def open_file( + file: _OpenFile, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> AsyncIOWrapper[io.BufferedReader]: + ... + + +@overload +async def open_file( + file: _OpenFile, + mode: OpenBinaryMode, + buffering: int, + encoding: None = None, + errors: None = None, + newline: None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> AsyncIOWrapper[BinaryIO]: + ... + + +@overload +async def open_file( + file: _OpenFile, + mode: str, + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> AsyncIOWrapper[IO[Any]]: + ... 
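These overloads make the wrapper type follow the ``mode`` argument, much like the stubs for the builtin ``open``. In practice (``example.txt`` is a placeholder path)::

    import trio

    async def main() -> None:
        async with await trio.open_file("example.txt", "w") as f:  # text-mode wrapper
            await f.write("one\ntwo\n")

        async with await trio.open_file("example.txt", "rb") as f:  # bytes wrapper
            async for line in f:
                print(line)

    trio.run(main)

The annotated implementation follows.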
+ async def open_file( - file, - mode="r", - buffering=-1, - encoding=None, - errors=None, - newline=None, - closefd=True, - opener=None, -): - """Asynchronous version of :func:`io.open`. + file: _OpenFile, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: _Opener | None = None, +) -> AsyncIOWrapper[Any]: + """Asynchronous version of :func:`open`. Returns: An :term:`asynchronous file object` @@ -161,7 +478,7 @@ async def open_file( return _file -def wrap_file(file): +def wrap_file(file: FileT) -> AsyncIOWrapper[FileT]: """This wraps any file object in a wrapper that provides an asynchronous file object interface. @@ -179,7 +496,7 @@ def wrap_file(file): """ - def has(attr): + def has(attr: str) -> bool: return hasattr(file, attr) and callable(getattr(file, attr)) if not (has("close") and (has("read") or has("write"))): diff --git a/trio/_path.py b/trio/_path.py index 67234e223d..b7e6b16e4a 100644 --- a/trio/_path.py +++ b/trio/_path.py @@ -1,49 +1,96 @@ +from __future__ import annotations + +import inspect import os import pathlib import sys import types +from collections.abc import Awaitable, Callable, Iterable from functools import partial, wraps -from typing import TYPE_CHECKING, Any +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper +from typing import ( + IO, + TYPE_CHECKING, + Any, + BinaryIO, + ClassVar, + TypeVar, + Union, + cast, + overload, +) import trio +from trio._file_io import AsyncIOWrapper as _AsyncIOWrapper from trio._util import Final, async_wraps +if TYPE_CHECKING: + from _typeshed import ( + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ) + from typing_extensions import Concatenate, Literal, ParamSpec, TypeAlias + + P = ParamSpec("P") + +T = TypeVar("T") +StrPath: TypeAlias = Union[str, "os.PathLike[str]"] # Only subscriptable in 3.9+ + # re-wrap return value from methods that return new instances of pathlib.Path -def rewrap_path(value): +def rewrap_path(value: T) -> T | Path: if isinstance(value, pathlib.Path): - value = Path(value) - return value + return Path(value) + else: + return value -def _forward_factory(cls, attr_name, attr): +def _forward_factory( + cls: AsyncAutoWrapperType, + attr_name: str, + attr: Callable[Concatenate[pathlib.Path, P], T], +) -> Callable[Concatenate[Path, P], T | Path]: @wraps(attr) - def wrapper(self, *args, **kwargs): + def wrapper(self: Path, *args: P.args, **kwargs: P.kwargs) -> T | Path: attr = getattr(self._wrapped, attr_name) value = attr(*args, **kwargs) return rewrap_path(value) + # Assigning this makes inspect and therefore Sphinx show the original parameters. + # It's not defined on functions normally though, this is a custom attribute. 
+ assert isinstance(wrapper, types.FunctionType) + wrapper.__signature__ = inspect.signature(attr) + return wrapper -def _forward_magic(cls, attr): +def _forward_magic( + cls: AsyncAutoWrapperType, attr: Callable[..., T] +) -> Callable[..., Path | T]: sentinel = object() @wraps(attr) - def wrapper(self, other=sentinel): + def wrapper(self: Path, other: object = sentinel) -> Path | T: if other is sentinel: return attr(self._wrapped) if isinstance(other, cls): - other = other._wrapped + other = cast(Path, other)._wrapped value = attr(self._wrapped, other) return rewrap_path(value) + assert isinstance(wrapper, types.FunctionType) + wrapper.__signature__ = inspect.signature(attr) return wrapper -def iter_wrapper_factory(cls, meth_name): +def iter_wrapper_factory( + cls: AsyncAutoWrapperType, meth_name: str +) -> Callable[Concatenate[Path, P], Awaitable[Iterable[Path]]]: @async_wraps(cls, cls._wraps, meth_name) - async def wrapper(self, *args, **kwargs): + async def wrapper(self: Path, *args: P.args, **kwargs: P.kwargs) -> Iterable[Path]: meth = getattr(self._wrapped, meth_name) func = partial(meth, *args, **kwargs) # Make sure that the full iteration is performed in the thread @@ -54,9 +101,11 @@ async def wrapper(self, *args, **kwargs): return wrapper -def thread_wrapper_factory(cls, meth_name): +def thread_wrapper_factory( + cls: AsyncAutoWrapperType, meth_name: str +) -> Callable[Concatenate[Path, P], Awaitable[Path]]: @async_wraps(cls, cls._wraps, meth_name) - async def wrapper(self, *args, **kwargs): + async def wrapper(self: Path, *args: P.args, **kwargs: P.kwargs) -> Path: meth = getattr(self._wrapped, meth_name) func = partial(meth, *args, **kwargs) value = await trio.to_thread.run_sync(func) @@ -65,20 +114,31 @@ async def wrapper(self, *args, **kwargs): return wrapper -def classmethod_wrapper_factory(cls, meth_name): - @classmethod +def classmethod_wrapper_factory( + cls: AsyncAutoWrapperType, meth_name: str +) -> classmethod: @async_wraps(cls, cls._wraps, meth_name) - async def wrapper(cls, *args, **kwargs): + async def wrapper(cls: type[Path], *args: Any, **kwargs: Any) -> Path: meth = getattr(cls._wraps, meth_name) func = partial(meth, *args, **kwargs) value = await trio.to_thread.run_sync(func) return rewrap_path(value) - return wrapper + assert isinstance(wrapper, types.FunctionType) + wrapper.__signature__ = inspect.signature(getattr(cls._wraps, meth_name)) + return classmethod(wrapper) class AsyncAutoWrapperType(Final): - def __init__(cls, name, bases, attrs): + _forwards: type + _wraps: type + _forward_magic: list[str] + _wrap_iter: list[str] + _forward: list[str] + + def __init__( + cls, name: str, bases: tuple[type, ...], attrs: dict[str, object] + ) -> None: super().__init__(name, bases, attrs) cls._forward = [] @@ -87,7 +147,7 @@ def __init__(cls, name, bases, attrs): type(cls).generate_magic(cls, attrs) type(cls).generate_iter(cls, attrs) - def generate_forwards(cls, attrs): + def generate_forwards(cls, attrs: dict[str, object]) -> None: # forward functions of _forwards for attr_name, attr in cls._forwards.__dict__.items(): if attr_name.startswith("_") or attr_name in attrs: @@ -101,8 +161,9 @@ def generate_forwards(cls, attrs): else: raise TypeError(attr_name, type(attr)) - def generate_wraps(cls, attrs): + def generate_wraps(cls, attrs: dict[str, object]) -> None: # generate wrappers for functions of _wraps + wrapper: classmethod | Callable for attr_name, attr in cls._wraps.__dict__.items(): # .z. 
exclude cls._wrap_iter if attr_name.startswith("_") or attr_name in attrs: @@ -112,22 +173,27 @@ def generate_wraps(cls, attrs): setattr(cls, attr_name, wrapper) elif isinstance(attr, types.FunctionType): wrapper = thread_wrapper_factory(cls, attr_name) + assert isinstance(wrapper, types.FunctionType) + wrapper.__signature__ = inspect.signature(attr) setattr(cls, attr_name, wrapper) else: raise TypeError(attr_name, type(attr)) - def generate_magic(cls, attrs): + def generate_magic(cls, attrs: dict[str, object]) -> None: # generate wrappers for magic for attr_name in cls._forward_magic: attr = getattr(cls._forwards, attr_name) wrapper = _forward_magic(cls, attr) setattr(cls, attr_name, wrapper) - def generate_iter(cls, attrs): + def generate_iter(cls, attrs: dict[str, object]) -> None: # generate wrappers for methods that return iterators + wrapper: Callable for attr_name, attr in cls._wraps.__dict__.items(): if attr_name in cls._wrap_iter: wrapper = iter_wrapper_factory(cls, attr_name) + assert isinstance(wrapper, types.FunctionType) + wrapper.__signature__ = inspect.signature(attr) setattr(cls, attr_name, wrapper) @@ -137,9 +203,10 @@ class Path(metaclass=AsyncAutoWrapperType): """ - _wraps = pathlib.Path - _forwards = pathlib.PurePath - _forward_magic = [ + _forward: ClassVar[list[str]] + _wraps: ClassVar[type] = pathlib.Path + _forwards: ClassVar[type] = pathlib.PurePath + _forward_magic: ClassVar[list[str]] = [ "__str__", "__bytes__", "__truediv__", @@ -151,9 +218,9 @@ class Path(metaclass=AsyncAutoWrapperType): "__ge__", "__hash__", ] - _wrap_iter = ["glob", "rglob", "iterdir"] + _wrap_iter: ClassVar[list[str]] = ["glob", "rglob", "iterdir"] - def __init__(self, *args): + def __init__(self, *args: StrPath) -> None: self._wrapped = pathlib.Path(*args) # type checkers allow accessing any attributes on class instances with `__getattr__` @@ -167,17 +234,94 @@ def __getattr__(self, name): return rewrap_path(value) raise AttributeError(name) - def __dir__(self): - return super().__dir__() + self._forward + def __dir__(self) -> list[str]: + return [*super().__dir__(), *self._forward] - def __repr__(self): + def __repr__(self) -> str: return f"trio.Path({repr(str(self))})" - def __fspath__(self): + def __fspath__(self) -> str: return os.fspath(self._wrapped) - @wraps(pathlib.Path.open) - async def open(self, *args, **kwargs): + @overload + def open( + self, + mode: OpenTextMode = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> _AsyncIOWrapper[TextIOWrapper]: + ... + + @overload + def open( + self, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> _AsyncIOWrapper[FileIO]: + ... + + @overload + def open( + self, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> _AsyncIOWrapper[BufferedRandom]: + ... + + @overload + def open( + self, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> _AsyncIOWrapper[BufferedWriter]: + ... + + @overload + def open( + self, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> _AsyncIOWrapper[BufferedReader]: + ... 
+ + @overload + def open( + self, + mode: OpenBinaryMode, + buffering: int = -1, + encoding: None = None, + errors: None = None, + newline: None = None, + ) -> _AsyncIOWrapper[BinaryIO]: + ... + + @overload + def open( + self, + mode: str, + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> _AsyncIOWrapper[IO[Any]]: + ... + + @wraps(pathlib.Path.open) # type: ignore[misc] # Overload return mismatch. + async def open(self, *args: Any, **kwargs: Any) -> _AsyncIOWrapper[IO[Any]]: """Open the file pointed to by the path, like the :func:`trio.open_file` function does. @@ -189,75 +333,101 @@ async def open(self, *args, **kwargs): if TYPE_CHECKING: # the dunders listed in _forward_magic that aren't seen otherwise - __bytes__ = pathlib.Path.__bytes__ - __truediv__ = pathlib.Path.__truediv__ - __rtruediv__ = pathlib.Path.__rtruediv__ - - # These should be fully typed, either manually or with some magic wrapper - # function that copies the type of pathlib.Path except sticking an async in - # front of all of them. The latter is unfortunately not trivial, see attempts in - # https://github.com/python-trio/trio/issues/2630 + # fmt: off + def __bytes__(self) -> bytes: ... + def __truediv__(self, other: StrPath) -> Path: ... + def __rtruediv__(self, other: StrPath) -> Path: ... # wrapped methods handled by __getattr__ - absolute: Any - as_posix: Any - as_uri: Any - chmod: Any - cwd: Any - exists: Any - expanduser: Any - glob: Any - home: Any - is_absolute: Any - is_block_device: Any - is_char_device: Any - is_dir: Any - is_fifo: Any - is_file: Any - is_reserved: Any - is_socket: Any - is_symlink: Any - iterdir: Any - joinpath: Any - lchmod: Any - lstat: Any - match: Any - mkdir: Any - read_bytes: Any - read_text: Any - relative_to: Any - rename: Any - replace: Any - resolve: Any - rglob: Any - rmdir: Any - samefile: Any - stat: Any - symlink_to: Any - touch: Any - unlink: Any - with_name: Any - with_suffix: Any - write_bytes: Any - write_text: Any + async def absolute(self) -> Path: ... + async def as_posix(self) -> str: ... + async def as_uri(self) -> str: ... + + if sys.version_info >= (3, 10): + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: ... + async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: ... + else: + async def stat(self) -> os.stat_result: ... + async def chmod(self, mode: int) -> None: ... + + @classmethod + async def cwd(self) -> Path: ... + + async def exists(self) -> bool: ... + async def expanduser(self) -> Path: ... + async def glob(self, pattern: str) -> Iterable[Path]: ... + async def home(self) -> Path: ... + async def is_absolute(self) -> bool: ... + async def is_block_device(self) -> bool: ... + async def is_char_device(self) -> bool: ... + async def is_dir(self) -> bool: ... + async def is_fifo(self) -> bool: ... + async def is_file(self) -> bool: ... + async def is_reserved(self) -> bool: ... + async def is_socket(self) -> bool: ... + async def is_symlink(self) -> bool: ... + async def iterdir(self) -> Iterable[Path]: ... + async def joinpath(self, *other: StrPath) -> Path: ... + async def lchmod(self, mode: int) -> None: ... + async def lstat(self) -> os.stat_result: ... + async def match(self, path_pattern: str) -> bool: ... + async def mkdir(self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False) -> None: ... + async def read_bytes(self) -> bytes: ... 
+ async def read_text(self, encoding: str | None = None, errors: str | None = None) -> str: ... + async def relative_to(self, *other: StrPath) -> Path: ... + + if sys.version_info >= (3, 8): + def rename(self, target: str | pathlib.PurePath) -> Path: ... + def replace(self, target: str | pathlib.PurePath) -> Path: ... + else: + def rename(self, target: str | pathlib.PurePath) -> None: ... + def replace(self, target: str | pathlib.PurePath) -> None: ... + + async def resolve(self, strict: bool = False) -> Path: ... + async def rglob(self, pattern: str) -> Iterable[Path]: ... + async def rmdir(self) -> None: ... + async def samefile(self, other_path: str | bytes | int | Path) -> bool: ... + async def symlink_to(self, target: str | Path, target_is_directory: bool = False) -> None: ... + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: ... + if sys.version_info >= (3, 8): + def unlink(self, missing_ok: bool = False) -> None: ... + else: + def unlink(self) -> None: ... + async def with_name(self, name: str) -> Path: ... + async def with_suffix(self, suffix: str) -> Path: ... + async def write_bytes(self, data: bytes) -> int: ... + + if sys.version_info >= (3, 10): + async def write_text( + self, data: str, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> int: ... + else: + async def write_text( + self, data: str, + encoding: str | None = None, + errors: str | None = None, + ) -> int: ... if sys.platform != "win32": - group: Any - is_mount: Any - owner: Any + async def owner(self) -> str: ... + async def group(self) -> str: ... + async def is_mount(self) -> bool: ... if sys.version_info >= (3, 9): - is_relative_to: Any - with_stem: Any - readlink: Any + async def is_relative_to(self, *other: StrPath) -> bool: ... + async def with_stem(self, stem: str) -> Path: ... + async def readlink(self) -> Path: ... if sys.version_info >= (3, 10): - hardlink_to: Any + async def hardlink_to(self, target: str | pathlib.Path) -> None: ... if sys.version_info < (3, 12): - link_to: Any + async def link_to(self, target: StrPath | bytes) -> None: ... if sys.version_info >= (3, 12): - is_junction: Any - walk: Any - with_segments: Any + async def is_junction(self) -> bool: ... + walk: Any # TODO + async def with_segments(self, *pathsegments: StrPath) -> Path: ... Path.iterdir.__doc__ = """ diff --git a/trio/_tests/test_file_io.py b/trio/_tests/test_file_io.py index e99788efc5..bae426cf48 100644 --- a/trio/_tests/test_file_io.py +++ b/trio/_tests/test_file_io.py @@ -1,12 +1,15 @@ +import importlib import io import os +import re +from typing import List, Tuple from unittest import mock from unittest.mock import sentinel import pytest import trio -from trio import _core +from trio import _core, _file_io from trio._file_io import _FILE_ASYNC_METHODS, _FILE_SYNC_ATTRS, AsyncIOWrapper @@ -78,6 +81,46 @@ def unsupported_attr(self): # pragma: no cover getattr(async_file, "unsupported_attr") +def test_type_stubs_match_lists() -> None: + """Check the manual stubs match the list of wrapped methods.""" + # Fetch the module's source code. + assert _file_io.__spec__ is not None + loader = _file_io.__spec__.loader + assert isinstance(loader, importlib.abc.SourceLoader) + source = io.StringIO(loader.get_source("trio._file_io")) + + # Find the class, then find the TYPE_CHECKING block. 
+ for line in source: + if "class AsyncIOWrapper" in line: + break + else: # pragma: no cover - should always find this + pytest.fail("No class definition line?") + + for line in source: + if "if TYPE_CHECKING" in line: + break + else: # pragma: no cover - should always find this + pytest.fail("No TYPE CHECKING line?") + + # Now we should be at the type checking block. + found: List[Tuple[str, str]] = [] + for line in source: # pragma: no branch - expected to break early + if line.strip() and not line.startswith(" " * 8): + break # Dedented out of the if TYPE_CHECKING block. + match = re.match(r"\s*(async )?def ([a-zA-Z0-9_]+)\(", line) + if match is not None: + kind = "async" if match.group(1) is not None else "sync" + found.append((match.group(2), kind)) + + # Compare two lists so that we can easily see duplicates, and see what is different overall. + expected = [(fname, "async") for fname in _FILE_ASYNC_METHODS] + expected += [(fname, "sync") for fname in _FILE_SYNC_ATTRS] + # Ignore order, error if duplicates are present. + found.sort() + expected.sort() + assert found == expected + + def test_sync_attrs_forwarded(async_file, wrapped): for attr_name in _FILE_SYNC_ATTRS: if attr_name not in dir(async_file): diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index ba26a34e9f..d08c03060c 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.8832, + "completenessScore": 0.888, "exportedSymbolCounts": { "withAmbiguousType": 1, - "withKnownType": 552, - "withUnknownType": 72 + "withKnownType": 555, + "withUnknownType": 69 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -45,9 +45,9 @@ } ], "otherSymbolCounts": { - "withAmbiguousType": 6, - "withKnownType": 475, - "withUnknownType": 114 + "withAmbiguousType": 3, + "withKnownType": 529, + "withUnknownType": 102 }, "packageName": "trio", "symbols": [ @@ -79,20 +79,6 @@ "trio._highlevel_socket.SocketStream.getsockopt", "trio._highlevel_socket.SocketStream.send_all", "trio._highlevel_socket.SocketStream.setsockopt", - "trio._path.AsyncAutoWrapperType.__init__", - "trio._path.AsyncAutoWrapperType.generate_forwards", - "trio._path.AsyncAutoWrapperType.generate_iter", - "trio._path.AsyncAutoWrapperType.generate_magic", - "trio._path.AsyncAutoWrapperType.generate_wraps", - "trio._path.Path", - "trio._path.Path.__bytes__", - "trio._path.Path.__dir__", - "trio._path.Path.__fspath__", - "trio._path.Path.__init__", - "trio._path.Path.__repr__", - "trio._path.Path.__rtruediv__", - "trio._path.Path.__truediv__", - "trio._path.Path.open", "trio._socket._SocketType.__getattr__", "trio._socket._SocketType.accept", "trio._socket._SocketType.connect", @@ -152,7 +138,6 @@ "trio.lowlevel.temporarily_detach_coroutine_object", "trio.lowlevel.wait_readable", "trio.lowlevel.wait_writable", - "trio.open_file", "trio.open_ssl_over_tcp_listeners", "trio.open_ssl_over_tcp_stream", "trio.open_tcp_listeners", @@ -204,8 +189,7 @@ "trio.testing.trio_test", "trio.testing.wait_all_tasks_blocked", "trio.tests.TestsDeprecationWrapper", - "trio.to_thread.current_default_thread_limiter", - "trio.wrap_file" + "trio.to_thread.current_default_thread_limiter" ] } } diff --git a/trio/_util.py b/trio/_util.py index b7b4403115..a87f1fc02c 100644 --- a/trio/_util.py +++ b/trio/_util.py @@ -13,6 +13,9 @@ import trio +CallT = t.TypeVar("CallT", bound=t.Callable[..., t.Any]) + + # Equivalent to the C function raise(), which Python 
doesn't wrap if os.name == "nt": # On Windows, os.kill exists but is really weird. @@ -199,10 +202,14 @@ def __exit__( self._held = False -def async_wraps(cls, wrapped_cls, attr_name): +def async_wraps( + cls: type[object], + wrapped_cls: type[object], + attr_name: str, +) -> t.Callable[[CallT], CallT]: """Similar to wraps, but for async wrappers of non-async functions.""" - def decorator(func): + def decorator(func: CallT) -> CallT: func.__name__ = attr_name func.__qualname__ = ".".join((cls.__qualname__, attr_name)) From cf1f3c745a6966241ab967f43f73ecff632a337a Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Sat, 29 Jul 2023 10:37:24 +0000 Subject: [PATCH 110/162] typecheck _socket and _core._local (#2705) * add type hints to _socket and _core.local --------- Co-authored-by: Spencer Brown Co-authored-by: EXPLOSION --- docs/source/conf.py | 2 + docs/source/reference-io.rst | 8 + pyproject.toml | 15 +- trio/_core/_local.py | 66 ++--- trio/_socket.py | 447 ++++++++++++++++++++++++++-------- trio/_sync.py | 19 +- trio/_tests/test_socket.py | 24 +- trio/_tests/verify_types.json | 38 +-- trio/_threads.py | 24 +- trio/socket.py | 1 + 10 files changed, 455 insertions(+), 189 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 650688717a..0e16b2d426 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -62,6 +62,8 @@ ("py:obj", "trio._abc.SendType"), ("py:obj", "trio._abc.T"), ("py:obj", "trio._abc.T_resource"), + ("py:class", "trio._threads.T"), + # why aren't these found in stdlib? ("py:class", "types.FrameType"), ("py:class", "P.args"), ("py:class", "P.kwargs"), diff --git a/docs/source/reference-io.rst b/docs/source/reference-io.rst index 85969174aa..9207afb41b 100644 --- a/docs/source/reference-io.rst +++ b/docs/source/reference-io.rst @@ -504,6 +504,14 @@ Socket objects * :meth:`~socket.socket.set_inheritable` * :meth:`~socket.socket.get_inheritable` +The internal SocketType +~~~~~~~~~~~~~~~~~~~~~~~~~~ +.. autoclass:: _SocketType +.. + TODO: adding `:members:` here gives error due to overload+_wraps on `sendto` + TODO: rewrite ... all of the above when fixing _SocketType vs SocketType + + .. currentmodule:: trio diff --git a/pyproject.toml b/pyproject.toml index d479442c7a..445c40e28c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,20 +48,25 @@ disallow_untyped_defs = false # downstream and users have to deal with them. [[tool.mypy.overrides]] module = [ - "trio._path", + "trio._socket", + "trio._core._local", + "trio._sync", "trio._file_io", ] +disallow_incomplete_defs = true disallow_untyped_defs = true +disallow_any_generics = true +disallow_any_decorated = true +disallow_subclassing_any = true [[tool.mypy.overrides]] module = [ - "trio._dtls", - "trio._abc" + "trio._path", ] disallow_incomplete_defs = true disallow_untyped_defs = true -disallow_any_generics = true -disallow_any_decorated = true +#disallow_any_generics = true +#disallow_any_decorated = true disallow_subclassing_any = true [tool.pytest.ini_options] diff --git a/trio/_core/_local.py b/trio/_core/_local.py index a54f424fdf..7f2c632153 100644 --- a/trio/_core/_local.py +++ b/trio/_core/_local.py @@ -1,25 +1,34 @@ +from __future__ import annotations + +from typing import Generic, TypeVar, final + # Runvar implementations import attr -from .._util import Final +from .._util import Final, NoPublicConstructor from . import _run +T = TypeVar("T") + + +@final +class _NoValue(metaclass=Final): + ... 
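
The ``_NoValue`` sentinel class above replaces the old ``object()`` sentinel so
that "no value supplied" can be spelled in annotations as ``T | type[_NoValue]``.
A standalone sketch of the same idiom (all names here are illustrative, not part
of this patch)::

    from __future__ import annotations

    from typing import TypeVar

    T = TypeVar("T")

    class _Missing:
        """Sentinel type used only as a default marker; never instantiated."""

    def first(items: list[T], default: T | type[_Missing] = _Missing) -> T:
        if items:
            return items[0]
        if default is not _Missing:
            # narrowing type[_Missing] away needs a hint, as in RunVar.get below
            return default  # type: ignore[return-value]
        raise LookupError("empty sequence and no default given")
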
-@attr.s(eq=False, hash=False, slots=True) -class _RunVarToken: - _no_value = object() - _var = attr.ib() - previous_value = attr.ib(default=_no_value) - redeemed = attr.ib(default=False, init=False) +@attr.s(eq=False, hash=False, slots=False) +class RunVarToken(Generic[T], metaclass=NoPublicConstructor): + _var: RunVar[T] = attr.ib() + previous_value: T | type[_NoValue] = attr.ib(default=_NoValue) + redeemed: bool = attr.ib(default=False, init=False) @classmethod - def empty(cls, var): - return cls(var) + def _empty(cls, var: RunVar[T]) -> RunVarToken[T]: + return cls._create(var) @attr.s(eq=False, hash=False, slots=True) -class RunVar(metaclass=Final): +class RunVar(Generic[T], metaclass=Final): """The run-local variant of a context variable. :class:`RunVar` objects are similar to context variable objects, @@ -28,27 +37,28 @@ class RunVar(metaclass=Final): """ - _NO_DEFAULT = object() - _name = attr.ib() - _default = attr.ib(default=_NO_DEFAULT) + _name: str = attr.ib() + _default: T | type[_NoValue] = attr.ib(default=_NoValue) - def get(self, default=_NO_DEFAULT): + def get(self, default: T | type[_NoValue] = _NoValue) -> T: """Gets the value of this :class:`RunVar` for the current run call.""" try: - return _run.GLOBAL_RUN_CONTEXT.runner._locals[self] + # not typed yet + return _run.GLOBAL_RUN_CONTEXT.runner._locals[self] # type: ignore[return-value, index] except AttributeError: raise RuntimeError("Cannot be used outside of a run context") from None except KeyError: # contextvars consistency - if default is not self._NO_DEFAULT: - return default + # `type: ignore` awaiting https://github.com/python/mypy/issues/15553 to be fixed & released + if default is not _NoValue: + return default # type: ignore[return-value] - if self._default is not self._NO_DEFAULT: - return self._default + if self._default is not _NoValue: + return self._default # type: ignore[return-value] raise LookupError(self) from None - def set(self, value): + def set(self, value: T) -> RunVarToken[T]: """Sets the value of this :class:`RunVar` for this current run call. @@ -56,16 +66,16 @@ def set(self, value): try: old_value = self.get() except LookupError: - token = _RunVarToken.empty(self) + token = RunVarToken._empty(self) else: - token = _RunVarToken(self, old_value) + token = RunVarToken[T]._create(self, old_value) # This can't fail, because if we weren't in Trio context then the # get() above would have failed. - _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = value + _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = value # type: ignore[assignment, index] return token - def reset(self, token): + def reset(self, token: RunVarToken[T]) -> None: """Resets the value of this :class:`RunVar` to what it was previously specified by the token. 
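
A short usage sketch of the now-generic ``RunVar`` API typed above (the variable
name and values are illustrative)::

    import trio
    from trio.lowlevel import RunVar

    request_id: RunVar[int] = RunVar("request_id")

    async def main() -> None:
        token = request_id.set(42)
        assert request_id.get() == 42
        request_id.reset(token)
        assert request_id.get(default=-1) == -1

    trio.run(main)
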
@@ -81,14 +91,14 @@ def reset(self, token): previous = token.previous_value try: - if previous is _RunVarToken._no_value: - _run.GLOBAL_RUN_CONTEXT.runner._locals.pop(self) + if previous is _NoValue: + _run.GLOBAL_RUN_CONTEXT.runner._locals.pop(self) # type: ignore[arg-type] else: - _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = previous + _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = previous # type: ignore[index, assignment] except AttributeError: raise RuntimeError("Cannot be used outside of a run context") token.redeemed = True - def __repr__(self): + def __repr__(self) -> str: return f"" diff --git a/trio/_socket.py b/trio/_socket.py index 26b03fc3e0..b0ec1d480d 100644 --- a/trio/_socket.py +++ b/trio/_socket.py @@ -5,7 +5,20 @@ import socket as _stdlib_socket import sys from functools import wraps as _wraps -from typing import TYPE_CHECKING, Tuple, Union +from operator import index +from socket import AddressFamily, SocketKind +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + NoReturn, + SupportsIndex, + Tuple, + TypeVar, + Union, + overload, +) import idna as _idna @@ -17,7 +30,14 @@ from collections.abc import Iterable from types import TracebackType - from typing_extensions import Self, TypeAlias + from typing_extensions import Buffer, Concatenate, ParamSpec, Self, TypeAlias + + from ._abc import HostnameResolver, SocketFactory + + P = ParamSpec("P") + + +T = TypeVar("T") # must use old-style typing because it's evaluated at runtime Address: TypeAlias = Union[ @@ -34,16 +54,18 @@ # return await do_it_properly_with_a_check_point() # class _try_sync: - def __init__(self, blocking_exc_override=None): + def __init__( + self, blocking_exc_override: Callable[[BaseException], bool] | None = None + ): self._blocking_exc_override = blocking_exc_override - def _is_blocking_io_error(self, exc): + def _is_blocking_io_error(self, exc: BaseException) -> bool: if self._blocking_exc_override is None: return isinstance(exc, BlockingIOError) else: return self._blocking_exc_override(exc) - async def __aenter__(self): + async def __aenter__(self) -> None: await trio.lowlevel.checkpoint_if_cancelled() async def __aexit__( @@ -66,11 +88,13 @@ async def __aexit__( # Overrides ################################################################ -_resolver = _core.RunVar("hostname_resolver") -_socket_factory = _core.RunVar("socket_factory") +_resolver: _core.RunVar[HostnameResolver | None] = _core.RunVar("hostname_resolver") +_socket_factory: _core.RunVar[SocketFactory | None] = _core.RunVar("socket_factory") -def set_custom_hostname_resolver(hostname_resolver): +def set_custom_hostname_resolver( + hostname_resolver: HostnameResolver | None, +) -> HostnameResolver | None: """Set a custom hostname resolver. By default, Trio's :func:`getaddrinfo` and :func:`getnameinfo` functions @@ -102,7 +126,9 @@ def set_custom_hostname_resolver(hostname_resolver): return old -def set_custom_socket_factory(socket_factory): +def set_custom_socket_factory( + socket_factory: SocketFactory | None, +) -> SocketFactory | None: """Set a custom socket object factory. 
This function allows you to replace Trio's normal socket class with a @@ -136,7 +162,23 @@ def set_custom_socket_factory(socket_factory): _NUMERIC_ONLY = _stdlib_socket.AI_NUMERICHOST | _stdlib_socket.AI_NUMERICSERV -async def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): +# It would be possible to @overload the return value depending on Literal[AddressFamily.INET/6], but should probably be added in typeshed first +async def getaddrinfo( + host: bytes | str | None, + port: bytes | str | int | None, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, +) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] +]: """Look up a numeric address given a name. Arguments and return values are identical to :func:`socket.getaddrinfo`, @@ -157,7 +199,7 @@ async def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): # skip the whole thread thing, which seems worthwhile. So we try first # with the _NUMERIC_ONLY flags set, and then only spawn a thread if that # fails with EAI_NONAME: - def numeric_only_failure(exc): + def numeric_only_failure(exc: BaseException) -> bool: return ( isinstance(exc, _stdlib_socket.gaierror) and exc.errno == _stdlib_socket.EAI_NONAME @@ -199,7 +241,9 @@ def numeric_only_failure(exc): ) -async def getnameinfo(sockaddr, flags): +async def getnameinfo( + sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int +) -> tuple[str, str]: """Look up a name given a numeric address. Arguments and return values are identical to :func:`socket.getnameinfo`, @@ -218,7 +262,7 @@ async def getnameinfo(sockaddr, flags): ) -async def getprotobyname(name): +async def getprotobyname(name: str) -> int: """Look up a protocol number by name. (Rarely used.) Like :func:`socket.getprotobyname`, but async. @@ -237,7 +281,7 @@ async def getprotobyname(name): ################################################################ -def from_stdlib_socket(sock): +def from_stdlib_socket(sock: _stdlib_socket.socket) -> _SocketType: """Convert a standard library :class:`socket.socket` object into a Trio socket object. 
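
The numeric-only fast path described above means that already-resolved host and
port values never need the thread pool, while real hostnames fall back to a
worker thread. Illustrative call (host and port are arbitrary)::

    import trio
    import trio.socket as tsocket

    async def main() -> None:
        # Numeric inputs are resolved inline via AI_NUMERICHOST | AI_NUMERICSERV.
        infos = await tsocket.getaddrinfo("127.0.0.1", 80, type=tsocket.SOCK_STREAM)
        family, kind, proto, canonname, sockaddr = infos[0]
        assert sockaddr == ("127.0.0.1", 80)

    trio.run(main)
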
@@ -246,9 +290,14 @@ def from_stdlib_socket(sock): @_wraps(_stdlib_socket.fromfd, assigned=(), updated=()) -def fromfd(fd, family, type, proto=0): +def fromfd( + fd: SupportsIndex, + family: AddressFamily | int = _stdlib_socket.AF_INET, + type: SocketKind | int = _stdlib_socket.SOCK_STREAM, + proto: int = 0, +) -> _SocketType: """Like :func:`socket.fromfd`, but returns a Trio socket object.""" - family, type, proto = _sniff_sockopts_for_fileno(family, type, proto, fd) + family, type, proto = _sniff_sockopts_for_fileno(family, type, proto, index(fd)) return from_stdlib_socket(_stdlib_socket.fromfd(fd, family, type, proto)) @@ -257,27 +306,41 @@ def fromfd(fd, family, type, proto=0): ): @_wraps(_stdlib_socket.fromshare, assigned=(), updated=()) - def fromshare(*args, **kwargs): - return from_stdlib_socket(_stdlib_socket.fromshare(*args, **kwargs)) + def fromshare(info: bytes) -> _SocketType: + return from_stdlib_socket(_stdlib_socket.fromshare(info)) + + +if sys.platform == "win32": + FamilyT: TypeAlias = int + TypeT: TypeAlias = int + FamilyDefault = _stdlib_socket.AF_INET +else: + FamilyDefault = None + FamilyT: TypeAlias = Union[int, AddressFamily, None] + TypeT: TypeAlias = Union[_stdlib_socket.socket, int] @_wraps(_stdlib_socket.socketpair, assigned=(), updated=()) -def socketpair(*args, **kwargs): +def socketpair( + family: FamilyT = FamilyDefault, + type: TypeT = SocketKind.SOCK_STREAM, + proto: int = 0, +) -> tuple[_SocketType, _SocketType]: """Like :func:`socket.socketpair`, but returns a pair of Trio socket objects. """ - left, right = _stdlib_socket.socketpair(*args, **kwargs) + left, right = _stdlib_socket.socketpair(family, type, proto) return (from_stdlib_socket(left), from_stdlib_socket(right)) @_wraps(_stdlib_socket.socket, assigned=(), updated=()) def socket( - family=_stdlib_socket.AF_INET, - type=_stdlib_socket.SOCK_STREAM, - proto=0, - fileno=None, -): + family: AddressFamily | int = _stdlib_socket.AF_INET, + type: SocketKind | int = _stdlib_socket.SOCK_STREAM, + proto: int = 0, + fileno: int | None = None, +) -> _SocketType: """Create a new Trio socket, like :class:`socket.socket`. This function's behavior can be customized using @@ -294,14 +357,24 @@ def socket( return from_stdlib_socket(stdlib_socket) -def _sniff_sockopts_for_fileno(family, type, proto, fileno): +def _sniff_sockopts_for_fileno( + family: AddressFamily | int, + type: SocketKind | int, + proto: int, + fileno: int | None, +) -> tuple[AddressFamily | int, SocketKind | int, int]: """Correct SOCKOPTS for given fileno, falling back to provided values.""" # Wrap the raw fileno into a Python socket object # This object might have the wrong metadata, but it lets us easily call getsockopt # and then we'll throw it away and construct a new one with the correct metadata. 
if sys.platform != "linux": return family, type, proto - from socket import SO_DOMAIN, SO_PROTOCOL, SO_TYPE, SOL_SOCKET + from socket import ( # type: ignore[attr-defined] + SO_DOMAIN, + SO_PROTOCOL, + SO_TYPE, + SOL_SOCKET, + ) sockobj = _stdlib_socket.socket(family, type, proto, fileno=fileno) try: @@ -331,19 +404,21 @@ def _sniff_sockopts_for_fileno(family, type, proto, fileno): ) -def _make_simple_sock_method_wrapper(methname, wait_fn, maybe_avail=False): - fn = getattr(_stdlib_socket.socket, methname) - +def _make_simple_sock_method_wrapper( + fn: Callable[Concatenate[_stdlib_socket.socket, P], T], + wait_fn: Callable[[_stdlib_socket.socket], Awaitable[None]], + maybe_avail: bool = False, +) -> Callable[Concatenate[_SocketType, P], Awaitable[T]]: @_wraps(fn, assigned=("__name__",), updated=()) - async def wrapper(self, *args, **kwargs): - return await self._nonblocking_helper(fn, args, kwargs, wait_fn) + async def wrapper(self: _SocketType, *args: P.args, **kwargs: P.kwargs) -> T: + return await self._nonblocking_helper(wait_fn, fn, *args, **kwargs) - wrapper.__doc__ = f"""Like :meth:`socket.socket.{methname}`, but async. + wrapper.__doc__ = f"""Like :meth:`socket.socket.{fn.__name__}`, but async. """ if maybe_avail: wrapper.__doc__ += ( - f"Only available on platforms where :meth:`socket.socket.{methname}` is " + f"Only available on platforms where :meth:`socket.socket.{fn.__name__}` is " "available." ) return wrapper @@ -362,8 +437,21 @@ async def wrapper(self, *args, **kwargs): # local=False means that the address is being used with connect() or sendto() or # similar. # + + +# Using a TypeVar to indicate we return the same type of address appears to give errors +# when passed a union of address types. +# @overload likely works, but is extremely verbose. # NOTE: this function does not always checkpoint -async def _resolve_address_nocp(type, family, proto, *, ipv6_v6only, address, local): +async def _resolve_address_nocp( + type: int, + family: AddressFamily, + proto: int, + *, + ipv6_v6only: bool | int, + address: Address, + local: bool, +) -> Address: # Do some pre-checking (or exit early for non-IP sockets) if family == _stdlib_socket.AF_INET: if not isinstance(address, tuple) or not len(address) == 2: @@ -373,13 +461,15 @@ async def _resolve_address_nocp(type, family, proto, *, ipv6_v6only, address, lo raise ValueError( "address should be a (host, port, [flowinfo, [scopeid]]) tuple" ) - elif family == _stdlib_socket.AF_UNIX: + elif family == getattr(_stdlib_socket, "AF_UNIX"): # unwrap path-likes + assert isinstance(address, (str, bytes)) return os.fspath(address) else: return address # -- From here on we know we have IPv4 or IPV6 -- + host: str | None host, port, *_ = address # Fast path for the simple case: already-resolved IP address, # already-resolved port. 
This is particularly important for UDP, since @@ -417,18 +507,24 @@ async def _resolve_address_nocp(type, family, proto, *, ipv6_v6only, address, lo # The above ignored any flowid and scopeid in the passed-in address, # so restore them if present: if family == _stdlib_socket.AF_INET6: - normed = list(normed) + list_normed = list(normed) assert len(normed) == 4 + # typechecking certainly doesn't like this logic, but given just how broad + # Address is, it's quite cumbersome to write the below without type: ignore if len(address) >= 3: - normed[2] = address[2] + list_normed[2] = address[2] # type: ignore if len(address) >= 4: - normed[3] = address[3] - normed = tuple(normed) + list_normed[3] = address[3] # type: ignore + return tuple(list_normed) # type: ignore return normed +# TODO: stopping users from initializing this type should be done in a different way, +# so SocketType can be used as a type. Note that this is *far* from trivial without +# breaking subclasses of SocketType. Can maybe add abstract methods to SocketType, +# or rename _SocketType. class SocketType: - def __init__(self): + def __init__(self) -> NoReturn: raise TypeError( "SocketType is an abstract class; use trio.socket.socket if you " "want to construct a socket object" @@ -451,36 +547,80 @@ def __init__(self, sock: _stdlib_socket.socket): # Simple + portable methods and attributes ################################################################ - # NB this doesn't work because for loops don't create a scope - # for _name in [ - # ]: - # _meth = getattr(_stdlib_socket.socket, _name) - # @_wraps(_meth, assigned=("__name__", "__doc__"), updated=()) - # def _wrapped(self, *args, **kwargs): - # return getattr(self._sock, _meth)(*args, **kwargs) - # locals()[_meth] = _wrapped - # del _name, _meth, _wrapped - - _forward = { - "detach", - "get_inheritable", - "set_inheritable", - "fileno", - "getpeername", - "getsockname", - "getsockopt", - "setsockopt", - "listen", - "share", - } - - def __getattr__(self, name): - if name in self._forward: - return getattr(self._sock, name) - raise AttributeError(name) - - def __dir__(self) -> Iterable[str]: - return [*super().__dir__(), *self._forward] + # forwarded methods + def detach(self) -> int: + return self._sock.detach() + + def fileno(self) -> int: + return self._sock.fileno() + + def getpeername(self) -> Any: + return self._sock.getpeername() + + def getsockname(self) -> Any: + return self._sock.getsockname() + + @overload + def getsockopt(self, /, level: int, optname: int) -> int: + ... + + @overload + def getsockopt(self, /, level: int, optname: int, buflen: int) -> bytes: + ... + + def getsockopt( + self, /, level: int, optname: int, buflen: int | None = None + ) -> int | bytes: + if buflen is None: + return self._sock.getsockopt(level, optname) + return self._sock.getsockopt(level, optname, buflen) + + @overload + def setsockopt(self, /, level: int, optname: int, value: int | Buffer) -> None: + ... + + @overload + def setsockopt(self, /, level: int, optname: int, value: None, optlen: int) -> None: + ... 
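
The ``getsockopt``/``setsockopt`` overloads above separate the int form from the
buffer form, following the stdlib semantics. A quick sketch (the option choices
are arbitrary)::

    import trio
    import trio.socket as tsocket

    async def main() -> None:
        with tsocket.socket() as sock:
            assert isinstance(sock.getsockopt(tsocket.SOL_SOCKET, tsocket.SO_REUSEADDR), int)
            assert isinstance(sock.getsockopt(tsocket.SOL_SOCKET, tsocket.SO_REUSEADDR, 4), bytes)
            sock.setsockopt(tsocket.SOL_SOCKET, tsocket.SO_REUSEADDR, 1)

    trio.run(main)
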
+ + def setsockopt( + self, + /, + level: int, + optname: int, + value: int | Buffer | None, + optlen: int | None = None, + ) -> None: + if optlen is None: + if value is None: + raise TypeError( + "invalid value for argument 'value', must not be None when specifying optlen" + ) + return self._sock.setsockopt(level, optname, value) + if value is not None: + raise TypeError( + "invalid value for argument 'value': {value!r}, must be None when specifying optlen" + ) + + # Note: PyPy may crash here due to setsockopt only supporting + # four parameters. + return self._sock.setsockopt(level, optname, value, optlen) + + def listen(self, /, backlog: int = min(_stdlib_socket.SOMAXCONN, 128)) -> None: + return self._sock.listen(backlog) + + def get_inheritable(self) -> bool: + return self._sock.get_inheritable() + + def set_inheritable(self, inheritable: bool) -> None: + return self._sock.set_inheritable(inheritable) + + if sys.platform == "win32" or ( + not TYPE_CHECKING and hasattr(_stdlib_socket.socket, "share") + ): + + def share(self, /, process_id: int) -> bytes: + return self._sock.share(process_id) def __enter__(self) -> Self: return self @@ -494,11 +634,11 @@ def __exit__( return self._sock.__exit__(exc_type, exc_value, traceback) @property - def family(self) -> _stdlib_socket.AddressFamily: + def family(self) -> AddressFamily: return self._sock.family @property - def type(self) -> _stdlib_socket.SocketKind: + def type(self) -> SocketKind: return self._sock.type @property @@ -521,7 +661,7 @@ def close(self) -> None: trio.lowlevel.notify_closing(self._sock) self._sock.close() - async def bind(self, address: tuple[object, ...] | str | bytes) -> None: + async def bind(self, address: Address) -> None: address = await self._resolve_address_nocp(address, local=True) if ( hasattr(_stdlib_socket, "AF_UNIX") @@ -530,8 +670,7 @@ async def bind(self, address: tuple[object, ...] | str | bytes) -> None: ): # Use a thread for the filesystem traversal (unless it's an # abstract domain socket) - # remove the `type: ignore` when run.sync is typed. - return await trio.to_thread.run_sync(self._sock.bind, address) # type: ignore[no-any-return] + return await trio.to_thread.run_sync(self._sock.bind, address) else: # POSIX actually says that bind can return EWOULDBLOCK and # complete asynchronously, like connect. 
But in practice AFAICT @@ -559,7 +698,12 @@ def is_readable(self) -> bool: async def wait_writable(self) -> None: await _core.wait_writable(self._sock) - async def _resolve_address_nocp(self, address, *, local): + async def _resolve_address_nocp( + self, + address: Address, + *, + local: bool, + ) -> Address: if self.family == _stdlib_socket.AF_INET6: ipv6_v6only = self._sock.getsockopt( _stdlib_socket.IPPROTO_IPV6, _stdlib_socket.IPV6_V6ONLY @@ -575,7 +719,19 @@ async def _resolve_address_nocp(self, address, *, local): local=local, ) - async def _nonblocking_helper(self, fn, args, kwargs, wait_fn): + # args and kwargs must be starred, otherwise pyright complains: + # '"args" member of ParamSpec is valid only when used with *args parameter' + # '"kwargs" member of ParamSpec is valid only when used with **kwargs parameter' + # wait_fn and fn must also be first in the signature + # 'Keyword parameter cannot appear in signature after ParamSpec args parameter' + + async def _nonblocking_helper( + self, + wait_fn: Callable[[_stdlib_socket.socket], Awaitable[None]], + fn: Callable[Concatenate[_stdlib_socket.socket, P], T], + *args: P.args, + **kwargs: P.kwargs, + ) -> T: # We have to reconcile two conflicting goals: # - We want to make it look like we always blocked in doing these # operations. The obvious way is to always do an IO wait before @@ -611,9 +767,11 @@ async def _nonblocking_helper(self, fn, args, kwargs, wait_fn): # accept ################################################################ - _accept = _make_simple_sock_method_wrapper("accept", _core.wait_readable) + _accept = _make_simple_sock_method_wrapper( + _stdlib_socket.socket.accept, _core.wait_readable + ) - async def accept(self): + async def accept(self) -> tuple[_SocketType, object]: """Like :meth:`socket.socket.accept`, but async.""" sock, addr = await self._accept() return from_stdlib_socket(sock), addr @@ -622,7 +780,7 @@ async def accept(self): # connect ################################################################ - async def connect(self, address): + async def connect(self, address: Address) -> None: # nonblocking connect is weird -- you call it to start things # off, then the socket becomes writable as a completion # notification. This means it isn't really cancellable... we close the @@ -690,38 +848,71 @@ async def connect(self, address): # Okay, the connect finished, but it might have failed: err = self._sock.getsockopt(_stdlib_socket.SOL_SOCKET, _stdlib_socket.SO_ERROR) if err != 0: - raise OSError(err, f"Error connecting to {address}: {os.strerror(err)}") + raise OSError(err, f"Error connecting to {address!r}: {os.strerror(err)}") ################################################################ # recv ################################################################ + # Not possible to typecheck with a Callable (due to DefaultArg), nor with a + # callback Protocol (https://github.com/python/typing/discussions/1040) + # but this seems to work. If not explicitly defined then pyright --verifytypes will + # complain about AmbiguousType if TYPE_CHECKING: - async def recv(self, buffersize: int, flags: int = 0) -> bytes: + def recv(__self, __buflen: int, __flags: int = 0) -> Awaitable[bytes]: ... - else: - recv = _make_simple_sock_method_wrapper("recv", _core.wait_readable) + # _make_simple_sock_method_wrapper is typed, so this checks that the above is correct + # this requires that we refrain from using `/` to specify pos-only + # args, or mypy thinks the signature differs from typeshed. 
+ recv = _make_simple_sock_method_wrapper( # noqa: F811 + _stdlib_socket.socket.recv, _core.wait_readable + ) ################################################################ # recv_into ################################################################ - recv_into = _make_simple_sock_method_wrapper("recv_into", _core.wait_readable) + if TYPE_CHECKING: + + def recv_into( + __self, buffer: Buffer, nbytes: int = 0, flags: int = 0 + ) -> Awaitable[int]: + ... + + recv_into = _make_simple_sock_method_wrapper( # noqa: F811 + _stdlib_socket.socket.recv_into, _core.wait_readable + ) ################################################################ # recvfrom ################################################################ - recvfrom = _make_simple_sock_method_wrapper("recvfrom", _core.wait_readable) + if TYPE_CHECKING: + # return type of socket.socket.recvfrom in typeshed is tuple[bytes, Any] + def recvfrom( + __self, __bufsize: int, __flags: int = 0 + ) -> Awaitable[tuple[bytes, Address]]: + ... + + recvfrom = _make_simple_sock_method_wrapper( # noqa: F811 + _stdlib_socket.socket.recvfrom, _core.wait_readable + ) ################################################################ # recvfrom_into ################################################################ - recvfrom_into = _make_simple_sock_method_wrapper( - "recvfrom_into", _core.wait_readable + if TYPE_CHECKING: + # return type of socket.socket.recvfrom_into in typeshed is tuple[bytes, Any] + def recvfrom_into( + __self, buffer: Buffer, nbytes: int = 0, flags: int = 0 + ) -> Awaitable[tuple[int, Address]]: + ... + + recvfrom_into = _make_simple_sock_method_wrapper( # noqa: F811 + _stdlib_socket.socket.recvfrom_into, _core.wait_readable ) ################################################################ @@ -729,8 +920,15 @@ async def recv(self, buffersize: int, flags: int = 0) -> bytes: ################################################################ if hasattr(_stdlib_socket.socket, "recvmsg"): - recvmsg = _make_simple_sock_method_wrapper( - "recvmsg", _core.wait_readable, maybe_avail=True + if TYPE_CHECKING: + + def recvmsg( + __self, __bufsize: int, __ancbufsize: int = 0, __flags: int = 0 + ) -> Awaitable[tuple[bytes, list[tuple[int, int, bytes]], int, Any]]: + ... + + recvmsg = _make_simple_sock_method_wrapper( # noqa: F811 + _stdlib_socket.socket.recvmsg, _core.wait_readable, maybe_avail=True ) ################################################################ @@ -738,29 +936,58 @@ async def recv(self, buffersize: int, flags: int = 0) -> bytes: ################################################################ if hasattr(_stdlib_socket.socket, "recvmsg_into"): - recvmsg_into = _make_simple_sock_method_wrapper( - "recvmsg_into", _core.wait_readable, maybe_avail=True + if TYPE_CHECKING: + + def recvmsg_into( + __self, + __buffers: Iterable[Buffer], + __ancbufsize: int = 0, + __flags: int = 0, + ) -> Awaitable[tuple[int, list[tuple[int, int, bytes]], int, Any]]: + ... + + recvmsg_into = _make_simple_sock_method_wrapper( # noqa: F811 + _stdlib_socket.socket.recvmsg_into, _core.wait_readable, maybe_avail=True ) ################################################################ # send ################################################################ - send = _make_simple_sock_method_wrapper("send", _core.wait_writable) + if TYPE_CHECKING: + + def send(__self, __bytes: Buffer, __flags: int = 0) -> Awaitable[int]: + ... 
+ + send = _make_simple_sock_method_wrapper( # noqa: F811 + _stdlib_socket.socket.send, _core.wait_writable + ) ################################################################ # sendto ################################################################ - @_wraps(_stdlib_socket.socket.sendto, assigned=(), updated=()) - async def sendto(self, *args): + @overload + async def sendto( + self, __data: Buffer, __address: tuple[Any, ...] | str | Buffer + ) -> int: + ... + + @overload + async def sendto( + self, __data: Buffer, __flags: int, __address: tuple[Any, ...] | str | Buffer + ) -> int: + ... + + @_wraps(_stdlib_socket.socket.sendto, assigned=(), updated=()) # type: ignore[misc] + async def sendto(self, *args: Any) -> int: """Similar to :meth:`socket.socket.sendto`, but async.""" # args is: data[, flags], address) # and kwargs are not accepted - args = list(args) - args[-1] = await self._resolve_address_nocp(args[-1], local=False) + args_list = list(args) + args_list[-1] = await self._resolve_address_nocp(args[-1], local=False) return await self._nonblocking_helper( - _stdlib_socket.socket.sendto, args, {}, _core.wait_writable + _core.wait_writable, _stdlib_socket.socket.sendto, *args_list ) ################################################################ @@ -772,20 +999,28 @@ async def sendto(self, *args): ): @_wraps(_stdlib_socket.socket.sendmsg, assigned=(), updated=()) - async def sendmsg(self, *args): + async def sendmsg( + self, + __buffers: Iterable[Buffer], + __ancdata: Iterable[tuple[int, int, Buffer]] = (), + __flags: int = 0, + __address: Address | None = None, + ) -> int: """Similar to :meth:`socket.socket.sendmsg`, but async. Only available on platforms where :meth:`socket.socket.sendmsg` is available. """ - # args is: buffers[, ancdata[, flags[, address]]] - # and kwargs are not accepted - if len(args) == 4 and args[-1] is not None: - args = list(args) - args[-1] = await self._resolve_address_nocp(args[-1], local=False) + if __address is not None: + __address = await self._resolve_address_nocp(__address, local=False) return await self._nonblocking_helper( - _stdlib_socket.socket.sendmsg, args, {}, _core.wait_writable + _core.wait_writable, + _stdlib_socket.socket.sendmsg, + __buffers, + __ancdata, + __flags, + __address, ) ################################################################ diff --git a/trio/_sync.py b/trio/_sync.py index 5a7f240d5e..bd2122858e 100644 --- a/trio/_sync.py +++ b/trio/_sync.py @@ -8,7 +8,7 @@ import trio from . import _core -from ._core import ParkingLot, enable_ki_protection +from ._core import Abort, ParkingLot, RaiseCancelT, enable_ki_protection from ._util import Final if TYPE_CHECKING: @@ -87,7 +87,7 @@ async def wait(self) -> None: task = _core.current_task() self._tasks.add(task) - def abort_fn(_): + def abort_fn(_: RaiseCancelT) -> Abort: self._tasks.remove(task) return _core.Abort.SUCCEEDED @@ -143,10 +143,13 @@ class CapacityLimiterStatistics: borrowed_tokens: int = attr.ib() total_tokens: int | float = attr.ib() - borrowers: list[Task] = attr.ib() + borrowers: list[Task | object] = attr.ib() tasks_waiting: int = attr.ib() +# Can be a generic type with a default of Task if/when PEP 696 is released +# and implemented in type checkers. Making it fully generic would currently +# introduce a lot of unnecessary hassle. class CapacityLimiter(AsyncContextManagerMixin, metaclass=Final): """An object for controlling access to a resource with limited capacity. 
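
``CapacityLimiter`` tokens can be borrowed on behalf of arbitrary objects rather
than only tasks, which is why the borrower annotations in this diff use
``Task | object``. Illustrative use (the borrower object is arbitrary)::

    import trio

    async def main() -> None:
        limiter = trio.CapacityLimiter(2)
        borrower = object()  # any hashable object can act as the borrower token
        await limiter.acquire_on_behalf_of(borrower)
        try:
            await trio.sleep(0)  # the capacity-limited work would go here
        finally:
            limiter.release_on_behalf_of(borrower)

    trio.run(main)
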
@@ -204,9 +207,9 @@ class CapacityLimiter(AsyncContextManagerMixin, metaclass=Final): # total_tokens would ideally be int|Literal[math.inf] - but that's not valid typing def __init__(self, total_tokens: int | float): self._lot = ParkingLot() - self._borrowers: set[Task] = set() + self._borrowers: set[Task | object] = set() # Maps tasks attempting to acquire -> borrower, to handle on-behalf-of - self._pending_borrowers: dict[Task, Task] = {} + self._pending_borrowers: dict[Task, Task | object] = {} # invoke the property setter for validation self.total_tokens: int | float = total_tokens assert self._total_tokens == total_tokens @@ -268,7 +271,7 @@ def acquire_nowait(self) -> None: self.acquire_on_behalf_of_nowait(trio.lowlevel.current_task()) @enable_ki_protection - def acquire_on_behalf_of_nowait(self, borrower: Task) -> None: + def acquire_on_behalf_of_nowait(self, borrower: Task | object) -> None: """Borrow a token from the sack on behalf of ``borrower``, without blocking. @@ -307,7 +310,7 @@ async def acquire(self) -> None: await self.acquire_on_behalf_of(trio.lowlevel.current_task()) @enable_ki_protection - async def acquire_on_behalf_of(self, borrower: Task) -> None: + async def acquire_on_behalf_of(self, borrower: Task | object) -> None: """Borrow a token from the sack on behalf of ``borrower``, blocking if necessary. @@ -347,7 +350,7 @@ def release(self) -> None: self.release_on_behalf_of(trio.lowlevel.current_task()) @enable_ki_protection - def release_on_behalf_of(self, borrower: Task) -> None: + def release_on_behalf_of(self, borrower: Task | object) -> None: """Put a token back into the sack on behalf of ``borrower``. Raises: diff --git a/trio/_tests/test_socket.py b/trio/_tests/test_socket.py index e559b98240..e9baff436a 100644 --- a/trio/_tests/test_socket.py +++ b/trio/_tests/test_socket.py @@ -2,7 +2,7 @@ import inspect import os import socket as stdlib_socket -import sys as _sys +import sys import tempfile import attr @@ -277,7 +277,7 @@ async def test_socket_v6(): assert s.family == tsocket.AF_INET6 -@pytest.mark.skipif(not _sys.platform == "linux", reason="linux only") +@pytest.mark.skipif(not sys.platform == "linux", reason="linux only") async def test_sniff_sockopts(): from socket import AF_INET, AF_INET6, SOCK_DGRAM, SOCK_STREAM @@ -360,6 +360,26 @@ async def test_SocketType_basics(): sock.close() +async def test_SocketType_setsockopt(): + sock = tsocket.socket() + with sock as _: + # specifying optlen. Not supported on pypy, and I couldn't find + # valid calls on darwin or win32. 
+ if hasattr(tsocket, "SO_BINDTODEVICE"): + sock.setsockopt(tsocket.SOL_SOCKET, tsocket.SO_BINDTODEVICE, None, 0) + + # specifying value + sock.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, False) + + # specifying both + with pytest.raises(TypeError, match="invalid value for argument 'value'"): + sock.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, False, 5) # type: ignore[call-overload] + + # specifying neither + with pytest.raises(TypeError, match="invalid value for argument 'value'"): + sock.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, None) # type: ignore[call-overload] + + async def test_SocketType_dup(): a, b = tsocket.socketpair() with a, b: diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index d08c03060c..60132e07fd 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.888, + "completenessScore": 0.9072, "exportedSymbolCounts": { "withAmbiguousType": 1, - "withKnownType": 555, - "withUnknownType": 69 + "withKnownType": 567, + "withUnknownType": 57 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -46,18 +46,13 @@ ], "otherSymbolCounts": { "withAmbiguousType": 3, - "withKnownType": 529, - "withUnknownType": 102 + "withKnownType": 574, + "withUnknownType": 76 }, "packageName": "trio", "symbols": [ "trio.__deprecated_attributes__", - "trio._abc.SocketFactory.socket", "trio._core._entry_queue.TrioToken.run_sync_soon", - "trio._core._local.RunVar.__repr__", - "trio._core._local.RunVar.get", - "trio._core._local.RunVar.reset", - "trio._core._local.RunVar.set", "trio._core._mock_clock.MockClock.jump", "trio._core._run.Nursery.start", "trio._core._run.Nursery.start_soon", @@ -72,24 +67,10 @@ "trio._core._unbounded_queue.UnboundedQueue.qsize", "trio._core._unbounded_queue.UnboundedQueue.statistics", "trio._dtls.DTLSChannel.__init__", - "trio._dtls.DTLSEndpoint.__init__", "trio._dtls.DTLSEndpoint.serve", - "trio._highlevel_socket.SocketListener.__init__", - "trio._highlevel_socket.SocketStream.__init__", "trio._highlevel_socket.SocketStream.getsockopt", "trio._highlevel_socket.SocketStream.send_all", "trio._highlevel_socket.SocketStream.setsockopt", - "trio._socket._SocketType.__getattr__", - "trio._socket._SocketType.accept", - "trio._socket._SocketType.connect", - "trio._socket._SocketType.recv_into", - "trio._socket._SocketType.recvfrom", - "trio._socket._SocketType.recvfrom_into", - "trio._socket._SocketType.recvmsg", - "trio._socket._SocketType.recvmsg_into", - "trio._socket._SocketType.send", - "trio._socket._SocketType.sendmsg", - "trio._socket._SocketType.sendto", "trio._ssl.SSLListener.__init__", "trio._ssl.SSLListener.accept", "trio._ssl.SSLListener.aclose", @@ -148,15 +129,6 @@ "trio.serve_listeners", "trio.serve_ssl_over_tcp", "trio.serve_tcp", - "trio.socket.from_stdlib_socket", - "trio.socket.fromfd", - "trio.socket.getaddrinfo", - "trio.socket.getnameinfo", - "trio.socket.getprotobyname", - "trio.socket.set_custom_hostname_resolver", - "trio.socket.set_custom_socket_factory", - "trio.socket.socket", - "trio.socket.socketpair", "trio.testing._memory_streams.MemoryReceiveStream.__init__", "trio.testing._memory_streams.MemoryReceiveStream.aclose", "trio.testing._memory_streams.MemoryReceiveStream.close", diff --git a/trio/_threads.py b/trio/_threads.py index 807212e0f9..3fbab05750 100644 --- a/trio/_threads.py +++ b/trio/_threads.py @@ -1,16 +1,19 @@ +from __future__ import annotations + import 
contextvars import functools import inspect import queue as stdlib_queue import threading from itertools import count -from typing import Optional +from typing import Any, Callable, Optional, TypeVar import attr import outcome from sniffio import current_async_library_cvar import trio +from trio._core._traps import RaiseCancelT from ._core import ( RunVar, @@ -22,10 +25,12 @@ from ._sync import CapacityLimiter from ._util import coroutine_or_error +T = TypeVar("T") + # Global due to Threading API, thread local storage for trio token TOKEN_LOCAL = threading.local() -_limiter_local = RunVar("limiter") +_limiter_local: RunVar[CapacityLimiter] = RunVar("limiter") # I pulled this number out of the air; it isn't based on anything. Probably we # should make some kind of measurements to pick a good value. DEFAULT_LIMIT = 40 @@ -59,8 +64,12 @@ class ThreadPlaceholder: @enable_ki_protection async def to_thread_run_sync( - sync_fn, *args, thread_name: Optional[str] = None, cancellable=False, limiter=None -): + sync_fn: Callable[..., T], + *args: Any, + thread_name: Optional[str] = None, + cancellable: bool = False, + limiter: CapacityLimiter | None = None, +) -> T: """Convert a blocking operation into an async operation using a thread. These two lines are equivalent:: @@ -152,7 +161,7 @@ async def to_thread_run_sync( # Holds a reference to the task that's blocked in this function waiting # for the result – or None if this function was cancelled and we should # discard the result. - task_register = [trio.lowlevel.current_task()] + task_register: list[trio.lowlevel.Task | None] = [trio.lowlevel.current_task()] name = f"trio.to_thread.run_sync-{next(_thread_counter)}" placeholder = ThreadPlaceholder(name) @@ -217,14 +226,15 @@ def deliver_worker_fn_result(result): limiter.release_on_behalf_of(placeholder) raise - def abort(_): + def abort(_: RaiseCancelT) -> trio.lowlevel.Abort: if cancellable: task_register[0] = None return trio.lowlevel.Abort.SUCCEEDED else: return trio.lowlevel.Abort.FAILED - return await trio.lowlevel.wait_task_rescheduled(abort) + # wait_task_rescheduled return value cannot be typed + return await trio.lowlevel.wait_task_rescheduled(abort) # type: ignore[no-any-return] def _run_fn_as_system_task(cb, fn, *args, context, trio_token=None): diff --git a/trio/socket.py b/trio/socket.py index a9e276c782..f6aebb6a6e 100644 --- a/trio/socket.py +++ b/trio/socket.py @@ -35,6 +35,7 @@ # import the overwrites from ._socket import ( SocketType as SocketType, + _SocketType as _SocketType, from_stdlib_socket as from_stdlib_socket, fromfd as fromfd, getaddrinfo as getaddrinfo, From a1f57daaa9c2ad5e8807014500b0d173216adbcf Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Mon, 31 Jul 2023 09:42:44 +1000 Subject: [PATCH 111/162] Add types to some internal modules (`_util`, `_deprecate`, `_ki`) (#2719) --- .coveragerc | 3 ++ pyproject.toml | 14 ++++++- trio/__init__.py | 5 ++- trio/_core/_ki.py | 60 +++++++++++++++++++----------- trio/_deprecate.py | 61 +++++++++++++++++++++++-------- trio/_tests/test_exports.py | 7 ++++ trio/_tests/test_util.py | 69 +++++++++++++++++++++++++++++++++++ trio/_tests/verify_types.json | 16 +++----- trio/_util.py | 66 ++++++++++++++++++++++----------- trio/tests.py | 4 +- 10 files changed, 230 insertions(+), 75 deletions(-) diff --git a/.coveragerc b/.coveragerc index d577aa8adf..431a02971b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -21,11 +21,14 @@ exclude_lines = abc.abstractmethod if TYPE_CHECKING: if _t.TYPE_CHECKING: + if t.TYPE_CHECKING: @overload 
partial_branches = pragma: no branch if not TYPE_CHECKING: if not _t.TYPE_CHECKING: + if not t.TYPE_CHECKING: if .* or not TYPE_CHECKING: if .* or not _t.TYPE_CHECKING: + if .* or not t.TYPE_CHECKING: diff --git a/pyproject.toml b/pyproject.toml index 445c40e28c..73813cd58b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,19 +47,28 @@ disallow_untyped_defs = false # DO NOT use `ignore_errors`; it doesn't apply # downstream and users have to deal with them. [[tool.mypy.overrides]] +# Fully typed, enable stricter checks module = [ - "trio._socket", + "trio._abc", "trio._core._local", - "trio._sync", + "trio._deprecate", + "trio._dtls", "trio._file_io", + "trio._ki", + "trio._socket", + "trio._sync", + "trio._util", ] disallow_incomplete_defs = true disallow_untyped_defs = true +disallow_untyped_decorators = true disallow_any_generics = true disallow_any_decorated = true +disallow_any_unimported = true disallow_subclassing_any = true [[tool.mypy.overrides]] +# Needs to use Any due to some complex introspection. module = [ "trio._path", ] @@ -67,6 +76,7 @@ disallow_incomplete_defs = true disallow_untyped_defs = true #disallow_any_generics = true #disallow_any_decorated = true +disallow_any_unimported = true disallow_subclassing_any = true [tool.pytest.ini_options] diff --git a/trio/__init__.py b/trio/__init__.py index ac0687f529..be7de42cde 100644 --- a/trio/__init__.py +++ b/trio/__init__.py @@ -1,5 +1,6 @@ """Trio - A friendly Python library for async concurrency and I/O """ +from __future__ import annotations # General layout: # @@ -16,7 +17,7 @@ # Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625) # must be imported early to avoid circular import -from ._core import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED # isort: skip +from ._core import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED # isort: split # Submodules imported by default from . import abc, from_thread, lowlevel, socket, to_thread @@ -117,7 +118,7 @@ _deprecate.enable_attribute_deprecations(__name__) -__deprecated_attributes__ = { +__deprecated_attributes__: dict[str, _deprecate.DeprecatedAttribute] = { "open_process": _deprecate.DeprecatedAttribute( value=lowlevel.open_process, version="0.20.0", diff --git a/trio/_core/_ki.py b/trio/_core/_ki.py index cc05ef9177..8ae83c287a 100644 --- a/trio/_core/_ki.py +++ b/trio/_core/_ki.py @@ -3,17 +3,21 @@ import inspect import signal import sys +import types +from collections.abc import Callable from functools import wraps -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Final, TypeVar import attr from .._util import is_main_thread +RetT = TypeVar("RetT") + if TYPE_CHECKING: - from typing import Any, Callable, TypeVar + from typing_extensions import ParamSpec, TypeGuard - F = TypeVar("F", bound=Callable[..., Any]) + ArgsT = ParamSpec("ArgsT") # In ordinary single-threaded Python code, when you hit control-C, it raises # an exception and automatically does all the regular unwinding stuff. @@ -80,22 +84,22 @@ # We use this special string as a unique key into the frame locals dictionary. # The @ ensures it is not a valid identifier and can't clash with any possible # real local name. 
See: https://github.com/python-trio/trio/issues/469 -LOCALS_KEY_KI_PROTECTION_ENABLED = "@TRIO_KI_PROTECTION_ENABLED" +LOCALS_KEY_KI_PROTECTION_ENABLED: Final = "@TRIO_KI_PROTECTION_ENABLED" # NB: according to the signal.signal docs, 'frame' can be None on entry to # this function: -def ki_protection_enabled(frame): +def ki_protection_enabled(frame: types.FrameType | None) -> bool: while frame is not None: if LOCALS_KEY_KI_PROTECTION_ENABLED in frame.f_locals: - return frame.f_locals[LOCALS_KEY_KI_PROTECTION_ENABLED] + return bool(frame.f_locals[LOCALS_KEY_KI_PROTECTION_ENABLED]) if frame.f_code.co_name == "__del__": return True frame = frame.f_back return True -def currently_ki_protected(): +def currently_ki_protected() -> bool: r"""Check whether the calling code has :exc:`KeyboardInterrupt` protection enabled. @@ -115,29 +119,35 @@ def currently_ki_protected(): # functions decorated @async_generator are given this magic property that's a # reference to the object itself # see python-trio/async_generator/async_generator/_impl.py -def legacy_isasyncgenfunction(obj): +def legacy_isasyncgenfunction( + obj: object, +) -> TypeGuard[Callable[..., types.AsyncGeneratorType]]: return getattr(obj, "_async_gen_function", None) == id(obj) -def _ki_protection_decorator(enabled): - def decorator(fn): +def _ki_protection_decorator( + enabled: bool, +) -> Callable[[Callable[ArgsT, RetT]], Callable[ArgsT, RetT]]: + # The "ignore[return-value]" below is because the inspect functions cast away the + # original return type of fn, making it just CoroutineType[Any, Any, Any] etc. + def decorator(fn: Callable[ArgsT, RetT]) -> Callable[ArgsT, RetT]: # In some version of Python, isgeneratorfunction returns true for # coroutine functions, so we have to check for coroutine functions # first. if inspect.iscoroutinefunction(fn): @wraps(fn) - def wrapper(*args, **kwargs): + def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT: # See the comment for regular generators below coro = fn(*args, **kwargs) coro.cr_frame.f_locals[LOCALS_KEY_KI_PROTECTION_ENABLED] = enabled - return coro + return coro # type: ignore[return-value] return wrapper elif inspect.isgeneratorfunction(fn): @wraps(fn) - def wrapper(*args, **kwargs): + def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT: # It's important that we inject this directly into the # generator's locals, as opposed to setting it here and then # doing 'yield from'. 
The reason is, if a generator is @@ -148,23 +158,23 @@ def wrapper(*args, **kwargs): # https://bugs.python.org/issue29590 gen = fn(*args, **kwargs) gen.gi_frame.f_locals[LOCALS_KEY_KI_PROTECTION_ENABLED] = enabled - return gen + return gen # type: ignore[return-value] return wrapper elif inspect.isasyncgenfunction(fn) or legacy_isasyncgenfunction(fn): @wraps(fn) - def wrapper(*args, **kwargs): + def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT: # See the comment for regular generators above agen = fn(*args, **kwargs) agen.ag_frame.f_locals[LOCALS_KEY_KI_PROTECTION_ENABLED] = enabled - return agen + return agen # type: ignore[return-value] return wrapper else: @wraps(fn) - def wrapper(*args, **kwargs): + def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = enabled return fn(*args, **kwargs) @@ -173,10 +183,14 @@ def wrapper(*args, **kwargs): return decorator -enable_ki_protection: Callable[[F], F] = _ki_protection_decorator(True) +enable_ki_protection: Callable[ + [Callable[ArgsT, RetT]], Callable[ArgsT, RetT] +] = _ki_protection_decorator(True) enable_ki_protection.__name__ = "enable_ki_protection" -disable_ki_protection: Callable[[F], F] = _ki_protection_decorator(False) +disable_ki_protection: Callable[ + [Callable[ArgsT, RetT]], Callable[ArgsT, RetT] +] = _ki_protection_decorator(False) disable_ki_protection.__name__ = "disable_ki_protection" @@ -184,7 +198,11 @@ def wrapper(*args, **kwargs): class KIManager: handler = attr.ib(default=None) - def install(self, deliver_cb, restrict_keyboard_interrupt_to_checkpoints): + def install( + self, + deliver_cb: Callable[[], object], + restrict_keyboard_interrupt_to_checkpoints: bool, + ) -> None: assert self.handler is None if ( not is_main_thread() @@ -192,7 +210,7 @@ def install(self, deliver_cb, restrict_keyboard_interrupt_to_checkpoints): ): return - def handler(signum, frame): + def handler(signum: int, frame: types.FrameType | None) -> None: assert signum == signal.SIGINT protection_enabled = ki_protection_enabled(frame) if protection_enabled or restrict_keyboard_interrupt_to_checkpoints: diff --git a/trio/_deprecate.py b/trio/_deprecate.py index fe00192583..0a9553b854 100644 --- a/trio/_deprecate.py +++ b/trio/_deprecate.py @@ -1,10 +1,21 @@ +from __future__ import annotations + import sys import warnings +from collections.abc import Callable from functools import wraps from types import ModuleType +from typing import TYPE_CHECKING, ClassVar, TypeVar import attr +if TYPE_CHECKING: + from typing_extensions import ParamSpec + + ArgsT = ParamSpec("ArgsT") + +RetT = TypeVar("RetT") + # We want our warnings to be visible by default (at least for now), but we # also want it to be possible to override that using the -W switch. 
AFAICT @@ -29,17 +40,24 @@ class TrioDeprecationWarning(FutureWarning): """ -def _url_for_issue(issue): +def _url_for_issue(issue: int) -> str: return f"https://github.com/python-trio/trio/issues/{issue}" -def _stringify(thing): +def _stringify(thing: object) -> str: if hasattr(thing, "__module__") and hasattr(thing, "__qualname__"): return f"{thing.__module__}.{thing.__qualname__}" return str(thing) -def warn_deprecated(thing, version, *, issue, instead, stacklevel=2): +def warn_deprecated( + thing: object, + version: str, + *, + issue: int | None, + instead: object, + stacklevel: int = 2, +) -> None: stacklevel += 1 msg = f"{_stringify(thing)} is deprecated since Trio {version}" if instead is None: @@ -53,12 +71,14 @@ def warn_deprecated(thing, version, *, issue, instead, stacklevel=2): # @deprecated("0.2.0", issue=..., instead=...) # def ... -def deprecated(version, *, thing=None, issue, instead): - def do_wrap(fn): +def deprecated( + version: str, *, thing: object = None, issue: int | None, instead: object +) -> Callable[[Callable[ArgsT, RetT]], Callable[ArgsT, RetT]]: + def do_wrap(fn: Callable[ArgsT, RetT]) -> Callable[ArgsT, RetT]: nonlocal thing @wraps(fn) - def wrapper(*args, **kwargs): + def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT: warn_deprecated(thing, version, instead=instead, issue=issue) return fn(*args, **kwargs) @@ -87,11 +107,17 @@ def wrapper(*args, **kwargs): return do_wrap -def deprecated_alias(old_qualname, new_fn, version, *, issue): +def deprecated_alias( + old_qualname: str, + new_fn: Callable[ArgsT, RetT], + version: str, + *, + issue: int | None, +) -> Callable[ArgsT, RetT]: @deprecated(version, issue=issue, instead=new_fn) @wraps(new_fn, assigned=("__module__", "__annotations__")) - def wrapper(*args, **kwargs): - "Deprecated alias." + def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT: + """Deprecated alias.""" return new_fn(*args, **kwargs) wrapper.__qualname__ = old_qualname @@ -101,16 +127,18 @@ def wrapper(*args, **kwargs): @attr.s(frozen=True) class DeprecatedAttribute: - _not_set = object() + _not_set: ClassVar[object] = object() - value = attr.ib() - version = attr.ib() - issue = attr.ib() - instead = attr.ib(default=_not_set) + value: object = attr.ib() + version: str = attr.ib() + issue: int | None = attr.ib() + instead: object = attr.ib(default=_not_set) class _ModuleWithDeprecations(ModuleType): - def __getattr__(self, name): + __deprecated_attributes__: dict[str, DeprecatedAttribute] + + def __getattr__(self, name: str) -> object: if name in self.__deprecated_attributes__: info = self.__deprecated_attributes__[name] instead = info.instead @@ -124,9 +152,10 @@ def __getattr__(self, name): raise AttributeError(msg.format(self.__name__, name)) -def enable_attribute_deprecations(module_name): +def enable_attribute_deprecations(module_name: str) -> None: module = sys.modules[module_name] module.__class__ = _ModuleWithDeprecations + assert isinstance(module, _ModuleWithDeprecations) # Make sure that this is always defined so that # _ModuleWithDeprecations.__getattr__ can access it without jumping # through hoops or risking infinite recursion. diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index b5d0a44088..1b1e8df8da 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -1,3 +1,5 @@ +import __future__ # Regular import, not special! 
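A self-contained sketch of the ``ParamSpec`` pattern that the ``_ki.py`` and ``_deprecate.py`` changes above adopt (the ``log_calls`` decorator here is hypothetical, and ``typing_extensions`` is assumed to be available to type checkers): annotating the decorator as taking ``Callable[ArgsT, RetT]`` and returning ``Callable[ArgsT, RetT]`` preserves the wrapped function's signature.

    from __future__ import annotations

    from functools import wraps
    from typing import TYPE_CHECKING, Callable, TypeVar

    if TYPE_CHECKING:
        from typing_extensions import ParamSpec

        ArgsT = ParamSpec("ArgsT")

    RetT = TypeVar("RetT")

    def log_calls(fn: Callable[ArgsT, RetT]) -> Callable[ArgsT, RetT]:
        @wraps(fn)
        def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT:
            print("calling", fn.__name__)
            return fn(*args, **kwargs)

        return wrapper

    @log_calls
    def add(x: int, y: int) -> int:
        return x + y

    assert add(1, 2) == 3  # type checkers still see (x: int, y: int) -> int

Under ``from __future__ import annotations`` the ``ArgsT`` reference is never evaluated at runtime, which is why the import can stay inside ``TYPE_CHECKING``.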
+ import enum import functools import importlib @@ -107,6 +109,11 @@ def no_underscores(symbols): if modname == "trio": runtime_names.discard("tests") + # Ignore any __future__ feature objects, if imported under that name. + for name in __future__.all_feature_names: + if getattr(module, name, None) is getattr(__future__, name): + runtime_names.remove(name) + if tool in ("mypy", "pyright_verifytypes"): # create py.typed file py_typed_path = Path(trio.__file__).parent / "py.typed" diff --git a/trio/_tests/test_util.py b/trio/_tests/test_util.py index a4df6d35b4..1ab6f825de 100644 --- a/trio/_tests/test_util.py +++ b/trio/_tests/test_util.py @@ -1,5 +1,6 @@ import signal import sys +import types import pytest @@ -15,6 +16,7 @@ Final, NoPublicConstructor, coroutine_or_error, + fixup_module_metadata, generic_function, is_main_thread, signal_raise, @@ -192,3 +194,70 @@ class SubClass(SpecialClass): # Private constructor should not raise assert isinstance(SpecialClass._create(), SpecialClass) + + +def test_fixup_module_metadata(): + # Ignores modules not in the trio.X tree. + non_trio_module = types.ModuleType("not_trio") + non_trio_module.some_func = lambda: None + non_trio_module.some_func.__name__ = "some_func" + non_trio_module.some_func.__qualname__ = "some_func" + + fixup_module_metadata(non_trio_module.__name__, vars(non_trio_module)) + + assert non_trio_module.some_func.__name__ == "some_func" + assert non_trio_module.some_func.__qualname__ == "some_func" + + # Bulild up a fake module to test. Just use lambdas since all we care about is the names. + mod = types.ModuleType("trio._somemodule_impl") + mod.some_func = lambda: None + mod.some_func.__name__ = "_something_else" + mod.some_func.__qualname__ = "_something_else" + + # No __module__ means it's unchanged. + mod.not_funclike = types.SimpleNamespace() + mod.not_funclike.__name__ = "not_funclike" + + # Check __qualname__ being absent works. + mod.only_has_name = types.SimpleNamespace() + mod.only_has_name.__module__ = "trio._somemodule_impl" + mod.only_has_name.__name__ = "only_name" + + # Underscored names are unchanged. + mod._private = lambda: None + mod._private.__module__ = "trio._somemodule_impl" + mod._private.__name__ = mod._private.__qualname__ = "_private" + + # We recurse into classes. + mod.SomeClass = type( + "SomeClass", + (), + { + "__init__": lambda self: None, + "method": lambda self: None, + }, + ) + mod.SomeClass.recursion = mod.SomeClass # Reference loop is fine. + + fixup_module_metadata("trio.somemodule", vars(mod)) + assert mod.some_func.__name__ == "some_func" + assert mod.some_func.__module__ == "trio.somemodule" + assert mod.some_func.__qualname__ == "some_func" + + assert mod.not_funclike.__name__ == "not_funclike" + assert mod._private.__name__ == "_private" + assert mod._private.__module__ == "trio._somemodule_impl" + assert mod._private.__qualname__ == "_private" + + assert mod.only_has_name.__name__ == "only_has_name" + assert mod.only_has_name.__module__ == "trio.somemodule" + assert not hasattr(mod.only_has_name, "__qualname__") + + assert mod.SomeClass.method.__name__ == "method" + assert mod.SomeClass.method.__module__ == "trio.somemodule" + assert mod.SomeClass.method.__qualname__ == "SomeClass.method" + # Make coverage happy. 
+ non_trio_module.some_func() + mod.some_func() + mod._private() + mod.SomeClass().method() diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 60132e07fd..4d632d2589 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9072, + "completenessScore": 0.9104, "exportedSymbolCounts": { - "withAmbiguousType": 1, - "withKnownType": 567, - "withUnknownType": 57 + "withAmbiguousType": 0, + "withKnownType": 569, + "withUnknownType": 56 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -46,12 +46,11 @@ ], "otherSymbolCounts": { "withAmbiguousType": 3, - "withKnownType": 574, - "withUnknownType": 76 + "withKnownType": 576, + "withUnknownType": 74 }, "packageName": "trio", "symbols": [ - "trio.__deprecated_attributes__", "trio._core._entry_queue.TrioToken.run_sync_soon", "trio._core._mock_clock.MockClock.jump", "trio._core._run.Nursery.start", @@ -85,10 +84,8 @@ "trio._ssl.SSLStream.transport_stream", "trio._ssl.SSLStream.unwrap", "trio._ssl.SSLStream.wait_send_all_might_not_block", - "trio._subprocess.Process.__aenter__", "trio._subprocess.Process.__init__", "trio._subprocess.Process.__repr__", - "trio._subprocess.Process.aclose", "trio._subprocess.Process.args", "trio._subprocess.Process.encoding", "trio._subprocess.Process.errors", @@ -107,7 +104,6 @@ "trio.lowlevel.current_root_task", "trio.lowlevel.current_statistics", "trio.lowlevel.current_trio_token", - "trio.lowlevel.currently_ki_protected", "trio.lowlevel.notify_closing", "trio.lowlevel.open_process", "trio.lowlevel.permanently_detach_coroutine_object", diff --git a/trio/_util.py b/trio/_util.py index a87f1fc02c..ba56c18385 100644 --- a/trio/_util.py +++ b/trio/_util.py @@ -1,7 +1,7 @@ # Little utilities we use internally from __future__ import annotations -import collections +import collections.abc import inspect import os import signal @@ -9,15 +9,28 @@ import typing as t from abc import ABCMeta from functools import update_wrapper -from types import TracebackType +from types import AsyncGeneratorType, TracebackType import trio CallT = t.TypeVar("CallT", bound=t.Callable[..., t.Any]) +T = t.TypeVar("T") +RetT = t.TypeVar("RetT") + +if t.TYPE_CHECKING: + from typing_extensions import ParamSpec, Self + + ArgsT = ParamSpec("ArgsT") + + +if t.TYPE_CHECKING: + # Don't type check the implementation below, pthread_kill does not exist on Windows. + def signal_raise(signum: int) -> None: + ... # Equivalent to the C function raise(), which Python doesn't wrap -if os.name == "nt": +elif os.name == "nt": # On Windows, os.kill exists but is really weird. # # If you give it CTRL_C_EVENT or CTRL_BREAK_EVENT, it tries to deliver @@ -61,7 +74,7 @@ signal_raise = getattr(_lib, "raise") else: - def signal_raise(signum): + def signal_raise(signum: int) -> None: signal.pthread_kill(threading.get_ident(), signum) @@ -73,7 +86,7 @@ def signal_raise(signum): # Trying to use signal out of the main thread will fail, so we can then # reliably check if this is the main thread without relying on a # potentially modified threading. -def is_main_thread(): +def is_main_thread() -> bool: """Attempt to reliably check if we are in the main thread.""" try: signal.signal(signal.SIGINT, signal.getsignal(signal.SIGINT)) @@ -86,8 +99,11 @@ def is_main_thread(): # Call the function and get the coroutine object, while giving helpful # errors for common mistakes. Returns coroutine object. 
###### -def coroutine_or_error(async_fn, *args): - def _return_value_looks_like_wrong_library(value): +# TODO: Use TypeVarTuple here. +def coroutine_or_error( + async_fn: t.Callable[..., t.Awaitable[RetT]], *args: t.Any +) -> t.Awaitable[RetT]: + def _return_value_looks_like_wrong_library(value: object) -> bool: # Returned by legacy @asyncio.coroutine functions, which includes # a surprising proportion of asyncio builtins. if isinstance(value, collections.abc.Generator): @@ -183,11 +199,11 @@ class ConflictDetector: """ - def __init__(self, msg): + def __init__(self, msg: str) -> None: self._msg = msg self._held = False - def __enter__(self): + def __enter__(self) -> None: if self._held: raise trio.BusyResourceError(self._msg) else: @@ -224,10 +240,12 @@ def decorator(func: CallT) -> CallT: return decorator -def fixup_module_metadata(module_name, namespace): - seen_ids = set() +def fixup_module_metadata( + module_name: str, namespace: collections.abc.Mapping[str, object] +) -> None: + seen_ids: set[int] = set() - def fix_one(qualname, name, obj): + def fix_one(qualname: str, name: str, obj: object) -> None: # avoid infinite recursion (relevant when using # typing.Generic, for example) if id(obj) in seen_ids: @@ -242,7 +260,8 @@ def fix_one(qualname, name, obj): # rewriting these. if hasattr(obj, "__name__") and "." not in obj.__name__: obj.__name__ = name - obj.__qualname__ = qualname + if hasattr(obj, "__qualname__"): + obj.__qualname__ = qualname if isinstance(obj, type): for attr_name, attr_value in obj.__dict__.items(): fix_one(objname + "." + attr_name, attr_name, attr_value) @@ -252,7 +271,10 @@ def fix_one(qualname, name, obj): fix_one(objname, objname, obj) -class generic_function: +# We need ParamSpec to type this "properly", but that requires a runtime typing_extensions import +# to use as a class base. This is only used at runtime and isn't correct for type checkers anyway, +# so don't bother. +class generic_function(t.Generic[RetT]): """Decorator that makes a function indexable, to communicate non-inferrable generic type parameters to a static type checker. @@ -269,14 +291,14 @@ def open_memory_channel(max_buffer_size: int) -> Tuple[ but at least it becomes possible to write those. """ - def __init__(self, fn): + def __init__(self, fn: t.Callable[..., RetT]) -> None: update_wrapper(self, fn) self._fn = fn - def __call__(self, *args, **kwargs): + def __call__(self, *args: t.Any, **kwargs: t.Any) -> RetT: return self._fn(*args, **kwargs) - def __getitem__(self, _): + def __getitem__(self, subscript: object) -> Self: return self @@ -296,7 +318,10 @@ class SomeClass(metaclass=Final): """ def __new__( - cls, name: str, bases: tuple[type, ...], cls_namespace: dict[str, object] + cls, + name: str, + bases: tuple[type, ...], + cls_namespace: dict[str, object], ) -> Final: for base in bases: if isinstance(base, Final): @@ -307,9 +332,6 @@ def __new__( return super().__new__(cls, name, bases, cls_namespace) -T = t.TypeVar("T") - - class NoPublicConstructor(Final): """Metaclass that enforces a class to be final (i.e., subclass not allowed) and ensures a private constructor. @@ -338,7 +360,7 @@ def _create(cls: t.Type[T], *args: object, **kwargs: object) -> T: return super().__call__(*args, **kwargs) # type: ignore -def name_asyncgen(agen): +def name_asyncgen(agen: AsyncGeneratorType[object, t.NoReturn]) -> str: """Return the fully-qualified name of the async generator function that produced the async generator iterator *agen*. 
""" diff --git a/trio/tests.py b/trio/tests.py index 573a076da8..4ffb583a3a 100644 --- a/trio/tests.py +++ b/trio/tests.py @@ -9,7 +9,7 @@ "trio.tests", "0.22.1", instead="trio._tests", - issue="https://github.com/python-trio/trio/issues/274", + issue=274, ) @@ -23,7 +23,7 @@ def __getattr__(self, attr: str) -> Any: f"trio.tests.{attr}", "0.22.1", instead=f"trio._tests.{attr}", - issue="https://github.com/python-trio/trio/issues/274", + issue=274, ) # needed to access e.g. trio._tests.tools, although pytest doesn't need it From e6d4f74a5961694da8cbc992d6124ab459b45ca6 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Mon, 31 Jul 2023 12:19:54 +0000 Subject: [PATCH 112/162] typecheck trio/_core/_unbounded_queue (#2717) * typecheck trio/_core/_unbounded_queue --------- Co-authored-by: EXPLOSION --- pyproject.toml | 1 + trio/_core/__init__.py | 2 +- trio/_core/_unbounded_queue.py | 65 ++++++++++++++++++++-------------- trio/_tests/verify_types.json | 18 +++------- trio/lowlevel.py | 1 + 5 files changed, 46 insertions(+), 41 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 73813cd58b..1cb552dace 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,6 +51,7 @@ disallow_untyped_defs = false module = [ "trio._abc", "trio._core._local", + "trio._core._unbounded_queue", "trio._deprecate", "trio._dtls", "trio._file_io", diff --git a/trio/_core/__init__.py b/trio/_core/__init__.py index aa898fffe0..8e42d2743b 100644 --- a/trio/_core/__init__.py +++ b/trio/_core/__init__.py @@ -62,7 +62,7 @@ temporarily_detach_coroutine_object, wait_task_rescheduled, ) -from ._unbounded_queue import UnboundedQueue +from ._unbounded_queue import UnboundedQueue, UnboundedQueueStatistics # Windows imports if sys.platform == "win32": diff --git a/trio/_core/_unbounded_queue.py b/trio/_core/_unbounded_queue.py index 9c747749b4..1b7dea095f 100644 --- a/trio/_core/_unbounded_queue.py +++ b/trio/_core/_unbounded_queue.py @@ -1,17 +1,36 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Generic, TypeVar + import attr from .. import _core from .._deprecate import deprecated from .._util import Final +T = TypeVar("T") + +if TYPE_CHECKING: + from typing_extensions import Self + -@attr.s(frozen=True) -class _UnboundedQueueStats: - qsize = attr.ib() - tasks_waiting = attr.ib() +@attr.s(slots=True, frozen=True) +class UnboundedQueueStatistics: + """An object containing debugging information. + Currently the following fields are defined: + + * ``qsize``: The number of items currently in the queue. + * ``tasks_waiting``: The number of tasks blocked on this queue's + :meth:`get_batch` method. + + """ -class UnboundedQueue(metaclass=Final): + qsize: int = attr.ib() + tasks_waiting: int = attr.ib() + + +class UnboundedQueue(Generic[T], metaclass=Final): """An unbounded queue suitable for certain unusual forms of inter-task communication. 
@@ -47,20 +66,20 @@ class UnboundedQueue(metaclass=Final): thing="trio.lowlevel.UnboundedQueue", instead="trio.open_memory_channel(math.inf)", ) - def __init__(self): + def __init__(self) -> None: self._lot = _core.ParkingLot() - self._data = [] + self._data: list[T] = [] # used to allow handoff from put to the first task in the lot self._can_get = False - def __repr__(self): + def __repr__(self) -> str: return f"" - def qsize(self): + def qsize(self) -> int: """Returns the number of items currently in the queue.""" return len(self._data) - def empty(self): + def empty(self) -> bool: """Returns True if the queue is empty, False otherwise. There is some subtlety to interpreting this method's return value: see @@ -70,7 +89,7 @@ def empty(self): return not self._data @_core.enable_ki_protection - def put_nowait(self, obj): + def put_nowait(self, obj: T) -> None: """Put an object into the queue, without blocking. This always succeeds, because the queue is unbounded. We don't provide @@ -88,13 +107,13 @@ def put_nowait(self, obj): self._can_get = True self._data.append(obj) - def _get_batch_protected(self): + def _get_batch_protected(self) -> list[T]: data = self._data.copy() self._data.clear() self._can_get = False return data - def get_batch_nowait(self): + def get_batch_nowait(self) -> list[T]: """Attempt to get the next batch from the queue, without blocking. Returns: @@ -110,7 +129,7 @@ def get_batch_nowait(self): raise _core.WouldBlock return self._get_batch_protected() - async def get_batch(self): + async def get_batch(self) -> list[T]: """Get the next batch from the queue, blocking as necessary. Returns: @@ -128,22 +147,14 @@ async def get_batch(self): finally: await _core.cancel_shielded_checkpoint() - def statistics(self): - """Return an object containing debugging information. - - Currently the following fields are defined: - - * ``qsize``: The number of items currently in the queue. - * ``tasks_waiting``: The number of tasks blocked on this queue's - :meth:`get_batch` method. 
- - """ - return _UnboundedQueueStats( + def statistics(self) -> UnboundedQueueStatistics: + """Return an :class:`UnboundedQueueStatistics` object containing debugging information.""" + return UnboundedQueueStatistics( qsize=len(self._data), tasks_waiting=self._lot.statistics().tasks_waiting ) - def __aiter__(self): + def __aiter__(self) -> Self: return self - async def __anext__(self): + async def __anext__(self) -> list[T]: return await self.get_batch() diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 4d632d2589..cc34831844 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9104, + "completenessScore": 0.9121405750798722, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 569, - "withUnknownType": 56 + "withKnownType": 571, + "withUnknownType": 55 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -46,8 +46,8 @@ ], "otherSymbolCounts": { "withAmbiguousType": 3, - "withKnownType": 576, - "withUnknownType": 74 + "withKnownType": 598, + "withUnknownType": 65 }, "packageName": "trio", "symbols": [ @@ -57,14 +57,6 @@ "trio._core._run.Nursery.start_soon", "trio._core._run.TaskStatus.__repr__", "trio._core._run.TaskStatus.started", - "trio._core._unbounded_queue.UnboundedQueue.__aiter__", - "trio._core._unbounded_queue.UnboundedQueue.__anext__", - "trio._core._unbounded_queue.UnboundedQueue.__repr__", - "trio._core._unbounded_queue.UnboundedQueue.empty", - "trio._core._unbounded_queue.UnboundedQueue.get_batch", - "trio._core._unbounded_queue.UnboundedQueue.get_batch_nowait", - "trio._core._unbounded_queue.UnboundedQueue.qsize", - "trio._core._unbounded_queue.UnboundedQueue.statistics", "trio._dtls.DTLSChannel.__init__", "trio._dtls.DTLSEndpoint.serve", "trio._highlevel_socket.SocketStream.getsockopt", diff --git a/trio/lowlevel.py b/trio/lowlevel.py index 54f4ef3141..36d23d5955 100644 --- a/trio/lowlevel.py +++ b/trio/lowlevel.py @@ -17,6 +17,7 @@ Task as Task, TrioToken as TrioToken, UnboundedQueue as UnboundedQueue, + UnboundedQueueStatistics as UnboundedQueueStatistics, add_instrument as add_instrument, cancel_shielded_checkpoint as cancel_shielded_checkpoint, checkpoint as checkpoint, From 08625dc858849db10f38f442a610a5d07b4b49d9 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Mon, 31 Jul 2023 15:25:35 +0000 Subject: [PATCH 113/162] typecheck trio/_core/_entry_queue.py (#2718) * typecheck trio/_core/_entry_queue.py --------- Co-authored-by: Spencer Brown --- pyproject.toml | 1 + trio/_core/_entry_queue.py | 49 +++++++++++++++++++++------------ trio/_core/_local.py | 2 +- trio/_core/_tests/test_local.py | 2 +- trio/_tests/verify_types.json | 11 ++++---- 5 files changed, 40 insertions(+), 25 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 1cb552dace..ac3e1a3ea5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,7 @@ disallow_untyped_defs = false # Fully typed, enable stricter checks module = [ "trio._abc", + "trio._core._entry_queue", "trio._core._local", "trio._core._unbounded_queue", "trio._deprecate", diff --git a/trio/_core/_entry_queue.py b/trio/_core/_entry_queue.py index 878506bb2b..468a13462a 100644 --- a/trio/_core/_entry_queue.py +++ b/trio/_core/_entry_queue.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import threading from collections import deque +from typing import Callable, Iterable, NoReturn, Tuple import 
attr @@ -7,6 +10,11 @@ from .._util import NoPublicConstructor from ._wakeup_socketpair import WakeupSocketpair +# TODO: Type with TypeVarTuple, at least to an extent where it makes +# the public interface safe. +Function = Callable[..., object] +Job = Tuple[Function, Iterable[object]] + @attr.s(slots=True) class EntryQueue: @@ -17,11 +25,11 @@ class EntryQueue: # atomic WRT signal delivery (signal handlers can run on either side, but # not *during* a deque operation). dict makes similar guarantees - and # it's even ordered! - queue = attr.ib(factory=deque) - idempotent_queue = attr.ib(factory=dict) + queue: deque[Job] = attr.ib(factory=deque) + idempotent_queue: dict[Job, None] = attr.ib(factory=dict) - wakeup = attr.ib(factory=WakeupSocketpair) - done = attr.ib(default=False) + wakeup: WakeupSocketpair = attr.ib(factory=WakeupSocketpair) + done: bool = attr.ib(default=False) # Must be a reentrant lock, because it's acquired from signal handlers. # RLock is signal-safe as of cpython 3.2. NB that this does mean that the # lock is effectively *disabled* when we enter from signal context. The @@ -30,9 +38,9 @@ class EntryQueue: # main thread -- it just might happen at some inconvenient place. But if # you look at the one place where the main thread holds the lock, it's # just to make 1 assignment, so that's atomic WRT a signal anyway. - lock = attr.ib(factory=threading.RLock) + lock: threading.RLock = attr.ib(factory=threading.RLock) - async def task(self): + async def task(self) -> None: assert _core.currently_ki_protected() # RLock has two implementations: a signal-safe version in _thread, and # and signal-UNsafe version in threading. We need the signal safe @@ -43,7 +51,7 @@ async def task(self): # https://bugs.python.org/issue13697#msg237140 assert self.lock.__class__.__module__ == "_thread" - def run_cb(job): + def run_cb(job: Job) -> None: # We run this with KI protection enabled; it's the callback's # job to disable it if it wants it disabled. Exceptions are # treated like system task exceptions (i.e., converted into @@ -53,7 +61,7 @@ def run_cb(job): sync_fn(*args) except BaseException as exc: - async def kill_everything(exc): + async def kill_everything(exc: BaseException) -> NoReturn: raise exc try: @@ -63,14 +71,17 @@ async def kill_everything(exc): # system nursery is already closed. # TODO(2020-06): this is a gross hack and should # be fixed soon when we address #1607. 
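The ``Function`` and ``Job`` aliases above type the queued callable only as ``Callable[..., object]``. A hedged usage sketch (not from the patch) of what ends up on that queue: ``TrioToken.run_sync_soon`` is the thread-safe way to hand a plain synchronous callable back to the Trio run loop.

    import threading

    import trio

    async def main() -> None:
        token = trio.lowlevel.current_trio_token()
        done = trio.Event()

        def in_worker_thread() -> None:
            # Runs off the Trio thread; Event.set is synchronous, so it can
            # be queued straight through the entry queue.
            token.run_sync_soon(done.set)

        threading.Thread(target=in_worker_thread).start()
        await done.wait()

    trio.run(main)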
- _core.current_task().parent_nursery.start_soon(kill_everything, exc) - - return True + parent_nursery = _core.current_task().parent_nursery + if parent_nursery is None: + raise AssertionError( + "Internal error: `parent_nursery` should never be `None`" + ) from exc # pragma: no cover + parent_nursery.start_soon(kill_everything, exc) # This has to be carefully written to be safe in the face of new items # being queued while we iterate, and to do a bounded amount of work on # each pass: - def run_all_bounded(): + def run_all_bounded() -> None: for _ in range(len(self.queue)): run_cb(self.queue.popleft()) for job in list(self.idempotent_queue): @@ -104,13 +115,15 @@ def run_all_bounded(): assert not self.queue assert not self.idempotent_queue - def close(self): + def close(self) -> None: self.wakeup.close() - def size(self): + def size(self) -> int: return len(self.queue) + len(self.idempotent_queue) - def run_sync_soon(self, sync_fn, *args, idempotent=False): + def run_sync_soon( + self, sync_fn: Function, *args: object, idempotent: bool = False + ) -> None: with self.lock: if self.done: raise _core.RunFinishedError("run() has exited") @@ -146,9 +159,11 @@ class TrioToken(metaclass=NoPublicConstructor): """ - _reentry_queue = attr.ib() + _reentry_queue: EntryQueue = attr.ib() - def run_sync_soon(self, sync_fn, *args, idempotent=False): + def run_sync_soon( + self, sync_fn: Function, *args: object, idempotent: bool = False + ) -> None: """Schedule a call to ``sync_fn(*args)`` to occur in the context of a Trio task. diff --git a/trio/_core/_local.py b/trio/_core/_local.py index 7f2c632153..39a2cff009 100644 --- a/trio/_core/_local.py +++ b/trio/_core/_local.py @@ -27,7 +27,7 @@ def _empty(cls, var: RunVar[T]) -> RunVarToken[T]: return cls._create(var) -@attr.s(eq=False, hash=False, slots=True) +@attr.s(eq=False, hash=False, slots=True, repr=False) class RunVar(Generic[T], metaclass=Final): """The run-local variant of a context variable. 
diff --git a/trio/_core/_tests/test_local.py b/trio/_core/_tests/test_local.py index 619dcd20d4..d36be0479e 100644 --- a/trio/_core/_tests/test_local.py +++ b/trio/_core/_tests/test_local.py @@ -8,7 +8,7 @@ def test_runvar_smoketest(): t1 = _core.RunVar("test1") t2 = _core.RunVar("test2", default="catfish") - assert "RunVar" in repr(t1) + assert repr(t1) == "" async def first_check(): with pytest.raises(LookupError): diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index cc34831844..4dbd256dcf 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9121405750798722, + "completenessScore": 0.9137380191693291, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 571, - "withUnknownType": 55 + "withKnownType": 572, + "withUnknownType": 54 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -46,12 +46,11 @@ ], "otherSymbolCounts": { "withAmbiguousType": 3, - "withKnownType": 598, - "withUnknownType": 65 + "withKnownType": 600, + "withUnknownType": 63 }, "packageName": "trio", "symbols": [ - "trio._core._entry_queue.TrioToken.run_sync_soon", "trio._core._mock_clock.MockClock.jump", "trio._core._run.Nursery.start", "trio._core._run.Nursery.start_soon", From cd32271162253e016d1635b722e9ccfbc8108ab1 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 02:55:05 +0000 Subject: [PATCH 114/162] Dependency updates (#2728) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- docs-requirements.txt | 4 ++-- test-requirements.txt | 28 ++++++++++++++-------------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index fabf3e901a..41fc1fd4d6 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -20,7 +20,7 @@ cffi==1.15.1 # via cryptography charset-normalizer==3.2.0 # via requests -click==8.1.5 +click==8.1.6 # via # click-default-group # towncrier @@ -103,7 +103,7 @@ tomli==2.0.1 # via towncrier towncrier==23.6.0 # via -r docs-requirements.in -urllib3==2.0.3 +urllib3==2.0.4 # via requests zipp==3.16.2 # via diff --git a/test-requirements.txt b/test-requirements.txt index 7e0d86e62e..ea8b93e72a 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -24,7 +24,7 @@ build==0.10.0 # via pip-tools cffi==1.15.1 # via cryptography -click==8.1.5 +click==8.1.6 # via # black # pip-tools @@ -40,7 +40,7 @@ cryptography==41.0.2 # types-pyopenssl decorator==5.1.1 # via ipython -dill==0.3.6 +dill==0.3.7 # via pylint exceptiongroup==1.1.2 ; python_version < "3.11" # via @@ -48,7 +48,7 @@ exceptiongroup==1.1.2 ; python_version < "3.11" # pytest executing==1.2.0 # via stack-data -flake8==6.0.0 +flake8==6.1.0 # via # -r test-requirements.in # flake8-pyproject @@ -64,7 +64,7 @@ ipython==8.12.2 # via -r test-requirements.in isort==5.12.0 # via pylint -jedi==0.18.2 +jedi==0.19.0 # via # -r test-requirements.in # ipython @@ -94,15 +94,15 @@ packaging==23.1 # pytest parso==0.8.3 # via jedi -pathspec==0.11.1 +pathspec==0.11.2 # via black pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==7.0.0 +pip-tools==7.1.0 # via -r test-requirements.in -platformdirs==3.9.1 +platformdirs==3.10.0 # via # black # pylint @@ -114,21 +114,21 @@ ptyprocess==0.7.0 # via pexpect pure-eval==0.2.2 # via stack-data -pycodestyle==2.10.0 +pycodestyle==2.11.0 # 
via flake8 pycparser==2.21 # via cffi -pyflakes==3.0.1 +pyflakes==3.1.0 # via flake8 pygments==2.15.1 # via ipython -pylint==2.17.4 +pylint==2.17.5 # via -r test-requirements.in pyopenssl==23.2.0 # via -r test-requirements.in pyproject-hooks==1.0.0 # via build -pyright==1.1.317 +pyright==1.1.318 # via -r test-requirements.in pytest==7.4.0 # via -r test-requirements.in @@ -150,7 +150,7 @@ tomli==2.0.1 # pylint # pyproject-hooks # pytest -tomlkit==0.11.8 +tomlkit==0.12.1 # via pylint traitlets==5.9.0 # via @@ -158,7 +158,7 @@ traitlets==5.9.0 # matplotlib-inline trustme==1.1.0 # via -r test-requirements.in -types-pyopenssl==23.2.0.1 ; implementation_name == "cpython" +types-pyopenssl==23.2.0.2 ; implementation_name == "cpython" # via -r test-requirements.in typing-extensions==4.7.1 # via @@ -170,7 +170,7 @@ typing-extensions==4.7.1 # pylint wcwidth==0.2.6 # via prompt-toolkit -wheel==0.40.0 +wheel==0.41.0 # via pip-tools wrapt==1.15.0 # via astroid From ccf4e24e26b70c09ca8b09ca8d5ef37a2fddfd83 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Tue, 1 Aug 2023 00:05:16 -0500 Subject: [PATCH 115/162] Typing highlevel open tcp stream (#2725) * Add type annotations to `_highlevel_open_tcp_stream.py` * Update `verify_types.json` * Add missing annotations and fix others * Add `_highlevel_open_tcp_stream.py` to mypy more strict checking block * Change to `Address` where it makes sense * After discussion change to `Address` * Update `verify_types.json` * Basically revert e82ce69 from fixing mypy type issues * Update `verify_types.json` * fix unpacking not to lose types, add hacky workaround to get the types actually checked despite calling through nursery.start_soon * undo changes to behaviour, fix signature * test * Revert "test" This reverts commit cf546ed0cde1dbf0f9621c38c8576ca7b4ba53b9. 
* Update `verify_types.json` --------- Co-authored-by: jakkdl --- pyproject.toml | 1 + trio/_highlevel_open_tcp_stream.py | 67 ++++++++++++++++++++++-------- trio/_tests/verify_types.json | 7 ++-- trio/socket.py | 1 + 4 files changed, 55 insertions(+), 21 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ac3e1a3ea5..90bc98f64f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,7 @@ module = [ "trio._deprecate", "trio._dtls", "trio._file_io", + "trio._highlevel_open_tcp_stream.py", "trio._ki", "trio._socket", "trio._sync", diff --git a/trio/_highlevel_open_tcp_stream.py b/trio/_highlevel_open_tcp_stream.py index a2477104d9..0c4e8a4a8d 100644 --- a/trio/_highlevel_open_tcp_stream.py +++ b/trio/_highlevel_open_tcp_stream.py @@ -1,9 +1,14 @@ +from __future__ import annotations + import sys +from collections.abc import Generator from contextlib import contextmanager +from socket import AddressFamily, SocketKind +from typing import TYPE_CHECKING import trio from trio._core._multierror import MultiError -from trio.socket import SOCK_STREAM, getaddrinfo, socket +from trio.socket import SOCK_STREAM, Address, _SocketType, getaddrinfo, socket if sys.version_info < (3, 11): from exceptiongroup import ExceptionGroup @@ -109,8 +114,8 @@ @contextmanager -def close_all(): - sockets_to_close = set() +def close_all() -> Generator[set[_SocketType], None, None]: + sockets_to_close: set[_SocketType] = set() try: yield sockets_to_close finally: @@ -126,7 +131,17 @@ def close_all(): raise MultiError(errs) -def reorder_for_rfc_6555_section_5_4(targets): +def reorder_for_rfc_6555_section_5_4( + targets: list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ] +) -> None: # RFC 6555 section 5.4 says that if getaddrinfo returns multiple address # families (e.g. IPv4 and IPv6), then you should make sure that your first # and second attempts use different families: @@ -144,7 +159,7 @@ def reorder_for_rfc_6555_section_5_4(targets): break -def format_host_port(host, port): +def format_host_port(host: str | bytes, port: int) -> str: host = host.decode("ascii") if isinstance(host, bytes) else host if ":" in host: return f"[{host}]:{port}" @@ -173,8 +188,12 @@ def format_host_port(host, port): # AF_INET6: "..."} # this might be simpler after async def open_tcp_stream( - host, port, *, happy_eyeballs_delay=DEFAULT_DELAY, local_address=None -): + host: str | bytes, + port: int, + *, + happy_eyeballs_delay: float | None = DEFAULT_DELAY, + local_address: str | None = None, +) -> trio.abc.Stream: """Connect to the given host and port over TCP. If the given ``host`` has multiple IP addresses associated with it, then @@ -212,9 +231,9 @@ async def open_tcp_stream( port (int): The port to connect to. - happy_eyeballs_delay (float): How many seconds to wait for each + happy_eyeballs_delay (float or None): How many seconds to wait for each connection attempt to succeed or fail before getting impatient and - starting another one in parallel. Set to `math.inf` if you want + starting another one in parallel. Set to `None` if you want to limit to only one connection attempt at a time (like :func:`socket.create_connection`). Default: 0.25 (250 ms). @@ -247,9 +266,8 @@ async def open_tcp_stream( # To keep our public API surface smaller, rule out some cases that # getaddrinfo will accept in some circumstances, but that act weird or # have non-portable behavior or are just plain not useful. - # No type check on host though b/c we want to allow bytes-likes. 
- if host is None: - raise ValueError("host cannot be None") + if not isinstance(host, (str, bytes)): + raise ValueError(f"host must be str or bytes, not {host!r}") if not isinstance(port, int): raise TypeError(f"port must be int, not {port!r}") @@ -274,7 +292,7 @@ async def open_tcp_stream( # Keeps track of the socket that we're going to complete with, # need to make sure this isn't automatically closed - winning_socket = None + winning_socket: _SocketType | None = None # Try connecting to the specified address. Possible outcomes: # - success: record connected socket in winning_socket and cancel @@ -283,7 +301,11 @@ async def open_tcp_stream( # the next connection attempt to start early # code needs to ensure sockets can be closed appropriately in the # face of crash or cancellation - async def attempt_connect(socket_args, sockaddr, attempt_failed): + async def attempt_connect( + socket_args: tuple[AddressFamily, SocketKind, int], + sockaddr: Address, + attempt_failed: trio.Event, + ) -> None: nonlocal winning_socket try: @@ -334,7 +356,7 @@ async def attempt_connect(socket_args, sockaddr, attempt_failed): except OSError: raise OSError( f"local_address={local_address!r} is incompatible " - f"with remote address {sockaddr}" + f"with remote address {sockaddr!r}" ) await sock.connect(sockaddr) @@ -355,12 +377,23 @@ async def attempt_connect(socket_args, sockaddr, attempt_failed): # nursery spawns a task for each connection attempt, will be # cancelled by the task that gets a successful connection async with trio.open_nursery() as nursery: - for *sa, _, addr in targets: + for address_family, socket_type, proto, _, addr in targets: # create an event to indicate connection failure, # allowing the next target to be tried early attempt_failed = trio.Event() - nursery.start_soon(attempt_connect, sa, addr, attempt_failed) + # workaround to check types until typing of nursery.start_soon improved + if TYPE_CHECKING: + await attempt_connect( + (address_family, socket_type, proto), addr, attempt_failed + ) + + nursery.start_soon( + attempt_connect, + (address_family, socket_type, proto), + addr, + attempt_failed, + ) # give this attempt at most this time before moving on with trio.move_on_after(happy_eyeballs_delay): diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 4dbd256dcf..ac2cfbd197 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9137380191693291, + "completenessScore": 0.9154704944178629, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 572, - "withUnknownType": 54 + "withKnownType": 574, + "withUnknownType": 53 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -109,7 +109,6 @@ "trio.open_ssl_over_tcp_listeners", "trio.open_ssl_over_tcp_stream", "trio.open_tcp_listeners", - "trio.open_tcp_stream", "trio.open_unix_socket", "trio.run", "trio.run_process", diff --git a/trio/socket.py b/trio/socket.py index f6aebb6a6e..f8d0bc3fc2 100644 --- a/trio/socket.py +++ b/trio/socket.py @@ -34,6 +34,7 @@ # import the overwrites from ._socket import ( + Address as Address, SocketType as SocketType, _SocketType as _SocketType, from_stdlib_socket as from_stdlib_socket, From 8bce327bda28b62bf490176b03dd2c1373c8f973 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 2 Aug 2023 04:01:53 +0000 Subject: [PATCH 116/162] Dependency updates (#2731) 
Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- docs-requirements.txt | 2 +- test-requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index 41fc1fd4d6..2a24f2d7d7 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -26,7 +26,7 @@ click==8.1.6 # towncrier click-default-group==1.2.2 # via towncrier -cryptography==41.0.2 +cryptography==41.0.3 # via pyopenssl docutils==0.18.1 # via diff --git a/test-requirements.txt b/test-requirements.txt index ea8b93e72a..73d94f09c1 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -32,7 +32,7 @@ codespell==2.2.5 # via -r test-requirements.in coverage==7.2.7 # via -r test-requirements.in -cryptography==41.0.2 +cryptography==41.0.3 # via # -r test-requirements.in # pyopenssl From 348713a7ae73c1afaef79969c90c57e12f4f098b Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Thu, 3 Aug 2023 06:55:28 +0900 Subject: [PATCH 117/162] Fix version selector and maybe search for RTD (#2732) * Update docs-requirements.in and maybe fix docs * Fix pip-compile'd documentation requirements --- docs-requirements.in | 8 +++----- docs-requirements.txt | 6 +++--- docs/source/conf.py | 1 + 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/docs-requirements.in b/docs-requirements.in index d6214ec1d0..9239fe3fce 100644 --- a/docs-requirements.in +++ b/docs-requirements.in @@ -1,9 +1,8 @@ # RTD is currently installing 1.5.3, which has a bug in :lineno-match: -# sphinx-3.4 causes warnings about some trio._abc classes: GH#2338 -sphinx >= 1.7.0, < 6.2 -# jinja2-3.1 causes importerror with sphinx<4.0 -jinja2 < 3.1 +sphinx >= 4.0, < 6.2 +jinja2 sphinx_rtd_theme +sphinxcontrib-jquery sphinxcontrib-trio towncrier @@ -11,7 +10,6 @@ towncrier cffi; os_name == "nt" attrs >= 19.2.0 sortedcontainers -async_generator >= 1.9 idna outcome sniffio diff --git a/docs-requirements.txt b/docs-requirements.txt index 2a24f2d7d7..e0a131b58d 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -6,8 +6,6 @@ # alabaster==0.7.13 # via sphinx -async-generator==1.10 - # via -r docs-requirements.in attrs==23.1.0 # via # -r docs-requirements.in @@ -90,7 +88,9 @@ sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.1 # via sphinx sphinxcontrib-jquery==4.1 - # via sphinx-rtd-theme + # via + # -r docs-requirements.in + # sphinx-rtd-theme sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.3 diff --git a/docs/source/conf.py b/docs/source/conf.py index 0e16b2d426..a5abeb0dca 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -113,6 +113,7 @@ def setup(app): "sphinx.ext.coverage", "sphinx.ext.napoleon", "sphinxcontrib_trio", + "sphinxcontrib.jquery", "local_customization", ] From 9236431d9a82f9d7d5d2d8090b797de08c493faf Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Sun, 6 Aug 2023 14:52:30 +1000 Subject: [PATCH 118/162] Switch to |-optional --- trio/_core/_thread_cache.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/trio/_core/_thread_cache.py b/trio/_core/_thread_cache.py index cc272fc92c..7e936d809b 100644 --- a/trio/_core/_thread_cache.py +++ b/trio/_core/_thread_cache.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import ctypes import ctypes.util import sys @@ -17,18 +19,16 @@ def _to_os_thread_name(name: str) -> bytes: # used to construct the method used to set os thread name, or None, depending on platform. 
# called once on import -def get_os_thread_name_func() -> Optional[Callable[[Optional[int], str], None]]: - def namefunc(setname: Callable[[int, bytes], int], ident: Optional[int], name: str): +def get_os_thread_name_func() -> Callable[[int | None, str], None] | None: + def namefunc(setname: Callable[[int, bytes], int], ident: int | None, name: str) -> None: # Thread.ident is None "if it has not been started". Unclear if that can happen # with current usage. if ident is not None: # pragma: no cover setname(ident, _to_os_thread_name(name)) - # namefunc on mac also takes an ident, even if pthread_setname_np doesn't/can't use it + # namefunc on Mac also takes an ident, even if pthread_setname_np doesn't/can't use it # so the caller don't need to care about platform. - def darwin_namefunc( - setname: Callable[[bytes], int], ident: Optional[int], name: str - ): + def darwin_namefunc(setname: Callable[[bytes], int], ident: int | None, name: str) -> None: # I don't know if Mac can rename threads that hasn't been started, but default # to no to be on the safe side. if ident is not None: # pragma: no cover From 0c646e88a78498771ece296ae8d989b0f87c75ec Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Sun, 6 Aug 2023 14:54:29 +1000 Subject: [PATCH 119/162] Type _core._thread_cache --- pyproject.toml | 1 + trio/_core/_thread_cache.py | 46 ++++++++++++++++++++++++++----------- 2 files changed, 34 insertions(+), 13 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 90bc98f64f..097d43dade 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ module = [ "trio._core._entry_queue", "trio._core._local", "trio._core._unbounded_queue", + "trio._core._thread_cache", "trio._deprecate", "trio._dtls", "trio._file_io", diff --git a/trio/_core/_thread_cache.py b/trio/_core/_thread_cache.py index 7e936d809b..85a7778e6a 100644 --- a/trio/_core/_thread_cache.py +++ b/trio/_core/_thread_cache.py @@ -7,10 +7,12 @@ from functools import partial from itertools import count from threading import Lock, Thread -from typing import Callable, Optional, Tuple +from typing import Any, Callable, Generic, TypeVar import outcome +T = TypeVar("T") + def _to_os_thread_name(name: str) -> bytes: # ctypes handles the trailing \00 @@ -20,7 +22,9 @@ def _to_os_thread_name(name: str) -> bytes: # used to construct the method used to set os thread name, or None, depending on platform. # called once on import def get_os_thread_name_func() -> Callable[[int | None, str], None] | None: - def namefunc(setname: Callable[[int, bytes], int], ident: int | None, name: str) -> None: + def namefunc( + setname: Callable[[int, bytes], int], ident: int | None, name: str + ) -> None: # Thread.ident is None "if it has not been started". Unclear if that can happen # with current usage. if ident is not None: # pragma: no cover @@ -28,7 +32,9 @@ def namefunc(setname: Callable[[int, bytes], int], ident: int | None, name: str) # namefunc on Mac also takes an ident, even if pthread_setname_np doesn't/can't use it # so the caller don't need to care about platform. - def darwin_namefunc(setname: Callable[[bytes], int], ident: int | None, name: str) -> None: + def darwin_namefunc( + setname: Callable[[bytes], int], ident: int | None, name: str + ) -> None: # I don't know if Mac can rename threads that hasn't been started, but default # to no to be on the safe side. 
if ident is not None: # pragma: no cover @@ -110,9 +116,13 @@ def darwin_namefunc(setname: Callable[[bytes], int], ident: int | None, name: st name_counter = count() -class WorkerThread: - def __init__(self, thread_cache): - self._job: Optional[Tuple[Callable, Callable, str]] = None +class WorkerThread(Generic[T]): + def __init__(self, thread_cache: ThreadCache) -> None: + self._job: tuple[ + Callable[[], T], + Callable[[outcome.Outcome[T]], object], + str | None, + ] | None = None self._thread_cache = thread_cache # This Lock is used in an unconventional way. # @@ -130,7 +140,7 @@ def __init__(self, thread_cache): set_os_thread_name(self._thread.ident, self._default_name) self._thread.start() - def _handle_job(self): + def _handle_job(self) -> None: # Handle job in a separate method to ensure user-created # objects are cleaned up in a consistent manner. assert self._job is not None @@ -161,7 +171,7 @@ def _handle_job(self): print("Exception while delivering result of thread", file=sys.stderr) traceback.print_exception(type(e), e, e.__traceback__) - def _work(self): + def _work(self) -> None: while True: if self._worker_lock.acquire(timeout=IDLE_TIMEOUT): # We got a job @@ -185,10 +195,16 @@ def _work(self): class ThreadCache: - def __init__(self): - self._idle_workers = {} - - def start_thread_soon(self, fn, deliver, name: Optional[str] = None): + def __init__(self) -> None: + self._idle_workers: dict[WorkerThread[Any], None] = {} + + def start_thread_soon( + self, + fn: Callable[[], T], + deliver: Callable[[outcome.Outcome[T]], object], + name: str | None = None, + ) -> None: + worker: WorkerThread[T] try: worker, _ = self._idle_workers.popitem() except KeyError: @@ -200,7 +216,11 @@ def start_thread_soon(self, fn, deliver, name: Optional[str] = None): THREAD_CACHE = ThreadCache() -def start_thread_soon(fn, deliver, name: Optional[str] = None): +def start_thread_soon( + fn: Callable[[], T], + deliver: Callable[[outcome.Outcome[T]], object], + name: str | None = None, +) -> None: """Runs ``deliver(outcome.capture(fn))`` in a worker thread. 
Generally ``fn`` does some blocking work, and ``deliver`` delivers the From 9933a83ddf4c5982f2e9155b96d5683fd90c112f Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Sun, 6 Aug 2023 15:04:44 +1000 Subject: [PATCH 120/162] Update type completeness file --- trio/_tests/verify_types.json | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index ac2cfbd197..5befa79513 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9154704944178629, + "completenessScore": 0.9170653907496013, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 574, - "withUnknownType": 53 + "withKnownType": 575, + "withUnknownType": 52 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -102,7 +102,6 @@ "trio.lowlevel.reschedule", "trio.lowlevel.spawn_system_task", "trio.lowlevel.start_guest_run", - "trio.lowlevel.start_thread_soon", "trio.lowlevel.temporarily_detach_coroutine_object", "trio.lowlevel.wait_readable", "trio.lowlevel.wait_writable", From 49d03931456ad32fef6dfe4524dc46c71420ea60 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sun, 6 Aug 2023 04:46:01 -0500 Subject: [PATCH 121/162] Add typing for `_highlevel_open_tcp_listeners.py` (#2724) * Add typing for `_highlevel_open_tcp_listeners.py` --------- Co-authored-by: CoolCat467 --- docs/source/reference-core.rst | 3 -- docs/source/reference-lowlevel.rst | 2 + trio/__init__.py | 1 - trio/_dtls.py | 4 +- trio/_highlevel_open_tcp_listeners.py | 49 ++++++++++++------- .../test_highlevel_open_tcp_listeners.py | 12 +++++ trio/_tests/verify_types.json | 7 ++- trio/lowlevel.py | 1 + 8 files changed, 52 insertions(+), 27 deletions(-) diff --git a/docs/source/reference-core.rst b/docs/source/reference-core.rst index 980a3106e5..30994e4386 100644 --- a/docs/source/reference-core.rst +++ b/docs/source/reference-core.rst @@ -922,9 +922,6 @@ The nursery API See :meth:`~Nursery.start`. -.. autoclass:: TaskStatus - :members: - .. _task-local-storage: Task-local storage diff --git a/docs/source/reference-lowlevel.rst b/docs/source/reference-lowlevel.rst index bacebff5ad..faf07268cb 100644 --- a/docs/source/reference-lowlevel.rst +++ b/docs/source/reference-lowlevel.rst @@ -534,6 +534,8 @@ Task API putting a task to sleep and then waking it up again. (See :func:`wait_task_rescheduled` for details.) +.. autoclass:: TaskStatus + :members: .. 
_guest-mode: diff --git a/trio/__init__.py b/trio/__init__.py index be7de42cde..277baa5339 100644 --- a/trio/__init__.py +++ b/trio/__init__.py @@ -35,7 +35,6 @@ EndOfChannel as EndOfChannel, Nursery as Nursery, RunFinishedError as RunFinishedError, - TaskStatus as TaskStatus, TrioInternalError as TrioInternalError, WouldBlock as WouldBlock, current_effective_deadline as current_effective_deadline, diff --git a/trio/_dtls.py b/trio/_dtls.py index 8675cb75b6..b3ed0fd883 100644 --- a/trio/_dtls.py +++ b/trio/_dtls.py @@ -42,8 +42,8 @@ from OpenSSL.SSL import Context from typing_extensions import Self, TypeAlias - from ._core._run import TaskStatus - from ._socket import Address, _SocketType + from trio.lowlevel import TaskStatus + from trio.socket import Address, _SocketType MAX_UDP_PACKET_SIZE = 65527 diff --git a/trio/_highlevel_open_tcp_listeners.py b/trio/_highlevel_open_tcp_listeners.py index 6211917254..0d5f630495 100644 --- a/trio/_highlevel_open_tcp_listeners.py +++ b/trio/_highlevel_open_tcp_listeners.py @@ -1,8 +1,12 @@ +from __future__ import annotations + import errno import sys +from collections.abc import Awaitable, Callable from math import inf import trio +from trio.lowlevel import TaskStatus from . import socket as tsocket @@ -23,7 +27,7 @@ # backpressure. If a connection gets stuck waiting in the backlog queue, then # from the peer's point of view the connection succeeded but then their # send/recv will stall until we get to it, possibly for a long time. OTOH if -# there isn't room in the backlog queue... then their connect stalls, possibly +# there isn't room in the backlog queue, then their connect stalls, possibly # for a long time, which is pretty much the same thing. # # A large backlog can also use a bit more kernel memory, but this seems fairly @@ -37,16 +41,24 @@ # so this is unnecessary -- we can just pass in "infinity" and get the maximum # that way. (Verified on Windows, Linux, macOS using # notes-to-self/measure-listen-backlog.py) -def _compute_backlog(backlog): - if backlog is None: - backlog = inf +def _compute_backlog(backlog: int | float | None) -> int: # Many systems (Linux, BSDs, ...) store the backlog in a uint16 and are # missing overflow protection, so we apply our own overflow protection. # https://github.com/golang/go/issues/5030 + if isinstance(backlog, float): + # TODO: Remove when removing infinity support + # https://github.com/python-trio/trio/pull/2724#discussion_r1278541729 + if backlog != inf: + raise ValueError(f"Only accepts infinity, not {backlog!r}") + backlog = None + if backlog is None: + return 0xFFFF return min(backlog, 0xFFFF) -async def open_tcp_listeners(port, *, host=None, backlog=None): +async def open_tcp_listeners( + port: int, *, host: str | bytes | None = None, backlog: int | float | None = None +) -> list[trio.SocketListener]: """Create :class:`SocketListener` objects to listen for TCP connections. Args: @@ -62,7 +74,7 @@ async def open_tcp_listeners(port, *, host=None, backlog=None): :func:`open_tcp_listeners` will bind to both the IPv4 wildcard address (``0.0.0.0``) and also the IPv6 wildcard address (``::``). - host (str, bytes-like, or None): The local interface to bind to. This is + host (str, bytes, or None): The local interface to bind to. This is passed to :func:`~socket.getaddrinfo` with the ``AI_PASSIVE`` flag set. @@ -78,13 +90,16 @@ async def open_tcp_listeners(port, *, host=None, backlog=None): all interfaces, pass the family-specific wildcard address: ``"0.0.0.0"`` for IPv4-only and ``"::"`` for IPv6-only. 
- backlog (int or None): The listen backlog to use. If you leave this as - ``None`` then Trio will pick a good default. (Currently: whatever + backlog (int, math.inf, or None): The listen backlog to use. If you leave this as + ``None`` or ``math.inf`` then Trio will pick a good default. (Currently: whatever your system has configured as the maximum backlog.) Returns: list of :class:`SocketListener` + Raises: + :class:`TypeError` if invalid arguments. + """ # getaddrinfo sometimes allows port=None, sometimes not (depending on # whether host=None). And on some systems it treats "" as 0, others it @@ -93,7 +108,7 @@ async def open_tcp_listeners(port, *, host=None, backlog=None): if not isinstance(port, int): raise TypeError(f"port must be an int not {port!r}") - backlog = _compute_backlog(backlog) + computed_backlog = _compute_backlog(backlog) addresses = await tsocket.getaddrinfo( host, port, type=tsocket.SOCK_STREAM, flags=tsocket.AI_PASSIVE @@ -126,7 +141,7 @@ async def open_tcp_listeners(port, *, host=None, backlog=None): sock.setsockopt(tsocket.IPPROTO_IPV6, tsocket.IPV6_V6ONLY, 1) await sock.bind(sockaddr) - sock.listen(backlog) + sock.listen(computed_backlog) listeners.append(trio.SocketListener(sock)) except: @@ -150,14 +165,14 @@ async def open_tcp_listeners(port, *, host=None, backlog=None): async def serve_tcp( - handler, - port, + handler: Callable[[trio.SocketStream], Awaitable[object]], + port: int, *, - host=None, - backlog=None, - handler_nursery=None, - task_status=trio.TASK_STATUS_IGNORED, -): + host: str | bytes | None = None, + backlog: int | float | None = None, + handler_nursery: trio.Nursery | None = None, + task_status: TaskStatus = trio.TASK_STATUS_IGNORED, # type: ignore[assignment] # default has type "_TaskStatusIgnored", argument has type "TaskStatus" +) -> None: """Listen for incoming TCP connections, and for each one start a task running ``handler(stream)``. 
diff --git a/trio/_tests/test_highlevel_open_tcp_listeners.py b/trio/_tests/test_highlevel_open_tcp_listeners.py index e58cbd13cc..6eca844f0c 100644 --- a/trio/_tests/test_highlevel_open_tcp_listeners.py +++ b/trio/_tests/test_highlevel_open_tcp_listeners.py @@ -1,6 +1,7 @@ import errno import socket as stdlib_socket import sys +from math import inf import attr import pytest @@ -289,6 +290,7 @@ async def test_open_tcp_listeners_backlog(): tsocket.set_custom_socket_factory(fsf) for given, expected in [ (None, 0xFFFF), + (inf, 0xFFFF), (99999999, 0xFFFF), (10, 10), (1, 1), @@ -297,3 +299,13 @@ async def test_open_tcp_listeners_backlog(): assert listeners for listener in listeners: assert listener.socket.backlog == expected + + +async def test_open_tcp_listeners_backlog_float_error(): + fsf = FakeSocketFactory(99) + tsocket.set_custom_socket_factory(fsf) + for should_fail in (0.0, 2.18, 3.14, 9.75): + with pytest.raises( + ValueError, match=f"Only accepts infinity, not {should_fail!r}" + ): + await open_tcp_listeners(0, backlog=should_fail) diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index ac2cfbd197..9c71ddd58a 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9154704944178629, + "completenessScore": 0.9170653907496013, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 574, - "withUnknownType": 53 + "withKnownType": 575, + "withUnknownType": 52 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -108,7 +108,6 @@ "trio.lowlevel.wait_writable", "trio.open_ssl_over_tcp_listeners", "trio.open_ssl_over_tcp_stream", - "trio.open_tcp_listeners", "trio.open_unix_socket", "trio.run", "trio.run_process", diff --git a/trio/lowlevel.py b/trio/lowlevel.py index 36d23d5955..c66e22b60e 100644 --- a/trio/lowlevel.py +++ b/trio/lowlevel.py @@ -15,6 +15,7 @@ RaiseCancelT as RaiseCancelT, RunVar as RunVar, Task as Task, + TaskStatus as TaskStatus, TrioToken as TrioToken, UnboundedQueue as UnboundedQueue, UnboundedQueueStatistics as UnboundedQueueStatistics, From 987ef312b740188a9539d4e1d54a547cd484e70d Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sun, 6 Aug 2023 05:07:57 -0500 Subject: [PATCH 122/162] Add typing to `_tools/gen_exports.py` (#2738) * Add typing to `_tools/gen_exports.py` --------- Co-authored-by: jakkdl --- pyproject.toml | 1 + trio/_tools/gen_exports.py | 32 ++++++++++++++++++++------------ 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 90bc98f64f..0900f3a7d1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,6 +60,7 @@ module = [ "trio._ki", "trio._socket", "trio._sync", + "trio._tools.gen_exports", "trio._util", ] disallow_incomplete_defs = true diff --git a/trio/_tools/gen_exports.py b/trio/_tools/gen_exports.py index a5d8529b53..bae7e4f69d 100755 --- a/trio/_tools/gen_exports.py +++ b/trio/_tools/gen_exports.py @@ -3,12 +3,19 @@ Code generation script for class methods to be exported as public API """ +from __future__ import annotations + import argparse import ast import os import sys +from collections.abc import Iterable, Iterator from pathlib import Path from textwrap import indent +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing_extensions import TypeGuard import astor @@ -36,7 +43,7 @@ """ -def is_function(node): +def is_function(node: ast.AST) -> 
TypeGuard[ast.FunctionDef | ast.AsyncFunctionDef]: """Check if the AST node is either a function or an async function """ @@ -45,17 +52,18 @@ def is_function(node): return False -def is_public(node): +def is_public(node: ast.AST) -> TypeGuard[ast.FunctionDef | ast.AsyncFunctionDef]: """Check if the AST node has a _public decorator""" - if not is_function(node): - return False - for decorator in node.decorator_list: - if isinstance(decorator, ast.Name) and decorator.id == "_public": - return True + if is_function(node): + for decorator in node.decorator_list: + if isinstance(decorator, ast.Name) and decorator.id == "_public": + return True return False -def get_public_methods(tree): +def get_public_methods( + tree: ast.AST, +) -> Iterator[ast.FunctionDef | ast.AsyncFunctionDef]: """Return a list of methods marked as public. The function walks the given tree and extracts all objects that are functions which are marked @@ -66,7 +74,7 @@ def get_public_methods(tree): yield node -def create_passthrough_args(funcdef): +def create_passthrough_args(funcdef: ast.FunctionDef | ast.AsyncFunctionDef) -> str: """Given a function definition, create a string that represents taking all the arguments from the function, and passing them through to another invocation of the same function. @@ -130,7 +138,7 @@ def gen_public_wrappers_source(source_path: Path, lookup_path: str) -> str: return "\n\n".join(generated) -def matches_disk_files(new_files): +def matches_disk_files(new_files: dict[str, str]) -> bool: for new_path, new_source in new_files.items(): if not os.path.exists(new_path): return False @@ -141,7 +149,7 @@ def matches_disk_files(new_files): return True -def process(sources_and_lookups, *, do_test): +def process(sources_and_lookups: Iterable[tuple[Path, str]], *, do_test: bool) -> None: new_files = {} for source_path, lookup_path in sources_and_lookups: print("Scanning:", source_path) @@ -164,7 +172,7 @@ def process(sources_and_lookups, *, do_test): # This is in fact run in CI, but only in the formatting check job, which # doesn't collect coverage. -def main(): # pragma: no cover +def main() -> None: # pragma: no cover parser = argparse.ArgumentParser( description="Generate python code for public api wrappers" ) From 59229b1d473979e979f265e253ab9f871b122512 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sun, 6 Aug 2023 05:19:33 -0500 Subject: [PATCH 123/162] Add missing type annotations to `_core/_mock_clock.py` (#2740) * Add missing type annotations to `_core/_mock_clock.py` * remove -> None on __init__ until decision taken to enforce it globally --------- Co-authored-by: jakkdl --- trio/_core/_mock_clock.py | 2 +- trio/_tests/verify_types.json | 11 +++++------ 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/trio/_core/_mock_clock.py b/trio/_core/_mock_clock.py index fe35298631..27a5829076 100644 --- a/trio/_core/_mock_clock.py +++ b/trio/_core/_mock_clock.py @@ -150,7 +150,7 @@ def deadline_to_sleep_time(self, deadline: float) -> float: else: return 999999999 - def jump(self, seconds) -> None: + def jump(self, seconds: float) -> None: """Manually advance the clock by the given number of seconds. 
Args: diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 9c71ddd58a..55ba3b32d7 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9170653907496013, + "completenessScore": 0.9186602870813397, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 575, - "withUnknownType": 52 + "withKnownType": 576, + "withUnknownType": 51 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -46,12 +46,11 @@ ], "otherSymbolCounts": { "withAmbiguousType": 3, - "withKnownType": 600, - "withUnknownType": 63 + "withKnownType": 602, + "withUnknownType": 61 }, "packageName": "trio", "symbols": [ - "trio._core._mock_clock.MockClock.jump", "trio._core._run.Nursery.start", "trio._core._run.Nursery.start_soon", "trio._core._run.TaskStatus.__repr__", From c8aefc675327645e32e3afe4f7406e615461367c Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Tue, 8 Aug 2023 12:37:13 +0900 Subject: [PATCH 124/162] Add link to changelog on PyPI page (#2736) * Add link to changelog on PyPI page * Update setup.py * Update setup.py * Minor stable -> latest change --- setup.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/setup.py b/setup.py index 2917f7c12e..68ed59cc97 100644 --- a/setup.py +++ b/setup.py @@ -52,6 +52,8 @@ * Tutorial and reference manual: https://trio.readthedocs.io +* Changelog: https://trio.readthedocs.io/en/latest/history.html + * Bug tracker and source code: https://github.com/python-trio/trio * Real-time chat: https://gitter.im/python-trio/general @@ -118,4 +120,8 @@ "Topic :: System :: Networking", "Framework :: Trio", ], + project_urls={ + "Documentation": "https://trio.readthedocs.io/", + "Changelog": "https://trio.readthedocs.io/en/latest/history.html", + }, ) From b719d8b521978e37154caa58ef1020deb9013b32 Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Tue, 8 Aug 2023 14:14:38 +1000 Subject: [PATCH 125/162] Temporarily ignore Any from missing outcome stubs --- trio/_core/_thread_cache.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/trio/_core/_thread_cache.py b/trio/_core/_thread_cache.py index 85a7778e6a..aabd6698f9 100644 --- a/trio/_core/_thread_cache.py +++ b/trio/_core/_thread_cache.py @@ -118,7 +118,7 @@ def darwin_namefunc( class WorkerThread(Generic[T]): def __init__(self, thread_cache: ThreadCache) -> None: - self._job: tuple[ + self._job: tuple[ # type: ignore[no-any-unimported] Callable[[], T], Callable[[outcome.Outcome[T]], object], str | None, @@ -198,7 +198,7 @@ class ThreadCache: def __init__(self) -> None: self._idle_workers: dict[WorkerThread[Any], None] = {} - def start_thread_soon( + def start_thread_soon( # type: ignore[no-any-unimported] self, fn: Callable[[], T], deliver: Callable[[outcome.Outcome[T]], object], @@ -216,7 +216,7 @@ def start_thread_soon( THREAD_CACHE = ThreadCache() -def start_thread_soon( +def start_thread_soon( # type: ignore[no-any-unimported] fn: Callable[[], T], deliver: Callable[[outcome.Outcome[T]], object], name: str | None = None, From 69f26825b193ae3afa1e0e962bb905d1f6a04719 Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Tue, 8 Aug 2023 14:16:48 +1000 Subject: [PATCH 126/162] Change typevar name --- trio/_core/_thread_cache.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/trio/_core/_thread_cache.py b/trio/_core/_thread_cache.py index aabd6698f9..d8f5a94c5b 100644 --- a/trio/_core/_thread_cache.py +++ 
b/trio/_core/_thread_cache.py @@ -11,7 +11,7 @@ import outcome -T = TypeVar("T") +RetT = TypeVar("RetT") def _to_os_thread_name(name: str) -> bytes: @@ -116,11 +116,11 @@ def darwin_namefunc( name_counter = count() -class WorkerThread(Generic[T]): +class WorkerThread(Generic[RetT]): def __init__(self, thread_cache: ThreadCache) -> None: self._job: tuple[ # type: ignore[no-any-unimported] - Callable[[], T], - Callable[[outcome.Outcome[T]], object], + Callable[[], RetT], + Callable[[outcome.Outcome[RetT]], object], str | None, ] | None = None self._thread_cache = thread_cache @@ -200,11 +200,11 @@ def __init__(self) -> None: def start_thread_soon( # type: ignore[no-any-unimported] self, - fn: Callable[[], T], - deliver: Callable[[outcome.Outcome[T]], object], + fn: Callable[[], RetT], + deliver: Callable[[outcome.Outcome[RetT]], object], name: str | None = None, ) -> None: - worker: WorkerThread[T] + worker: WorkerThread[RetT] try: worker, _ = self._idle_workers.popitem() except KeyError: @@ -217,8 +217,8 @@ def start_thread_soon( # type: ignore[no-any-unimported] def start_thread_soon( # type: ignore[no-any-unimported] - fn: Callable[[], T], - deliver: Callable[[outcome.Outcome[T]], object], + fn: Callable[[], RetT], + deliver: Callable[[outcome.Outcome[RetT]], object], name: str | None = None, ) -> None: """Runs ``deliver(outcome.capture(fn))`` in a worker thread. From 055728c9742fafc994c58e39309a20d37355d2ee Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Tue, 8 Aug 2023 16:36:25 +1000 Subject: [PATCH 127/162] Make docs build succeed --- docs/source/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/source/conf.py b/docs/source/conf.py index a5abeb0dca..39eda12ad5 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -67,6 +67,7 @@ ("py:class", "types.FrameType"), ("py:class", "P.args"), ("py:class", "P.kwargs"), + ("py:class", "RetT"), # TODO: figure out if you can link this to SSL ("py:class", "Context"), # TODO: temporary type From 722f1b577d4753de5ea1ca5b5b9f2f1a7c6cb56d Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Tue, 8 Aug 2023 16:33:51 +0900 Subject: [PATCH 128/162] Address an old comment waiting for `Self` support (#2746) * Address an old comment left behind * Fix formatting... 
--- trio/_channel.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/trio/_channel.py b/trio/_channel.py index c8d27695b8..db122d37f5 100644 --- a/trio/_channel.py +++ b/trio/_channel.py @@ -4,7 +4,7 @@ from math import inf from types import TracebackType from typing import Tuple # only needed for typechecking on <3.9 -from typing import TYPE_CHECKING, Generic, TypeVar +from typing import TYPE_CHECKING, Generic import attr from outcome import Error, Value @@ -15,8 +15,8 @@ from ._core import Abort, RaiseCancelT, Task, enable_ki_protection from ._util import NoPublicConstructor, generic_function -# Temporary TypeVar needed until mypy release supports Self as a type -SelfT = TypeVar("SelfT") +if TYPE_CHECKING: + from typing_extensions import Self def _open_memory_channel( @@ -237,7 +237,7 @@ def clone(self) -> MemorySendChannel[SendType]: raise trio.ClosedResourceError return MemorySendChannel._create(self._state) - def __enter__(self: SelfT) -> SelfT: + def __enter__(self) -> Self: return self def __exit__( @@ -383,7 +383,7 @@ def clone(self) -> MemoryReceiveChannel[ReceiveType]: raise trio.ClosedResourceError return MemoryReceiveChannel._create(self._state) - def __enter__(self: SelfT) -> SelfT: + def __enter__(self) -> Self: return self def __exit__( From 0cb2886969f121dd7132c19f7a2fd92e1caa8f52 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Thu, 10 Aug 2023 13:09:55 +0200 Subject: [PATCH 129/162] type _io_epoll and _io_kqueue (#2743) * type _io_epoll and _io_kqueue --------- Co-authored-by: Spencer Brown --- pyproject.toml | 5 +- trio/_core/_generated_instrumentation.py | 12 ++- trio/_core/_generated_io_epoll.py | 20 ++-- trio/_core/_generated_io_kqueue.py | 36 ++++++-- trio/_core/_generated_io_windows.py | 13 ++- trio/_core/_generated_run.py | 12 ++- trio/_core/_io_epoll.py | 66 +++++++------ trio/_core/_io_kqueue.py | 66 +++++++------ trio/_core/_io_windows.py | 4 +- trio/_core/_run.py | 2 +- trio/_tests/tools/test_gen_exports.py | 44 +++++++-- trio/_tests/verify_types.json | 6 +- trio/_tools/gen_exports.py | 113 +++++++++++++++++++---- 13 files changed, 280 insertions(+), 119 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a23f7f5db9..fc7d37dbf3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,6 @@ extend-ignore = ['D', 'E', 'W', 'F403', 'F405', 'F821', 'F822'] per-file-ignores = [ 'trio/__init__.py: F401', 'trio/_core/__init__.py: F401', - 'trio/_core/_generated*.py: F401', 'trio/_core/_tests/test_multierror_scripts/*: F401', 'trio/abc.py: F401', 'trio/lowlevel.py: F401', @@ -51,6 +50,10 @@ disallow_untyped_defs = false module = [ "trio._abc", "trio._core._entry_queue", + "trio._core._generated_io_epoll", + "trio._core._generated_io_kqueue", + "trio._core._io_epoll", + "trio._core._io_kqueue", "trio._core._local", "trio._core._unbounded_queue", "trio._core._thread_cache", diff --git a/trio/_core/_generated_instrumentation.py b/trio/_core/_generated_instrumentation.py index 30c2f26b4e..605a6372f2 100644 --- a/trio/_core/_generated_instrumentation.py +++ b/trio/_core/_generated_instrumentation.py @@ -1,12 +1,14 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! 
ALL EDITS WILL BE LOST ****** # ************************************************************* -# isort: skip -from ._instrumentation import Instrument -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED -from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT - +# Don't lint this file, generation will not format this too nicely. +# isort: skip_file # fmt: off +from __future__ import annotations + +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import GLOBAL_RUN_CONTEXT +from ._instrumentation import Instrument def add_instrument(instrument: Instrument) ->None: diff --git a/trio/_core/_generated_io_epoll.py b/trio/_core/_generated_io_epoll.py index 02fb3bc348..abe49ed3ff 100644 --- a/trio/_core/_generated_io_epoll.py +++ b/trio/_core/_generated_io_epoll.py @@ -1,15 +1,21 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# isort: skip -from ._instrumentation import Instrument +# Don't lint this file, generation will not format this too nicely. +# isort: skip_file +# fmt: off +from __future__ import annotations + from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED -from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT +from ._run import GLOBAL_RUN_CONTEXT +from socket import socket +from typing import TYPE_CHECKING +import sys -# fmt: off +assert not TYPE_CHECKING or sys.platform=="linux" -async def wait_readable(fd): +async def wait_readable(fd: (int | socket)) ->None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_readable(fd) @@ -17,7 +23,7 @@ async def wait_readable(fd): raise RuntimeError("must be called from async context") -async def wait_writable(fd): +async def wait_writable(fd: (int | socket)) ->None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_writable(fd) @@ -25,7 +31,7 @@ async def wait_writable(fd): raise RuntimeError("must be called from async context") -def notify_closing(fd): +def notify_closing(fd: (int | socket)) ->None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.io_manager.notify_closing(fd) diff --git a/trio/_core/_generated_io_kqueue.py b/trio/_core/_generated_io_kqueue.py index 94e819769c..cfcf6354c7 100644 --- a/trio/_core/_generated_io_kqueue.py +++ b/trio/_core/_generated_io_kqueue.py @@ -1,15 +1,29 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# isort: skip -from ._instrumentation import Instrument +# Don't lint this file, generation will not format this too nicely. +# isort: skip_file +# fmt: off +from __future__ import annotations + from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED -from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT +from ._run import GLOBAL_RUN_CONTEXT +from typing import Callable, ContextManager, TYPE_CHECKING -# fmt: off +if TYPE_CHECKING: + import select + from socket import socket + + from ._traps import Abort, RaiseCancelT + + from .. 
import _core + +import sys + +assert not TYPE_CHECKING or sys.platform=="darwin" -def current_kqueue(): +def current_kqueue() ->select.kqueue: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.io_manager.current_kqueue() @@ -17,7 +31,8 @@ def current_kqueue(): raise RuntimeError("must be called from async context") -def monitor_kevent(ident, filter): +def monitor_kevent(ident: int, filter: int) ->ContextManager[_core.UnboundedQueue + [select.kevent]]: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.io_manager.monitor_kevent(ident, filter) @@ -25,7 +40,8 @@ def monitor_kevent(ident, filter): raise RuntimeError("must be called from async context") -async def wait_kevent(ident, filter, abort_func): +async def wait_kevent(ident: int, filter: int, abort_func: Callable[[ + RaiseCancelT], Abort]) ->Abort: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_kevent(ident, filter, abort_func) @@ -33,7 +49,7 @@ async def wait_kevent(ident, filter, abort_func): raise RuntimeError("must be called from async context") -async def wait_readable(fd): +async def wait_readable(fd: (int | socket)) ->None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_readable(fd) @@ -41,7 +57,7 @@ async def wait_readable(fd): raise RuntimeError("must be called from async context") -async def wait_writable(fd): +async def wait_writable(fd: (int | socket)) ->None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_writable(fd) @@ -49,7 +65,7 @@ async def wait_writable(fd): raise RuntimeError("must be called from async context") -def notify_closing(fd): +def notify_closing(fd: (int | socket)) ->None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.io_manager.notify_closing(fd) diff --git a/trio/_core/_generated_io_windows.py b/trio/_core/_generated_io_windows.py index 26b4da697d..7fa6fd5126 100644 --- a/trio/_core/_generated_io_windows.py +++ b/trio/_core/_generated_io_windows.py @@ -1,12 +1,17 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# isort: skip -from ._instrumentation import Instrument +# Don't lint this file, generation will not format this too nicely. +# isort: skip_file +# fmt: off +from __future__ import annotations + from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED -from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT +from ._run import GLOBAL_RUN_CONTEXT +from typing import TYPE_CHECKING +import sys -# fmt: off +assert not TYPE_CHECKING or sys.platform=="win32" async def wait_readable(sock): diff --git a/trio/_core/_generated_run.py b/trio/_core/_generated_run.py index d1e74a93f4..674c86aaec 100644 --- a/trio/_core/_generated_run.py +++ b/trio/_core/_generated_run.py @@ -1,12 +1,14 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# isort: skip -from ._instrumentation import Instrument -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED -from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT - +# Don't lint this file, generation will not format this too nicely. 
+# isort: skip_file # fmt: off +from __future__ import annotations + +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import GLOBAL_RUN_CONTEXT +from ._run import _NO_SEND def current_statistics(): diff --git a/trio/_core/_io_epoll.py b/trio/_core/_io_epoll.py index 376dd18a4e..c4a31f3722 100644 --- a/trio/_core/_io_epoll.py +++ b/trio/_core/_io_epoll.py @@ -1,23 +1,38 @@ +from __future__ import annotations + import select import sys from collections import defaultdict -from typing import TYPE_CHECKING, Dict +from typing import TYPE_CHECKING, DefaultDict, Literal import attr from .. import _core from ._io_common import wake_all -from ._run import _public +from ._run import Task, _public from ._wakeup_socketpair import WakeupSocketpair +if TYPE_CHECKING: + from socket import socket + + from .._core import Abort, RaiseCancelT + + +@attr.s(slots=True, eq=False) +class EpollWaiters: + read_task: Task | None = attr.ib(default=None) + write_task: Task | None = attr.ib(default=None) + current_flags: int = attr.ib(default=0) + + assert not TYPE_CHECKING or sys.platform == "linux" @attr.s(slots=True, eq=False, frozen=True) class _EpollStatistics: - tasks_waiting_read = attr.ib() - tasks_waiting_write = attr.ib() - backend = attr.ib(default="epoll") + tasks_waiting_read: int = attr.ib() + tasks_waiting_write: int = attr.ib() + backend: Literal["epoll"] = attr.ib(init=False, default="epoll") # Some facts about epoll @@ -178,28 +193,21 @@ class _EpollStatistics: # wanted to about how epoll works. -@attr.s(slots=True, eq=False) -class EpollWaiters: - read_task = attr.ib(default=None) - write_task = attr.ib(default=None) - current_flags = attr.ib(default=0) - - @attr.s(slots=True, eq=False, hash=False) class EpollIOManager: - _epoll = attr.ib(factory=select.epoll) + _epoll: select.epoll = attr.ib(factory=select.epoll) # {fd: EpollWaiters} - _registered = attr.ib( - factory=lambda: defaultdict(EpollWaiters), type=Dict[int, EpollWaiters] + _registered: DefaultDict[int, EpollWaiters] = attr.ib( + factory=lambda: defaultdict(EpollWaiters) ) - _force_wakeup = attr.ib(factory=WakeupSocketpair) - _force_wakeup_fd = attr.ib(default=None) + _force_wakeup: WakeupSocketpair = attr.ib(factory=WakeupSocketpair) + _force_wakeup_fd: int | None = attr.ib(default=None) - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: self._epoll.register(self._force_wakeup.wakeup_sock, select.EPOLLIN) self._force_wakeup_fd = self._force_wakeup.wakeup_sock.fileno() - def statistics(self): + def statistics(self) -> _EpollStatistics: tasks_waiting_read = 0 tasks_waiting_write = 0 for waiter in self._registered.values(): @@ -212,24 +220,24 @@ def statistics(self): tasks_waiting_write=tasks_waiting_write, ) - def close(self): + def close(self) -> None: self._epoll.close() self._force_wakeup.close() - def force_wakeup(self): + def force_wakeup(self) -> None: self._force_wakeup.wakeup_thread_and_signal_safe() # Return value must be False-y IFF the timeout expired, NOT if any I/O # happened or force_wakeup was called. Otherwise it can be anything; gets # passed straight through to process_events. - def get_events(self, timeout): + def get_events(self, timeout: float) -> list[tuple[int, int]]: # max_events must be > 0 or epoll gets cranky # accessing self._registered from a thread looks dangerous, but it's # OK because it doesn't matter if our value is a little bit off. 
max_events = max(1, len(self._registered)) return self._epoll.poll(timeout, max_events) - def process_events(self, events): + def process_events(self, events: list[tuple[int, int]]) -> None: for fd, flags in events: if fd == self._force_wakeup_fd: self._force_wakeup.drain() @@ -248,7 +256,7 @@ def process_events(self, events): waiters.read_task = None self._update_registrations(fd) - def _update_registrations(self, fd): + def _update_registrations(self, fd: int) -> None: waiters = self._registered[fd] wanted_flags = 0 if waiters.read_task is not None: @@ -277,7 +285,7 @@ def _update_registrations(self, fd): if not wanted_flags: del self._registered[fd] - async def _epoll_wait(self, fd, attr_name): + async def _epoll_wait(self, fd: int | socket, attr_name: str) -> None: if not isinstance(fd, int): fd = fd.fileno() waiters = self._registered[fd] @@ -288,7 +296,7 @@ async def _epoll_wait(self, fd, attr_name): setattr(waiters, attr_name, _core.current_task()) self._update_registrations(fd) - def abort(_): + def abort(_: RaiseCancelT) -> Abort: setattr(waiters, attr_name, None) self._update_registrations(fd) return _core.Abort.SUCCEEDED @@ -296,15 +304,15 @@ def abort(_): await _core.wait_task_rescheduled(abort) @_public - async def wait_readable(self, fd): + async def wait_readable(self, fd: int | socket) -> None: await self._epoll_wait(fd, "read_task") @_public - async def wait_writable(self, fd): + async def wait_writable(self, fd: int | socket) -> None: await self._epoll_wait(fd, "write_task") @_public - def notify_closing(self, fd): + def notify_closing(self, fd: int | socket) -> None: if not isinstance(fd, int): fd = fd.fileno() wake_all( diff --git a/trio/_core/_io_kqueue.py b/trio/_core/_io_kqueue.py index d1151843e8..0b0f8ee557 100644 --- a/trio/_core/_io_kqueue.py +++ b/trio/_core/_io_kqueue.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import errno import select import sys from contextlib import contextmanager -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Callable, Iterator, Literal import attr import outcome @@ -11,32 +13,39 @@ from ._run import _public from ._wakeup_socketpair import WakeupSocketpair +if TYPE_CHECKING: + from socket import socket + + from .._core import Abort, RaiseCancelT, Task, UnboundedQueue + assert not TYPE_CHECKING or (sys.platform != "linux" and sys.platform != "win32") @attr.s(slots=True, eq=False, frozen=True) class _KqueueStatistics: - tasks_waiting = attr.ib() - monitors = attr.ib() - backend = attr.ib(default="kqueue") + tasks_waiting: int = attr.ib() + monitors: int = attr.ib() + backend: Literal["kqueue"] = attr.ib(init=False, default="kqueue") @attr.s(slots=True, eq=False) class KqueueIOManager: - _kqueue = attr.ib(factory=select.kqueue) + _kqueue: select.kqueue = attr.ib(factory=select.kqueue) # {(ident, filter): Task or UnboundedQueue} - _registered = attr.ib(factory=dict) - _force_wakeup = attr.ib(factory=WakeupSocketpair) - _force_wakeup_fd = attr.ib(default=None) + _registered: dict[tuple[int, int], Task | UnboundedQueue[select.kevent]] = attr.ib( + factory=dict + ) + _force_wakeup: WakeupSocketpair = attr.ib(factory=WakeupSocketpair) + _force_wakeup_fd: int | None = attr.ib(default=None) - def __attrs_post_init__(self): + def __attrs_post_init__(self) -> None: force_wakeup_event = select.kevent( self._force_wakeup.wakeup_sock, select.KQ_FILTER_READ, select.KQ_EV_ADD ) self._kqueue.control([force_wakeup_event], 0) self._force_wakeup_fd = self._force_wakeup.wakeup_sock.fileno() - def statistics(self): + def 
statistics(self) -> _KqueueStatistics: tasks_waiting = 0 monitors = 0 for receiver in self._registered.values(): @@ -46,14 +55,14 @@ def statistics(self): monitors += 1 return _KqueueStatistics(tasks_waiting=tasks_waiting, monitors=monitors) - def close(self): + def close(self) -> None: self._kqueue.close() self._force_wakeup.close() - def force_wakeup(self): + def force_wakeup(self) -> None: self._force_wakeup.wakeup_thread_and_signal_safe() - def get_events(self, timeout): + def get_events(self, timeout: float) -> list[select.kevent]: # max_events must be > 0 or kqueue gets cranky # and we generally want this to be strictly larger than the actual # number of events we get, so that we can tell that we've gotten @@ -70,7 +79,7 @@ def get_events(self, timeout): # and loop back to the start return events - def process_events(self, events): + def process_events(self, events: list[select.kevent]) -> None: for event in events: key = (event.ident, event.filter) if event.ident == self._force_wakeup_fd: @@ -79,7 +88,7 @@ def process_events(self, events): receiver = self._registered[key] if event.flags & select.KQ_EV_ONESHOT: del self._registered[key] - if type(receiver) is _core.Task: + if isinstance(receiver, _core.Task): _core.reschedule(receiver, outcome.Value(event)) else: receiver.put_nowait(event) @@ -96,18 +105,20 @@ def process_events(self, events): # be more ergonomic... @_public - def current_kqueue(self): + def current_kqueue(self) -> select.kqueue: return self._kqueue @contextmanager @_public - def monitor_kevent(self, ident, filter): + def monitor_kevent( + self, ident: int, filter: int + ) -> Iterator[_core.UnboundedQueue[select.kevent]]: key = (ident, filter) if key in self._registered: raise _core.BusyResourceError( "attempt to register multiple listeners for same ident/filter pair" ) - q = _core.UnboundedQueue() + q = _core.UnboundedQueue[select.kevent]() self._registered[key] = q try: yield q @@ -115,7 +126,9 @@ def monitor_kevent(self, ident, filter): del self._registered[key] @_public - async def wait_kevent(self, ident, filter, abort_func): + async def wait_kevent( + self, ident: int, filter: int, abort_func: Callable[[RaiseCancelT], Abort] + ) -> Abort: key = (ident, filter) if key in self._registered: raise _core.BusyResourceError( @@ -123,22 +136,23 @@ async def wait_kevent(self, ident, filter, abort_func): ) self._registered[key] = _core.current_task() - def abort(raise_cancel): + def abort(raise_cancel: RaiseCancelT) -> Abort: r = abort_func(raise_cancel) if r is _core.Abort.SUCCEEDED: del self._registered[key] return r - return await _core.wait_task_rescheduled(abort) + # wait_task_rescheduled does not have its return type typed + return await _core.wait_task_rescheduled(abort) # type: ignore[no-any-return] - async def _wait_common(self, fd, filter): + async def _wait_common(self, fd: int | socket, filter: int) -> None: if not isinstance(fd, int): fd = fd.fileno() flags = select.KQ_EV_ADD | select.KQ_EV_ONESHOT event = select.kevent(fd, filter, flags) self._kqueue.control([event], 0) - def abort(_): + def abort(_: RaiseCancelT) -> Abort: event = select.kevent(fd, filter, select.KQ_EV_DELETE) try: self._kqueue.control([event], 0) @@ -163,15 +177,15 @@ def abort(_): await self.wait_kevent(fd, filter, abort) @_public - async def wait_readable(self, fd): + async def wait_readable(self, fd: int | socket) -> None: await self._wait_common(fd, select.KQ_FILTER_READ) @_public - async def wait_writable(self, fd): + async def wait_writable(self, fd: int | socket) -> None: await 
self._wait_common(fd, select.KQ_FILTER_WRITE) @_public - def notify_closing(self, fd): + def notify_closing(self, fd: int | socket) -> None: if not isinstance(fd, int): fd = fd.fileno() diff --git a/trio/_core/_io_windows.py b/trio/_core/_io_windows.py index 4084f72b6e..0130170af3 100644 --- a/trio/_core/_io_windows.py +++ b/trio/_core/_io_windows.py @@ -3,7 +3,7 @@ import socket import sys from contextlib import contextmanager -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Literal import attr from outcome import Value @@ -369,7 +369,7 @@ class _WindowsStatistics: tasks_waiting_write = attr.ib() tasks_waiting_overlapped = attr.ib() completion_key_monitors = attr.ib() - backend = attr.ib(default="windows") + backend: Literal["windows"] = attr.ib(init=False, default="windows") # Maximum number of events to dequeue from the completion port on each pass diff --git a/trio/_core/_run.py b/trio/_core/_run.py index ce8feb2827..7d247a2738 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -1433,7 +1433,7 @@ def in_main_thread(): class Runner: clock = attr.ib() instruments: Instruments = attr.ib() - io_manager = attr.ib() + io_manager: TheIOManager = attr.ib() ki_manager = attr.ib() strict_exception_groups = attr.ib() diff --git a/trio/_tests/tools/test_gen_exports.py b/trio/_tests/tools/test_gen_exports.py index 9436105fa4..7d2d6e99a1 100644 --- a/trio/_tests/tools/test_gen_exports.py +++ b/trio/_tests/tools/test_gen_exports.py @@ -2,9 +2,15 @@ import pytest -from trio._tools.gen_exports import create_passthrough_args, get_public_methods, process +from trio._tools.gen_exports import ( + File, + create_passthrough_args, + get_public_methods, + process, +) SOURCE = '''from _run import _public +from somewhere import Thing class Test: @_public @@ -14,7 +20,7 @@ def public_func(self): @ignore_this @_public @another_decorator - async def public_async_func(self): + async def public_async_func(self) -> Thing: pass # no doc string def not_public(self): @@ -24,6 +30,21 @@ async def not_public_async(self): pass ''' +IMPORT_1 = """\ +from somewhere import Thing +""" + +IMPORT_2 = """\ +from somewhere import Thing +import os +""" + +IMPORT_3 = """\ +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from somewhere import Thing +""" + def test_get_public_methods(): methods = list(get_public_methods(ast.parse(SOURCE))) @@ -48,18 +69,27 @@ def test_create_pass_through_args(): assert create_passthrough_args(func_node) == expected -def test_process(tmp_path): +@pytest.mark.parametrize("imports", ["", IMPORT_1, IMPORT_2, IMPORT_3]) +def test_process(tmp_path, imports): modpath = tmp_path / "_module.py" genpath = tmp_path / "_generated_module.py" modpath.write_text(SOURCE, encoding="utf-8") + file = File(modpath, "runner", platform="linux", imports=imports) assert not genpath.exists() with pytest.raises(SystemExit) as excinfo: - process([(str(modpath), "runner")], do_test=True) + process([file], do_test=True) assert excinfo.value.code == 1 - process([(str(modpath), "runner")], do_test=False) + process([file], do_test=False) assert genpath.exists() - process([(str(modpath), "runner")], do_test=True) + process([file], do_test=True) # But if we change the lookup path it notices with pytest.raises(SystemExit) as excinfo: - process([(str(modpath), "runner.io_manager")], do_test=True) + process( + [File(modpath, "runner.io_manager", platform="linux", imports=imports)], + do_test=True, + ) + assert excinfo.value.code == 1 + # Also if the platform is changed. 
+ with pytest.raises(SystemExit) as excinfo: + process([File(modpath, "runner", imports=imports)], do_test=True) assert excinfo.value.code == 1 diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 5a0c59e33e..ec345facf3 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9202551834130781, + "completenessScore": 0.9250398724082934, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 577, - "withUnknownType": 50 + "withKnownType": 580, + "withUnknownType": 47 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, diff --git a/trio/_tools/gen_exports.py b/trio/_tools/gen_exports.py index bae7e4f69d..f3ed2e26e7 100755 --- a/trio/_tools/gen_exports.py +++ b/trio/_tools/gen_exports.py @@ -18,18 +18,20 @@ from typing_extensions import TypeGuard import astor +import attr PREFIX = "_generated" HEADER = """# *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# isort: skip -from ._instrumentation import Instrument -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED -from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT - +# Don't lint this file, generation will not format this too nicely. +# isort: skip_file # fmt: off +from __future__ import annotations + +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import GLOBAL_RUN_CONTEXT """ FOOTER = """# fmt: on @@ -43,6 +45,14 @@ """ +@attr.define +class File: + path: Path + modname: str + platform: str = attr.field(default="", kw_only=True) + imports: str = attr.field(default="", kw_only=True) + + def is_function(node: ast.AST) -> TypeGuard[ast.FunctionDef | ast.AsyncFunctionDef]: """Check if the AST node is either a function or an async function @@ -94,18 +104,41 @@ def create_passthrough_args(funcdef: ast.FunctionDef | ast.AsyncFunctionDef) -> return "({})".format(", ".join(call_args)) -def gen_public_wrappers_source(source_path: Path, lookup_path: str) -> str: +def gen_public_wrappers_source(file: File) -> str: """Scan the given .py file for @_public decorators, and generate wrapper functions. """ - generated = [HEADER] - source = astor.code_to_ast.parse_file(source_path) + header = [HEADER] + + if file.imports: + header.append(file.imports) + if file.platform: + # Simple checks to avoid repeating imports. If this messes up, type checkers/tests will + # just give errors. 
+ if "TYPE_CHECKING" not in file.imports: + header.append("from typing import TYPE_CHECKING\n") + if "import sys" not in file.imports: # pragma: no cover + header.append("import sys\n") + header.append( + f'\nassert not TYPE_CHECKING or sys.platform=="{file.platform}"\n' + ) + + generated = ["".join(header)] + + source = astor.code_to_ast.parse_file(file.path) for method in get_public_methods(source): # Remove self from arguments assert method.args.args[0].arg == "self" del method.args.args[0] + for dec in method.decorator_list: # pragma: no cover + if isinstance(dec, ast.Name) and dec.id == "contextmanager": + is_cm = True + break + else: + is_cm = False + # Remove decorators method.decorator_list = [] @@ -122,10 +155,13 @@ def gen_public_wrappers_source(source_path: Path, lookup_path: str) -> str: # Create the function definition including the body func = astor.to_source(method, indent_with=" " * 4) + if is_cm: # pragma: no cover + func = func.replace("->Iterator", "->ContextManager") + # Create export function body template = TEMPLATE.format( " await " if isinstance(method, ast.AsyncFunctionDef) else " ", - lookup_path, + file.modname, method.name + new_args, ) @@ -149,12 +185,12 @@ def matches_disk_files(new_files: dict[str, str]) -> bool: return True -def process(sources_and_lookups: Iterable[tuple[Path, str]], *, do_test: bool) -> None: +def process(files: Iterable[File], *, do_test: bool) -> None: new_files = {} - for source_path, lookup_path in sources_and_lookups: - print("Scanning:", source_path) - new_source = gen_public_wrappers_source(source_path, lookup_path) - dirname, basename = os.path.split(source_path) + for file in files: + print("Scanning:", file.path) + new_source = gen_public_wrappers_source(file) + dirname, basename = os.path.split(file.path) new_path = os.path.join(dirname, PREFIX + basename) new_files[new_path] = new_source if do_test: @@ -186,15 +222,54 @@ def main() -> None: # pragma: no cover assert (source_root / "LICENSE").exists() core = source_root / "trio/_core" to_wrap = [ - (core / "_run.py", "runner"), - (core / "_instrumentation.py", "runner.instruments"), - (core / "_io_windows.py", "runner.io_manager"), - (core / "_io_epoll.py", "runner.io_manager"), - (core / "_io_kqueue.py", "runner.io_manager"), + File(core / "_run.py", "runner", imports=IMPORTS_RUN), + File( + core / "_instrumentation.py", + "runner.instruments", + imports=IMPORTS_INSTRUMENT, + ), + File(core / "_io_windows.py", "runner.io_manager", platform="win32"), + File( + core / "_io_epoll.py", + "runner.io_manager", + platform="linux", + imports=IMPORTS_EPOLL, + ), + File( + core / "_io_kqueue.py", + "runner.io_manager", + platform="darwin", + imports=IMPORTS_KQUEUE, + ), ] process(to_wrap, do_test=parsed_args.test) +IMPORTS_RUN = """\ +from ._run import _NO_SEND +""" +IMPORTS_INSTRUMENT = """\ +from ._instrumentation import Instrument +""" + +IMPORTS_EPOLL = """\ +from socket import socket +""" + +IMPORTS_KQUEUE = """\ +from typing import Callable, ContextManager, TYPE_CHECKING + +if TYPE_CHECKING: + import select + from socket import socket + + from ._traps import Abort, RaiseCancelT + + from .. 
import _core + +""" + + if __name__ == "__main__": # pragma: no cover main() From 4363a065d69965847a26c4a87bd1b5cc26080ef2 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Thu, 10 Aug 2023 10:09:10 -0500 Subject: [PATCH 130/162] Add typing for `_core/_asyncgens.py` (#2735) * Add typing for `_core/_asyncgens.py` --------- Co-authored-by: Spencer Brown Co-authored-by: CoolCat467 --- pyproject.toml | 3 ++- trio/_core/_asyncgens.py | 39 ++++++++++++++++++++++++++++----------- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fc7d37dbf3..f8d2f571e3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,6 +49,7 @@ disallow_untyped_defs = false # Fully typed, enable stricter checks module = [ "trio._abc", + "trio._core._asyncgens", "trio._core._entry_queue", "trio._core._generated_io_epoll", "trio._core._generated_io_kqueue", @@ -60,7 +61,7 @@ module = [ "trio._deprecate", "trio._dtls", "trio._file_io", - "trio._highlevel_open_tcp_stream.py", + "trio._highlevel_open_tcp_stream", "trio._ki", "trio._socket", "trio._sync", diff --git a/trio/_core/_asyncgens.py b/trio/_core/_asyncgens.py index 5f02ebe76d..4261328278 100644 --- a/trio/_core/_asyncgens.py +++ b/trio/_core/_asyncgens.py @@ -1,7 +1,11 @@ +from __future__ import annotations + import logging import sys import warnings import weakref +from types import AsyncGeneratorType +from typing import TYPE_CHECKING, NoReturn import attr @@ -12,6 +16,15 @@ # Used to log exceptions in async generator finalizers ASYNCGEN_LOGGER = logging.getLogger("trio.async_generator_errors") +if TYPE_CHECKING: + from typing import Set + + _WEAK_ASYNC_GEN_SET = weakref.WeakSet[AsyncGeneratorType[object, NoReturn]] + _ASYNC_GEN_SET = Set[AsyncGeneratorType[object, NoReturn]] +else: + _WEAK_ASYNC_GEN_SET = weakref.WeakSet + _ASYNC_GEN_SET = set + @attr.s(eq=False, slots=True) class AsyncGenerators: @@ -22,17 +35,17 @@ class AsyncGenerators: # asyncgens after the system nursery has been closed, it's a # regular set so we don't have to deal with GC firing at # unexpected times. - alive = attr.ib(factory=weakref.WeakSet) + alive: _WEAK_ASYNC_GEN_SET | _ASYNC_GEN_SET = attr.ib(factory=_WEAK_ASYNC_GEN_SET) # This collects async generators that get garbage collected during # the one-tick window between the system nursery closing and the # init task starting end-of-run asyncgen finalization. - trailing_needs_finalize = attr.ib(factory=set) + trailing_needs_finalize: _ASYNC_GEN_SET = attr.ib(factory=_ASYNC_GEN_SET) prev_hooks = attr.ib(init=False) - def install_hooks(self, runner): - def firstiter(agen): + def install_hooks(self, runner: _run.Runner) -> None: + def firstiter(agen: AsyncGeneratorType[object, NoReturn]) -> None: if hasattr(_run.GLOBAL_RUN_CONTEXT, "task"): self.alive.add(agen) else: @@ -46,7 +59,9 @@ def firstiter(agen): if self.prev_hooks.firstiter is not None: self.prev_hooks.firstiter(agen) - def finalize_in_trio_context(agen, agen_name): + def finalize_in_trio_context( + agen: AsyncGeneratorType[object, NoReturn], agen_name: str + ) -> None: try: runner.spawn_system_task( self._finalize_one, @@ -61,7 +76,7 @@ def finalize_in_trio_context(agen, agen_name): # have hit it. 
self.trailing_needs_finalize.add(agen) - def finalizer(agen): + def finalizer(agen: AsyncGeneratorType[object, NoReturn]) -> None: agen_name = name_asyncgen(agen) try: is_ours = not agen.ag_frame.f_locals.get("@trio_foreign_asyncgen") @@ -112,9 +127,9 @@ def finalizer(agen): ) self.prev_hooks = sys.get_asyncgen_hooks() - sys.set_asyncgen_hooks(firstiter=firstiter, finalizer=finalizer) + sys.set_asyncgen_hooks(firstiter=firstiter, finalizer=finalizer) # type: ignore[arg-type] # Finalizer doesn't use AsyncGeneratorType - async def finalize_remaining(self, runner): + async def finalize_remaining(self, runner: _run.Runner) -> None: # This is called from init after shutting down the system nursery. # The only tasks running at this point are init and # the run_sync_soon task, and since the system nursery is closed, @@ -170,14 +185,16 @@ async def finalize_remaining(self, runner): # all are gone. while self.alive: batch = self.alive - self.alive = set() + self.alive = _ASYNC_GEN_SET() for agen in batch: await self._finalize_one(agen, name_asyncgen(agen)) - def close(self): + def close(self) -> None: sys.set_asyncgen_hooks(*self.prev_hooks) - async def _finalize_one(self, agen, name): + async def _finalize_one( + self, agen: AsyncGeneratorType[object, NoReturn], name: object + ) -> None: try: # This shield ensures that finalize_asyncgen never exits # with an exception, not even a Cancelled. The inside From 709005123398dec08c8b76243e7d835f0bf1fb42 Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Mon, 14 Aug 2023 21:55:09 +1000 Subject: [PATCH 131/162] Add types to `_core._run` (#2733) add types to `_core._run` and fix various small issues that arose during review --------- Co-authored-by: jakkdl --- .coveragerc | 1 + docs/source/conf.py | 27 +- docs/source/reference-core.rst | 12 +- docs/source/reference-lowlevel.rst | 9 +- pyproject.toml | 4 +- trio/__init__.py | 1 + trio/_core/__init__.py | 1 + trio/_core/_generated_run.py | 32 +- trio/_core/_io_epoll.py | 9 +- trio/_core/_io_kqueue.py | 8 +- trio/_core/_io_windows.py | 22 +- trio/_core/_local.py | 11 +- trio/_core/_run.py | 471 +++++++++++++++++--------- trio/_core/_thread_cache.py | 6 +- trio/_dtls.py | 3 +- trio/_highlevel_open_tcp_listeners.py | 4 +- trio/_tests/test_exports.py | 13 + trio/_tests/verify_types.json | 27 +- trio/_tools/gen_exports.py | 17 +- trio/_util.py | 2 +- trio/lowlevel.py | 2 +- 21 files changed, 453 insertions(+), 229 deletions(-) diff --git a/.coveragerc b/.coveragerc index 431a02971b..5272237caf 100644 --- a/.coveragerc +++ b/.coveragerc @@ -23,6 +23,7 @@ exclude_lines = if _t.TYPE_CHECKING: if t.TYPE_CHECKING: @overload + class .*\bProtocol\b.*\): partial_branches = pragma: no branch diff --git a/docs/source/conf.py b/docs/source/conf.py index 39eda12ad5..06e4a7e6be 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -62,12 +62,16 @@ ("py:obj", "trio._abc.SendType"), ("py:obj", "trio._abc.T"), ("py:obj", "trio._abc.T_resource"), + ("py:class", "trio._core._run.StatusT"), + ("py:class", "trio._core._run.StatusT_co"), + ("py:class", "trio._core._run.StatusT_contra"), + ("py:class", "trio._core._run.RetT"), ("py:class", "trio._threads.T"), - # why aren't these found in stdlib? - ("py:class", "types.FrameType"), ("py:class", "P.args"), ("py:class", "P.kwargs"), ("py:class", "RetT"), + # why aren't these found in stdlib? 
+ ("py:class", "types.FrameType"), # TODO: figure out if you can link this to SSL ("py:class", "Context"), # TODO: temporary type @@ -90,6 +94,24 @@ } +def autodoc_process_signature( + app, what, name, obj, options, signature, return_annotation +): + """Modify found signatures to fix various issues.""" + if signature is not None: + signature = signature.replace("~_contextvars.Context", "~contextvars.Context") + if name == "trio.lowlevel.start_guest_run": + signature = signature.replace("Outcome", "~outcome.Outcome") + if name == "trio.lowlevel.RunVar": # Typevar is not useful here. + signature = signature.replace(": ~trio._core._local.T", "") + if "_NoValue" in signature: + # Strip the type from the union, make it look like = ... + signature = signature.replace(" | type[trio._core._local._NoValue]", "") + signature = signature.replace("", "...") + + return signature, return_annotation + + # XX hack the RTD theme until # https://github.com/rtfd/sphinx_rtd_theme/pull/382 # is shipped (should be in the release after 0.2.4) @@ -97,6 +119,7 @@ # though. def setup(app): app.add_css_file("hackrtd.css") + app.connect("autodoc-process-signature", autodoc_process_signature) # -- General configuration ------------------------------------------------ diff --git a/docs/source/reference-core.rst b/docs/source/reference-core.rst index 30994e4386..316a9c2ac3 100644 --- a/docs/source/reference-core.rst +++ b/docs/source/reference-core.rst @@ -916,11 +916,19 @@ The nursery API .. autoclass:: Nursery() - :members: + :members: child_tasks, parent_task + + .. automethod:: start(async_fn, *args, name = None) + + .. automethod:: start_soon(async_fn, *args, name = None) .. attribute:: TASK_STATUS_IGNORED + :type: TaskStatus - See :meth:`~Nursery.start`. + See :meth:`Nursery.start`. + +.. autoclass:: TaskStatus(Protocol[StatusT]) + :members: .. _task-local-storage: diff --git a/docs/source/reference-lowlevel.rst b/docs/source/reference-lowlevel.rst index faf07268cb..712a36ad04 100644 --- a/docs/source/reference-lowlevel.rst +++ b/docs/source/reference-lowlevel.rst @@ -49,7 +49,11 @@ attributes, :meth:`trio.Lock.statistics`, etc.). Here are some more. Global statistics ----------------- -.. autofunction:: current_statistics +.. function:: current_statistics() -> RunStatistics + + Returns an object containing run-loop-level debugging information: + +.. autoclass:: RunStatistics() The current clock @@ -534,9 +538,6 @@ Task API putting a task to sleep and then waking it up again. (See :func:`wait_task_rescheduled` for details.) -.. autoclass:: TaskStatus - :members: - .. _guest-mode: Using "guest mode" to run Trio on top of other event loops diff --git a/pyproject.toml b/pyproject.toml index f8d2f571e3..3121799cb7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,6 +51,7 @@ module = [ "trio._abc", "trio._core._asyncgens", "trio._core._entry_queue", + "trio._core._generated_run", "trio._core._generated_io_epoll", "trio._core._generated_io_kqueue", "trio._core._io_epoll", @@ -58,6 +59,7 @@ module = [ "trio._core._local", "trio._core._unbounded_queue", "trio._core._thread_cache", + "trio._core._run", "trio._deprecate", "trio._dtls", "trio._file_io", @@ -73,7 +75,7 @@ disallow_untyped_defs = true disallow_untyped_decorators = true disallow_any_generics = true disallow_any_decorated = true -disallow_any_unimported = true +disallow_any_unimported = false # Enable once outcome has stubs. 
disallow_subclassing_any = true [[tool.mypy.overrides]] diff --git a/trio/__init__.py b/trio/__init__.py index 277baa5339..be7de42cde 100644 --- a/trio/__init__.py +++ b/trio/__init__.py @@ -35,6 +35,7 @@ EndOfChannel as EndOfChannel, Nursery as Nursery, RunFinishedError as RunFinishedError, + TaskStatus as TaskStatus, TrioInternalError as TrioInternalError, WouldBlock as WouldBlock, current_effective_deadline as current_effective_deadline, diff --git a/trio/_core/__init__.py b/trio/_core/__init__.py index 8e42d2743b..b9bd0d8cc4 100644 --- a/trio/_core/__init__.py +++ b/trio/_core/__init__.py @@ -27,6 +27,7 @@ TASK_STATUS_IGNORED, CancelScope, Nursery, + RunStatistics, Task, TaskStatus, add_instrument, diff --git a/trio/_core/_generated_run.py b/trio/_core/_generated_run.py index 674c86aaec..35ecd45a1b 100644 --- a/trio/_core/_generated_run.py +++ b/trio/_core/_generated_run.py @@ -8,13 +8,21 @@ from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED from ._run import GLOBAL_RUN_CONTEXT -from ._run import _NO_SEND +from collections.abc import Awaitable, Callable +from typing import Any +from outcome import Outcome +import contextvars -def current_statistics(): - """Returns an object containing run-loop-level debugging information. +from ._run import _NO_SEND, RunStatistics, Task +from ._entry_queue import TrioToken +from .._abc import Clock - Currently the following fields are defined: + +def current_statistics() ->RunStatistics: + """Returns ``RunStatistics``, which contains run-loop-level debugging information. + + Currently, the following fields are defined: * ``tasks_living`` (int): The number of tasks that have been spawned and not yet exited. @@ -41,7 +49,7 @@ def current_statistics(): raise RuntimeError("must be called from async context") -def current_time(): +def current_time() ->float: """Returns the current time according to Trio's internal clock. Returns: @@ -58,7 +66,7 @@ def current_time(): raise RuntimeError("must be called from async context") -def current_clock(): +def current_clock() ->Clock: """Returns the current :class:`~trio.abc.Clock`.""" locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: @@ -67,7 +75,7 @@ def current_clock(): raise RuntimeError("must be called from async context") -def current_root_task(): +def current_root_task() ->(Task | None): """Returns the current root :class:`Task`. This is the task that is the ultimate parent of all other tasks. @@ -80,7 +88,7 @@ def current_root_task(): raise RuntimeError("must be called from async context") -def reschedule(task, next_send=_NO_SEND): +def reschedule(task: Task, next_send: Outcome[Any]=_NO_SEND) ->None: # type: ignore[has-type] """Reschedule the given task with the given :class:`outcome.Outcome`. @@ -105,7 +113,9 @@ def reschedule(task, next_send=_NO_SEND): raise RuntimeError("must be called from async context") -def spawn_system_task(async_fn, *args, name=None, context=None): +def spawn_system_task(async_fn: Callable[..., Awaitable[object]], *args: + object, name: object=None, context: (contextvars.Context | None)=None + ) ->Task: """Spawn a "system" task. System tasks have a few differences from regular tasks: @@ -164,7 +174,7 @@ def spawn_system_task(async_fn, *args, name=None, context=None): raise RuntimeError("must be called from async context") -def current_trio_token(): +def current_trio_token() ->TrioToken: """Retrieve the :class:`TrioToken` for the current call to :func:`trio.run`. 
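# A minimal usage sketch of the accessors annotated above, assuming the
# public trio / trio.lowlevel re-exports; the inline comments show the
# return types now declared in _generated_run.py.
import trio

async def show_runtime_info() -> None:
    print(trio.current_time())                 # float
    print(trio.lowlevel.current_clock())       # trio.abc.Clock
    print(trio.lowlevel.current_root_task())   # Task | None
    print(trio.lowlevel.current_trio_token())  # TrioToken

trio.run(show_runtime_info)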
@@ -176,7 +186,7 @@ def current_trio_token(): raise RuntimeError("must be called from async context") -async def wait_all_tasks_blocked(cushion=0.0): +async def wait_all_tasks_blocked(cushion: float=0.0) ->None: """Block until there are no runnable tasks. This is useful in testing code when you want to give other tasks a diff --git a/trio/_core/_io_epoll.py b/trio/_core/_io_epoll.py index c4a31f3722..0d247cae64 100644 --- a/trio/_core/_io_epoll.py +++ b/trio/_core/_io_epoll.py @@ -15,6 +15,8 @@ if TYPE_CHECKING: from socket import socket + from typing_extensions import TypeAlias + from .._core import Abort, RaiseCancelT @@ -28,6 +30,9 @@ class EpollWaiters: assert not TYPE_CHECKING or sys.platform == "linux" +EventResult: TypeAlias = "list[tuple[int, int]]" + + @attr.s(slots=True, eq=False, frozen=True) class _EpollStatistics: tasks_waiting_read: int = attr.ib() @@ -230,14 +235,14 @@ def force_wakeup(self) -> None: # Return value must be False-y IFF the timeout expired, NOT if any I/O # happened or force_wakeup was called. Otherwise it can be anything; gets # passed straight through to process_events. - def get_events(self, timeout: float) -> list[tuple[int, int]]: + def get_events(self, timeout: float) -> EventResult: # max_events must be > 0 or epoll gets cranky # accessing self._registered from a thread looks dangerous, but it's # OK because it doesn't matter if our value is a little bit off. max_events = max(1, len(self._registered)) return self._epoll.poll(timeout, max_events) - def process_events(self, events: list[tuple[int, int]]) -> None: + def process_events(self, events: EventResult) -> None: for fd, flags in events: if fd == self._force_wakeup_fd: self._force_wakeup.drain() diff --git a/trio/_core/_io_kqueue.py b/trio/_core/_io_kqueue.py index 0b0f8ee557..56a6559091 100644 --- a/trio/_core/_io_kqueue.py +++ b/trio/_core/_io_kqueue.py @@ -16,10 +16,14 @@ if TYPE_CHECKING: from socket import socket + from typing_extensions import TypeAlias + from .._core import Abort, RaiseCancelT, Task, UnboundedQueue assert not TYPE_CHECKING or (sys.platform != "linux" and sys.platform != "win32") +EventResult: TypeAlias = "list[select.kevent]" + @attr.s(slots=True, eq=False, frozen=True) class _KqueueStatistics: @@ -62,7 +66,7 @@ def close(self) -> None: def force_wakeup(self) -> None: self._force_wakeup.wakeup_thread_and_signal_safe() - def get_events(self, timeout: float) -> list[select.kevent]: + def get_events(self, timeout: float) -> EventResult: # max_events must be > 0 or kqueue gets cranky # and we generally want this to be strictly larger than the actual # number of events we get, so that we can tell that we've gotten @@ -79,7 +83,7 @@ def get_events(self, timeout: float) -> list[select.kevent]: # and loop back to the start return events - def process_events(self, events: list[select.kevent]) -> None: + def process_events(self, events: EventResult) -> None: for event in events: key = (event.ident, event.filter) if event.ident == self._force_wakeup_fd: diff --git a/trio/_core/_io_windows.py b/trio/_core/_io_windows.py index 0130170af3..9757d25b5f 100644 --- a/trio/_core/_io_windows.py +++ b/trio/_core/_io_windows.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum import itertools import socket @@ -29,6 +31,10 @@ assert not TYPE_CHECKING or sys.platform == "win32" +if TYPE_CHECKING: + from typing_extensions import TypeAlias +EventResult: TypeAlias = int + # There's a lot to be said about the overall design of a Windows event # loop. 
See # @@ -365,10 +371,10 @@ class AFDGroup: @attr.s(slots=True, eq=False, frozen=True) class _WindowsStatistics: - tasks_waiting_read = attr.ib() - tasks_waiting_write = attr.ib() - tasks_waiting_overlapped = attr.ib() - completion_key_monitors = attr.ib() + tasks_waiting_read: int = attr.ib() + tasks_waiting_write: int = attr.ib() + tasks_waiting_overlapped: int = attr.ib() + completion_key_monitors: int = attr.ib() backend: Literal["windows"] = attr.ib(init=False, default="windows") @@ -485,7 +491,7 @@ def force_wakeup(self): ) ) - def get_events(self, timeout): + def get_events(self, timeout: float) -> EventResult: received = ffi.new("PULONG") milliseconds = round(1000 * timeout) if timeout > 0 and milliseconds == 0: @@ -500,9 +506,11 @@ def get_events(self, timeout): if exc.winerror != ErrorCodes.WAIT_TIMEOUT: # pragma: no cover raise return 0 - return received[0] + result = received[0] + assert isinstance(result, int) + return result - def process_events(self, received): + def process_events(self, received: EventResult) -> None: for i in range(received): entry = self._events[i] if entry.lpCompletionKey == CKeys.AFD_POLL: diff --git a/trio/_core/_local.py b/trio/_core/_local.py index 39a2cff009..8286a5578f 100644 --- a/trio/_core/_local.py +++ b/trio/_core/_local.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Generic, TypeVar, final +from typing import Generic, TypeVar, cast, final # Runvar implementations import attr @@ -43,8 +43,7 @@ class RunVar(Generic[T], metaclass=Final): def get(self, default: T | type[_NoValue] = _NoValue) -> T: """Gets the value of this :class:`RunVar` for the current run call.""" try: - # not typed yet - return _run.GLOBAL_RUN_CONTEXT.runner._locals[self] # type: ignore[return-value, index] + return cast(T, _run.GLOBAL_RUN_CONTEXT.runner._locals[self]) except AttributeError: raise RuntimeError("Cannot be used outside of a run context") from None except KeyError: @@ -72,7 +71,7 @@ def set(self, value: T) -> RunVarToken[T]: # This can't fail, because if we weren't in Trio context then the # get() above would have failed. 
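# A small sketch of the now-generic RunVar[T] API, assuming the public
# trio.lowlevel.RunVar export: get()/set()/reset() round-trip a value that
# type checkers see as int.
import trio
from trio.lowlevel import RunVar

request_id: RunVar[int] = RunVar("request_id")

async def use_runvar() -> None:
    token = request_id.set(42)
    assert request_id.get() == 42
    request_id.reset(token)

trio.run(use_runvar)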
- _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = value # type: ignore[assignment, index] + _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = value return token def reset(self, token: RunVarToken[T]) -> None: @@ -92,9 +91,9 @@ def reset(self, token: RunVarToken[T]) -> None: previous = token.previous_value try: if previous is _NoValue: - _run.GLOBAL_RUN_CONTEXT.runner._locals.pop(self) # type: ignore[arg-type] + _run.GLOBAL_RUN_CONTEXT.runner._locals.pop(self) else: - _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = previous # type: ignore[index, assignment] + _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = previous except AttributeError: raise RuntimeError("Cannot be used outside of a run context") diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 7d247a2738..1ba88da85e 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -10,14 +10,30 @@ import threading import warnings from collections import deque -from collections.abc import Callable, Coroutine, Iterator +from collections.abc import ( + Awaitable, + Callable, + Coroutine, + Generator, + Iterator, + Sequence, +) from contextlib import AbstractAsyncContextManager, contextmanager from contextvars import copy_context from heapq import heapify, heappop, heappush from math import inf from time import perf_counter from types import TracebackType -from typing import TYPE_CHECKING, Any, NoReturn, TypeVar +from typing import ( + TYPE_CHECKING, + Any, + NoReturn, + Protocol, + TypeVar, + cast, + final, + overload, +) import attr from outcome import Error, Outcome, Value, capture @@ -25,6 +41,7 @@ from sortedcontainers import SortedDict from .. import _core +from .._abc import Clock, Instrument from .._util import Final, NoPublicConstructor, coroutine_or_error from ._asyncgens import AsyncGenerators from ._entry_queue import EntryQueue, TrioToken @@ -53,11 +70,23 @@ # An unfortunate name collision here with trio._util.Final from typing import Final as FinalT + from typing_extensions import Self + DEADLINE_HEAP_MIN_PRUNE_THRESHOLD: FinalT = 1000 -_NO_SEND: FinalT = object() +# Passed as a sentinel +_NO_SEND: FinalT = cast("Outcome[Any]", object()) FnT = TypeVar("FnT", bound="Callable[..., Any]") +StatusT = TypeVar("StatusT") +StatusT_co = TypeVar("StatusT_co", covariant=True) +StatusT_contra = TypeVar("StatusT_contra", contravariant=True) +RetT = TypeVar("RetT") + + +@final +class _NoStatus(metaclass=NoPublicConstructor): + """Sentinel for unset TaskStatus._value.""" # Decorator to mark methods public. This does nothing by itself, but @@ -120,7 +149,7 @@ def function_with_unique_name_xyzzy() -> NoReturn: @attr.s(frozen=True, slots=True) -class SystemClock: +class SystemClock(Clock): # Add a large random offset to our clock to ensure that if people # accidentally call time.perf_counter() directly or start comparing clocks # between different runs, then they'll notice the bug quickly: @@ -149,7 +178,9 @@ class IdlePrimedTypes(enum.Enum): ################################################################ -def collapse_exception_group(excgroup): +def collapse_exception_group( + excgroup: BaseExceptionGroup[BaseException], +) -> BaseException: """Recursively collapse any single-exception groups into that single contained exception. @@ -169,7 +200,8 @@ def collapse_exception_group(excgroup): ) return exceptions[0] elif modified: - return excgroup.derive(exceptions) + # derive() returns Any for some reason. 
+ return excgroup.derive(exceptions) # type: ignore[no-any-return] else: return excgroup @@ -184,18 +216,18 @@ class Deadlines: """ # Heap of (deadline, id(CancelScope), CancelScope) - _heap = attr.ib(factory=list) + _heap: list[tuple[float, int, CancelScope]] = attr.ib(factory=list) # Count of active deadlines (those that haven't been changed) - _active = attr.ib(default=0) + _active: int = attr.ib(default=0) - def add(self, deadline, cancel_scope): + def add(self, deadline: float, cancel_scope: CancelScope) -> None: heappush(self._heap, (deadline, id(cancel_scope), cancel_scope)) self._active += 1 - def remove(self, deadline, cancel_scope): + def remove(self, deadline: float, cancel_scope: CancelScope) -> None: self._active -= 1 - def next_deadline(self): + def next_deadline(self) -> float: while self._heap: deadline, _, cancel_scope = self._heap[0] if deadline == cancel_scope._registered_deadline: @@ -205,7 +237,7 @@ def next_deadline(self): heappop(self._heap) return inf - def _prune(self): + def _prune(self) -> None: # In principle, it's possible for a cancel scope to toggle back and # forth repeatedly between the same two deadlines, and end up with # lots of stale entries that *look* like they're still active, because @@ -226,7 +258,7 @@ def _prune(self): heapify(pruned_heap) self._heap = pruned_heap - def expire(self, now): + def expire(self, now: float) -> bool: did_something = False while self._heap and self._heap[0][0] <= now: deadline, _, cancel_scope = heappop(self._heap) @@ -377,14 +409,14 @@ def close(self) -> None: child.recalculate() @property - def parent_cancellation_is_visible_to_us(self): + def parent_cancellation_is_visible_to_us(self) -> bool: return ( self._parent is not None and not self._scope.shield and self._parent.effectively_cancelled ) - def recalculate(self): + def recalculate(self) -> None: # This does a depth-first traversal over this and descendent cancel # statuses, to ensure their state is up-to-date. It's basically a # recursive algorithm, but we use an explicit stack to avoid any @@ -403,7 +435,7 @@ def recalculate(self): task._attempt_delivery_of_any_pending_cancel() todo.extend(current._children) - def _mark_abandoned(self): + def _mark_abandoned(self) -> None: self.abandoned_by_misnesting = True for child in self._children: child._mark_abandoned() @@ -440,6 +472,7 @@ def effective_deadline(self) -> float: """ +@final @attr.s(eq=False, repr=False, slots=True) class CancelScope(metaclass=Final): """A *cancellation scope*: the link between a unit of cancellable @@ -491,7 +524,7 @@ class CancelScope(metaclass=Final): _shield: bool = attr.ib(default=False, kw_only=True) @enable_ki_protection - def __enter__(self): + def __enter__(self) -> Self: task = _core.current_task() if self._has_been_entered: raise RuntimeError( @@ -505,7 +538,7 @@ def __enter__(self): task._activate_cancel_status(self._cancel_status) return self - def _close(self, exc): + def _close(self, exc: BaseException | None) -> BaseException | None: if self._cancel_status is None: new_exc = RuntimeError( "Cancel scope stack corrupted: attempted to exit {!r} " @@ -780,28 +813,57 @@ def cancel_called(self) -> bool: ################################################################ +class TaskStatus(Protocol[StatusT_contra]): + """The interface provided by :meth:`Nursery.start()` to the spawned task. + + This is provided via the ``task_status`` keyword-only parameter. + """ + + @overload + def started(self: TaskStatus[None]) -> None: + ... 
+ + @overload + def started(self, value: StatusT_contra) -> None: + ... + + def started(self, value: StatusT_contra | None = None) -> None: + """Tasks call this method to indicate that they have initialized. + + See `nursery.start() ` for more information. + """ + + # This code needs to be read alongside the code from Nursery.start to make # sense. @attr.s(eq=False, hash=False, repr=False) -class TaskStatus(metaclass=Final): - _old_nursery = attr.ib() - _new_nursery = attr.ib() - _called_started = attr.ib(default=False) - _value = attr.ib(default=None) +class _TaskStatus(TaskStatus[StatusT]): + _old_nursery: Nursery = attr.ib() + _new_nursery: Nursery = attr.ib() + # NoStatus is a sentinel. + _value: StatusT | type[_NoStatus] = attr.ib(default=_NoStatus) - def __repr__(self): + def __repr__(self) -> str: return f"" - def started(self, value=None): - if self._called_started: + @overload + def started(self: _TaskStatus[None]) -> None: + ... + + @overload + def started(self: _TaskStatus[StatusT], value: StatusT) -> None: + ... + + def started(self, value: StatusT | None = None) -> None: + if self._value is not _NoStatus: raise RuntimeError("called 'started' twice on the same task status") - self._called_started = True - self._value = value + self._value = cast(StatusT, value) # If None, StatusT == None # If the old nursery is cancelled, then quietly quit now; the child # will eventually exit on its own, and we don't want to risk moving # children that might have propagating Cancelled exceptions into # a place with no cancelled cancel scopes to catch them. + assert self._old_nursery._cancel_status is not None if self._old_nursery._cancel_status.effectively_cancelled: return @@ -845,6 +907,15 @@ def started(self, value=None): self._old_nursery._check_nursery_closed() +class _NurseryStartFunc(Protocol[StatusT_co]): + """Type of functions passed to `nursery.start() `.""" + + def __call__( + self, *args: Any, task_status: TaskStatus[StatusT_co] + ) -> Awaitable[object]: + ... + + @attr.s class NurseryManager: """Nursery context manager. @@ -934,6 +1005,7 @@ def open_nursery( return NurseryManager(strict_exception_groups=strict_exception_groups) +@final class Nursery(metaclass=NoPublicConstructor): """A context which may be used to spawn (or cancel) child tasks. @@ -997,20 +1069,22 @@ def _add_exc(self, exc: BaseException) -> None: self._pending_excs.append(exc) self.cancel_scope.cancel() - def _check_nursery_closed(self): + def _check_nursery_closed(self) -> None: if not any([self._nested_child_running, self._children, self._pending_starts]): self._closed = True if self._parent_waiting_in_aexit: self._parent_waiting_in_aexit = False GLOBAL_RUN_CONTEXT.runner.reschedule(self._parent_task) - def _child_finished(self, task, outcome): + def _child_finished(self, task: Task, outcome: Outcome[Any]) -> None: self._children.remove(task) if isinstance(outcome, Error): self._add_exc(outcome.error) self._check_nursery_closed() - async def _nested_child_finished(self, nested_child_exc): + async def _nested_child_finished( + self, nested_child_exc: BaseException | None + ) -> BaseException | None: # Returns MultiError instance (or any exception if the nursery is in loose mode # and there is just one contained exception) if there are pending exceptions if nested_child_exc is not None: @@ -1022,7 +1096,7 @@ async def _nested_child_finished(self, nested_child_exc): # If we get cancelled (or have an exception injected, like # KeyboardInterrupt), then save that, but still wait until our # children finish. 
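# A hedged sketch of the TaskStatus[StatusT] contract being typed here,
# assuming the trio.TaskStatus re-export added in this patch: the child
# passes a value to task_status.started(), and nursery.start() returns it
# (the port number is a made-up placeholder).
import trio

async def fake_listener(
    *, task_status: trio.TaskStatus[int] = trio.TASK_STATUS_IGNORED
) -> None:
    task_status.started(8080)  # pretend a socket was bound first
    await trio.sleep_forever()

async def demo_start() -> None:
    async with trio.open_nursery() as nursery:
        port = await nursery.start(fake_listener)
        assert port == 8080
        nursery.cancel_scope.cancel()

trio.run(demo_start)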
- def aborted(raise_cancel): + def aborted(raise_cancel: _core.RaiseCancelT) -> Abort: self._add_exc(capture(raise_cancel).error) return Abort.FAILED @@ -1048,8 +1122,15 @@ def aborted(raise_cancel): # avoid a garbage cycle # (see test_nursery_cancel_doesnt_create_cyclic_garbage) del self._pending_excs + return None - def start_soon(self, async_fn, *args, name=None): + def start_soon( + self, + # TODO: TypeVarTuple + async_fn: Callable[..., Awaitable[object]], + *args: object, + name: object = None, + ) -> None: """Creates a child task, scheduling ``await async_fn(*args)``. If you want to run a function and immediately wait for its result, @@ -1091,7 +1172,9 @@ def start_soon(self, async_fn, *args, name=None): """ GLOBAL_RUN_CONTEXT.runner.spawn_impl(async_fn, args, self, name) - async def start(self, async_fn, *args, name=None): + async def start( + self, async_fn: _NurseryStartFunc[StatusT], *args: object, name: object = None + ) -> StatusT: r"""Creates and initializes a child task. Like :meth:`start_soon`, but blocks until the new task has @@ -1100,7 +1183,7 @@ async def start(self, async_fn, *args, name=None): The ``async_fn`` must accept a ``task_status`` keyword argument, and it must make sure that it (or someone) eventually calls - ``task_status.started()``. + :meth:`task_status.started() `. The conventional way to define ``async_fn`` is like:: @@ -1113,31 +1196,30 @@ async def async_fn(arg1, arg2, *, task_status=trio.TASK_STATUS_IGNORED): a do-nothing ``started`` method. This way your function supports being called either like ``await nursery.start(async_fn, arg1, arg2)`` or directly like ``await async_fn(arg1, arg2)``, and - either way it can call ``task_status.started()`` without - worrying about which mode it's in. Defining your function like + either way it can call :meth:`task_status.started() ` + without worrying about which mode it's in. Defining your function like this will make it obvious to readers that it supports being used in both modes. - Before the child calls ``task_status.started()``, it's - effectively run underneath the call to :meth:`start`: if it + Before the child calls :meth:`task_status.started() `, + it's effectively run underneath the call to :meth:`start`: if it raises an exception then that exception is reported by :meth:`start`, and does *not* propagate out of the nursery. If :meth:`start` is cancelled, then the child task is also cancelled. - When the child calls ``task_status.started()``, it's moved out - from underneath :meth:`start` and into the given nursery. + When the child calls :meth:`task_status.started() `, + it's moved out from underneath :meth:`start` and into the given nursery. - If the child task passes a value to - ``task_status.started(value)``, then :meth:`start` returns this - value. Otherwise it returns ``None``. + If the child task passes a value to :meth:`task_status.started(value) `, + then :meth:`start` returns this value. Otherwise, it returns ``None``. """ if self._closed: raise RuntimeError("Nursery is closed to new arrivals") try: self._pending_starts += 1 async with open_nursery() as old_nursery: - task_status = TaskStatus(old_nursery, self) + task_status: _TaskStatus[StatusT] = _TaskStatus(old_nursery, self) thunk = functools.partial(async_fn, task_status=task_status) task = GLOBAL_RUN_CONTEXT.runner.spawn_impl( thunk, args, old_nursery, name @@ -1148,9 +1230,9 @@ async def async_fn(arg1, arg2, *, task_status=trio.TASK_STATUS_IGNORED): # If we get here, then the child either got reparented or exited # normally. 
The complicated logic is all in TaskStatus.started(). # (Any exceptions propagate directly out of the above.) - if not task_status._called_started: + if task_status._value is _NoStatus: raise RuntimeError("child exited without calling task_status.started()") - return task_status._value + return task_status._value # type: ignore[return-value] # Mypy doesn't narrow yet. finally: self._pending_starts -= 1 self._check_nursery_closed() @@ -1164,11 +1246,12 @@ def __del__(self) -> None: ################################################################ +@final @attr.s(eq=False, hash=False, repr=False, slots=True) class Task(metaclass=NoPublicConstructor): _parent_nursery: Nursery | None = attr.ib() coro: Coroutine[Any, Outcome[object], Any] = attr.ib() - _runner = attr.ib() + _runner: Runner = attr.ib() name: str = attr.ib() context: contextvars.Context = attr.ib() _counter: int = attr.ib(init=False, factory=itertools.count().__next__) @@ -1184,11 +1267,9 @@ class Task(metaclass=NoPublicConstructor): # tracebacks with extraneous frames. # - for scheduled tasks, custom_sleep_data is None # Tasks start out unscheduled. - _next_send_fn = attr.ib(default=None) - _next_send = attr.ib(default=None) - _abort_func: Callable[[Callable[[], NoReturn]], Abort] | None = attr.ib( - default=None - ) + _next_send_fn: Callable[[Any], object] = attr.ib(default=None) + _next_send: Outcome[Any] | None | BaseException = attr.ib(default=None) + _abort_func: Callable[[_core.RaiseCancelT], Abort] | None = attr.ib(default=None) custom_sleep_data: Any = attr.ib(default=None) # For introspection and nursery.start() @@ -1256,7 +1337,7 @@ def print_stack_for_task(task): print("".join(ss.format())) """ - # ignore static typing as we're doing lots of dynamic introspection + # Ignore static typing as we're doing lots of dynamic introspection coro: Any = self.coro while coro is not None: if hasattr(coro, "cr_frame"): @@ -1290,18 +1371,19 @@ def print_stack_for_task(task): # The CancelStatus object that is currently active for this task. # Don't change this directly; instead, use _activate_cancel_status(). + # This can be None, but only in the init task. 
_cancel_status: CancelStatus = attr.ib(default=None, repr=False) - def _activate_cancel_status(self, cancel_status: CancelStatus) -> None: + def _activate_cancel_status(self, cancel_status: CancelStatus | None) -> None: if self._cancel_status is not None: self._cancel_status._tasks.remove(self) - self._cancel_status = cancel_status + self._cancel_status = cancel_status # type: ignore[assignment] if self._cancel_status is not None: self._cancel_status._tasks.add(self) if self._cancel_status.effectively_cancelled: self._attempt_delivery_of_any_pending_cancel() - def _attempt_abort(self, raise_cancel: Callable[[], NoReturn]) -> None: + def _attempt_abort(self, raise_cancel: _core.RaiseCancelT) -> None: # Either the abort succeeds, in which case we will reschedule the # task, or else it fails, in which case it will worry about # rescheduling itself (hopefully eventually calling reraise to raise @@ -1326,7 +1408,7 @@ def _attempt_delivery_of_any_pending_cancel(self) -> None: if not self._cancel_status.effectively_cancelled: return - def raise_cancel(): + def raise_cancel() -> NoReturn: raise Cancelled._create() self._attempt_abort(raise_cancel) @@ -1356,13 +1438,35 @@ class RunContext(threading.local): GLOBAL_RUN_CONTEXT: FinalT = RunContext() -@attr.s(frozen=True) -class _RunStatistics: - tasks_living = attr.ib() - tasks_runnable = attr.ib() - seconds_to_next_deadline = attr.ib() - io_statistics = attr.ib() - run_sync_soon_queue_size = attr.ib() +@attr.frozen +class RunStatistics: + """An object containing run-loop-level debugging information. + + Currently, the following fields are defined: + + * ``tasks_living`` (int): The number of tasks that have been spawned + and not yet exited. + * ``tasks_runnable`` (int): The number of tasks that are currently + queued on the run queue (as opposed to blocked waiting for something + to happen). + * ``seconds_to_next_deadline`` (float): The time until the next + pending cancel scope deadline. May be negative if the deadline has + expired but we haven't yet processed cancellations. May be + :data:`~math.inf` if there are no pending deadlines. + * ``run_sync_soon_queue_size`` (int): The number of + unprocessed callbacks queued via + :meth:`trio.lowlevel.TrioToken.run_sync_soon`. + * ``io_statistics`` (object): Some statistics from Trio's I/O + backend. This always has an attribute ``backend`` which is a string + naming which operating-system-specific I/O backend is in use; the + other attributes vary between backends. + """ + + tasks_living: int + tasks_runnable: int + seconds_to_next_deadline: float + io_statistics: IOStatistics + run_sync_soon_queue_size: int # This holds all the state that gets trampolined back and forth between @@ -1386,18 +1490,19 @@ class _RunStatistics: # worker thread. 
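# A quick sketch of reading the RunStatistics fields documented above,
# assuming the trio.lowlevel.current_statistics() wrapper.
import trio

async def show_stats() -> None:
    stats = trio.lowlevel.current_statistics()
    print(stats.tasks_living, stats.tasks_runnable)
    print(stats.seconds_to_next_deadline, stats.run_sync_soon_queue_size)
    print(stats.io_statistics.backend)  # "epoll", "kqueue" or "windows"

trio.run(show_stats)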
@attr.s(eq=False, hash=False, slots=True) class GuestState: - runner = attr.ib() - run_sync_soon_threadsafe = attr.ib() - run_sync_soon_not_threadsafe = attr.ib() - done_callback = attr.ib() - unrolled_run_gen = attr.ib() - _value_factory: Callable[[], Value] = lambda: Value(None) - unrolled_run_next_send = attr.ib(factory=_value_factory, type=Outcome) - - def guest_tick(self): + runner: Runner = attr.ib() + run_sync_soon_threadsafe: Callable[[Callable[[], object]], object] = attr.ib() + run_sync_soon_not_threadsafe: Callable[[Callable[[], object]], object] = attr.ib() + done_callback: Callable[[Outcome[Any]], object] = attr.ib() + unrolled_run_gen: Generator[float, EventResult, None] = attr.ib() + _value_factory: Callable[[], Value[Any]] = lambda: Value(None) + unrolled_run_next_send: Outcome[Any] = attr.ib(factory=_value_factory) + + def guest_tick(self) -> None: try: timeout = self.unrolled_run_next_send.send(self.unrolled_run_gen) except StopIteration: + assert self.runner.main_task_outcome is not None self.done_callback(self.runner.main_task_outcome) return except TrioInternalError as exc: @@ -1405,7 +1510,9 @@ def guest_tick(self): return # Optimization: try to skip going into the thread if we can avoid it - events_outcome = capture(self.runner.io_manager.get_events, 0) + events_outcome: Value[EventResult] | Error = capture( + self.runner.io_manager.get_events, 0 + ) if timeout <= 0 or isinstance(events_outcome, Error) or events_outcome.value: # No need to go into the thread self.unrolled_run_next_send = events_outcome @@ -1415,11 +1522,11 @@ def guest_tick(self): # Need to go into the thread and call get_events() there self.runner.guest_tick_scheduled = False - def get_events(): + def get_events() -> EventResult: return self.runner.io_manager.get_events(timeout) - def deliver(events_outcome): - def in_main_thread(): + def deliver(events_outcome: Outcome[EventResult]) -> None: + def in_main_thread() -> None: self.unrolled_run_next_send = events_outcome self.runner.guest_tick_scheduled = True self.guest_tick() @@ -1431,44 +1538,44 @@ def in_main_thread(): @attr.s(eq=False, hash=False, slots=True) class Runner: - clock = attr.ib() + clock: Clock = attr.ib() instruments: Instruments = attr.ib() io_manager: TheIOManager = attr.ib() - ki_manager = attr.ib() - strict_exception_groups = attr.ib() + ki_manager: KIManager = attr.ib() + strict_exception_groups: bool = attr.ib() # Run-local values, see _local.py - _locals = attr.ib(factory=dict) + _locals: dict[_core.RunVar[Any], Any] = attr.ib(factory=dict) runq: deque[Task] = attr.ib(factory=deque) - tasks = attr.ib(factory=set) + tasks: set[Task] = attr.ib(factory=set) - deadlines = attr.ib(factory=Deadlines) + deadlines: Deadlines = attr.ib(factory=Deadlines) - init_task = attr.ib(default=None) - system_nursery = attr.ib(default=None) - system_context = attr.ib(default=None) - main_task = attr.ib(default=None) - main_task_outcome = attr.ib(default=None) + init_task: Task | None = attr.ib(default=None) + system_nursery: Nursery | None = attr.ib(default=None) + system_context: contextvars.Context = attr.ib(kw_only=True) + main_task: Task | None = attr.ib(default=None) + main_task_outcome: Outcome[Any] | None = attr.ib(default=None) - entry_queue = attr.ib(factory=EntryQueue) - trio_token = attr.ib(default=None) - asyncgens = attr.ib(factory=AsyncGenerators) + entry_queue: EntryQueue = attr.ib(factory=EntryQueue) + trio_token: TrioToken | None = attr.ib(default=None) + asyncgens: AsyncGenerators = attr.ib(factory=AsyncGenerators) # If 
everything goes idle for this long, we call clock._autojump() - clock_autojump_threshold = attr.ib(default=inf) + clock_autojump_threshold: float = attr.ib(default=inf) # Guest mode stuff - is_guest = attr.ib(default=False) - guest_tick_scheduled = attr.ib(default=False) + is_guest: bool = attr.ib(default=False) + guest_tick_scheduled: bool = attr.ib(default=False) - def force_guest_tick_asap(self): + def force_guest_tick_asap(self) -> None: if self.guest_tick_scheduled: return self.guest_tick_scheduled = True self.io_manager.force_wakeup() - def close(self): + def close(self) -> None: self.io_manager.close() self.entry_queue.close() self.asyncgens.close() @@ -1478,10 +1585,10 @@ def close(self): self.ki_manager.close() @_public - def current_statistics(self): - """Returns an object containing run-loop-level debugging information. + def current_statistics(self) -> RunStatistics: + """Returns ``RunStatistics``, which contains run-loop-level debugging information. - Currently the following fields are defined: + Currently, the following fields are defined: * ``tasks_living`` (int): The number of tasks that have been spawned and not yet exited. @@ -1502,7 +1609,7 @@ def current_statistics(self): """ seconds_to_next_deadline = self.deadlines.next_deadline() - self.current_time() - return _RunStatistics( + return RunStatistics( tasks_living=len(self.tasks), tasks_runnable=len(self.runq), seconds_to_next_deadline=seconds_to_next_deadline, @@ -1511,7 +1618,7 @@ def current_statistics(self): ) @_public - def current_time(self): + def current_time(self) -> float: """Returns the current time according to Trio's internal clock. Returns: @@ -1524,12 +1631,12 @@ def current_time(self): return self.clock.current_time() @_public - def current_clock(self): + def current_clock(self) -> Clock: """Returns the current :class:`~trio.abc.Clock`.""" return self.clock @_public - def current_root_task(self): + def current_root_task(self) -> Task | None: """Returns the current root :class:`Task`. This is the task that is the ultimate parent of all other tasks. @@ -1541,8 +1648,10 @@ def current_root_task(self): # Core task handling primitives ################ - @_public - def reschedule(self, task, next_send=_NO_SEND): + @_public # Type-ignore due to use of Any here. + def reschedule( # type: ignore[misc] + self, task: Task, next_send: Outcome[Any] = _NO_SEND + ) -> None: """Reschedule the given task with the given :class:`outcome.Outcome`. @@ -1576,8 +1685,16 @@ def reschedule(self, task, next_send=_NO_SEND): self.instruments.call("task_scheduled", task) def spawn_impl( - self, async_fn, args, nursery, name, *, system_task=False, context=None - ): + self, + # TODO: TypeVarTuple + async_fn: Callable[..., Awaitable[object]], + args: tuple[object, ...], + nursery: Nursery | None, + name: object, + *, + system_task: bool = False, + context: contextvars.Context | None = None, + ) -> Task: ###### # Make sure the nursery is in working order ###### @@ -1607,7 +1724,8 @@ def spawn_impl( # Call the function and get the coroutine object, while giving helpful # errors for common mistakes. 
###### - coro = context.run(coroutine_or_error, async_fn, *args) + # TODO: resolve the type: ignore when implementing TypeVarTuple + coro = context.run(coroutine_or_error, async_fn, *args) # type: ignore[arg-type] if name is None: name = async_fn @@ -1615,13 +1733,13 @@ def spawn_impl( name = name.func if not isinstance(name, str): try: - name = f"{name.__module__}.{name.__qualname__}" + name = f"{name.__module__}.{name.__qualname__}" # type: ignore[attr-defined] except AttributeError: name = repr(name) if not hasattr(coro, "cr_frame"): # This async function is implemented in C or Cython - async def python_wrapper(orig_coro): + async def python_wrapper(orig_coro: Awaitable[RetT]) -> RetT: return await orig_coro coro = python_wrapper(coro) @@ -1643,10 +1761,11 @@ async def python_wrapper(orig_coro): self.instruments.call("task_spawned", task) # Special case: normally next_send should be an Outcome, but for the # very first send we have to send a literal unboxed None. - self.reschedule(task, None) + # TODO: remove [unused-ignore] when Outcome is typed + self.reschedule(task, None) # type: ignore[arg-type, unused-ignore] return task - def task_exited(self, task, outcome): + def task_exited(self, task: Task, outcome: Outcome[Any]) -> None: if ( task._cancel_status is not None and task._cancel_status.abandoned_by_misnesting @@ -1685,6 +1804,7 @@ def task_exited(self, task, outcome): if task is self.main_task: self.main_task_outcome = outcome outcome = Value(None) + assert task._parent_nursery is not None, task task._parent_nursery._child_finished(task, outcome) if "task_exited" in self.instruments: @@ -1694,8 +1814,15 @@ def task_exited(self, task, outcome): # System tasks and init ################ - @_public - def spawn_system_task(self, async_fn, *args, name=None, context=None): + @_public # Type-ignore due to use of Any here. + def spawn_system_task( # type: ignore[misc] + self, + # TODO: TypeVarTuple + async_fn: Callable[..., Awaitable[object]], + *args: object, + name: object = None, + context: contextvars.Context | None = None, + ) -> Task: """Spawn a "system" task. System tasks have a few differences from regular tasks: @@ -1756,7 +1883,12 @@ def spawn_system_task(self, async_fn, *args, name=None, context=None): context=context, ) - async def init(self, async_fn, args): + async def init( + # TODO: TypeVarTuple + self, + async_fn: Callable[..., Awaitable[object]], + args: tuple[object, ...], + ) -> None: # run_sync_soon task runs here: async with open_nursery() as run_sync_soon_nursery: # All other system tasks run here: @@ -1794,7 +1926,7 @@ async def init(self, async_fn, args): ################ @_public - def current_trio_token(self): + def current_trio_token(self) -> TrioToken: """Retrieve the :class:`TrioToken` for the current call to :func:`trio.run`. @@ -1807,7 +1939,7 @@ def current_trio_token(self): # KI handling ################ - ki_pending = attr.ib(default=False) + ki_pending: bool = attr.ib(default=False) # deliver_ki is broke. Maybe move all the actual logic and state into # RunToken, and we'll only have one instance per runner? But then we can't @@ -1816,14 +1948,14 @@ def current_trio_token(self): # keep the class public so people can isinstance() it if they want. 
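# A minimal sketch of the spawn_system_task() signature typed above, assuming
# the trio.lowlevel wrapper: system tasks run under the system nursery and
# are cancelled automatically when the run finishes.
import trio

async def heartbeat() -> None:
    while True:
        await trio.sleep(0.1)

async def demo_system_task() -> None:
    trio.lowlevel.spawn_system_task(heartbeat, name="heartbeat")
    await trio.sleep(0.3)  # heartbeat is cancelled once this task returns

trio.run(demo_system_task)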
# This gets called from signal context - def deliver_ki(self): + def deliver_ki(self) -> None: self.ki_pending = True try: self.entry_queue.run_sync_soon(self._deliver_ki_cb) except RunFinishedError: pass - def _deliver_ki_cb(self): + def _deliver_ki_cb(self) -> None: if not self.ki_pending: return # Can't happen because main_task and run_sync_soon_task are created at @@ -1840,10 +1972,12 @@ def _deliver_ki_cb(self): # Quiescing ################ - waiting_for_idle = attr.ib(factory=SortedDict) + # sortedcontainers doesn't have types, and is reportedly very hard to type: + # https://github.com/grantjenks/python-sortedcontainers/issues/68 + waiting_for_idle: Any = attr.ib(factory=SortedDict) @_public - async def wait_all_tasks_blocked(self, cushion=0.0): + async def wait_all_tasks_blocked(self, cushion: float = 0.0) -> None: """Block until there are no runnable tasks. This is useful in testing code when you want to give other tasks a @@ -1905,7 +2039,7 @@ async def test_lock_fairness(): key = (cushion, id(task)) self.waiting_for_idle[key] = task - def abort(_): + def abort(_: _core.RaiseCancelT) -> Abort: del self.waiting_for_idle[key] return Abort.SUCCEEDED @@ -1980,11 +2114,11 @@ def abort(_): def setup_runner( - clock, - instruments, - restrict_keyboard_interrupt_to_checkpoints, - strict_exception_groups, -): + clock: Clock | None, + instruments: Sequence[Instrument], + restrict_keyboard_interrupt_to_checkpoints: bool, + strict_exception_groups: bool, +) -> Runner: """Create a Runner object and install it as the GLOBAL_RUN_CONTEXT.""" # It wouldn't be *hard* to support nested calls to run(), but I can't # think of a single good reason for it, so let's be conservative for @@ -1994,14 +2128,14 @@ def setup_runner( if clock is None: clock = SystemClock() - instruments = Instruments(instruments) + instrument_group = Instruments(instruments) io_manager = TheIOManager() system_context = copy_context() ki_manager = KIManager() runner = Runner( clock=clock, - instruments=instruments, + instruments=instrument_group, io_manager=io_manager, system_context=system_context, ki_manager=ki_manager, @@ -2018,13 +2152,13 @@ def setup_runner( def run( - async_fn, - *args, - clock=None, - instruments=(), + async_fn: Callable[..., RetT], + *args: object, + clock: Clock | None = None, + instruments: Sequence[Instrument] = (), restrict_keyboard_interrupt_to_checkpoints: bool = False, strict_exception_groups: bool = False, -): +) -> RetT: """Run a Trio-flavored async function, and return the result. Calling:: @@ -2107,7 +2241,8 @@ def run( ) gen = unrolled_run(runner, async_fn, args) - next_send = None + # Need to send None in the first time. + next_send: EventResult = None # type: ignore[assignment] while True: try: timeout = gen.send(next_send) @@ -2117,23 +2252,26 @@ def run( # Inlined copy of runner.main_task_outcome.unwrap() to avoid # cluttering every single Trio traceback with an extra frame. 
if isinstance(runner.main_task_outcome, Value): - return runner.main_task_outcome.value - else: + return cast(RetT, runner.main_task_outcome.value) + elif isinstance(runner.main_task_outcome, Error): raise runner.main_task_outcome.error + else: # pragma: no cover + raise AssertionError(runner.main_task_outcome) def start_guest_run( - async_fn, - *args, - run_sync_soon_threadsafe, - done_callback, - run_sync_soon_not_threadsafe=None, + async_fn: Callable[..., Awaitable[RetT]], + *args: object, + run_sync_soon_threadsafe: Callable[[Callable[[], object]], object], + done_callback: Callable[[Outcome[RetT]], object], + run_sync_soon_not_threadsafe: Callable[[Callable[[], object]], object] + | None = None, host_uses_signal_set_wakeup_fd: bool = False, - clock=None, - instruments=(), + clock: Clock | None = None, + instruments: Sequence[Instrument] = (), restrict_keyboard_interrupt_to_checkpoints: bool = False, strict_exception_groups: bool = False, -): +) -> None: """Start a "guest" run of Trio on top of some other "host" event loop. Each host loop can only have one guest run at a time. @@ -2221,10 +2359,10 @@ def my_done_callback(run_outcome): # straight through. def unrolled_run( runner: Runner, - async_fn, - args, + async_fn: Callable[..., object], + args: tuple[object, ...], host_uses_signal_set_wakeup_fd: bool = False, -): +) -> Generator[float, EventResult, None]: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True __tracebackhide__ = True @@ -2310,6 +2448,7 @@ def unrolled_run( break else: assert idle_primed is IdlePrimedTypes.AUTOJUMP_CLOCK + assert isinstance(runner.clock, _core.MockClock) runner.clock._autojump() # Process all runnable tasks, but only the ones that are already @@ -2354,7 +2493,7 @@ def unrolled_run( next_send_fn = task._next_send_fn next_send = task._next_send task._next_send_fn = task._next_send = None - final_outcome = None + final_outcome: Outcome[Any] | None = None try: # We used to unwrap the Outcome object here and send/throw # its contents in directly, but it turns out that .throw() @@ -2373,9 +2512,8 @@ def unrolled_run( # more Context.run adds. tb = task_exc.__traceback__ for _ in range(1 + CONTEXT_RUN_TB_FRAMES): - if tb is None: - break - tb = tb.tb_next + if tb is not None: # pragma: no branch + tb = tb.tb_next final_outcome = Error(task_exc.with_traceback(tb)) # Remove local refs so that e.g. cancelled coroutine locals # are not kept alive by this frame until another exception @@ -2418,7 +2556,8 @@ def unrolled_run( # protocol of unwrapping whatever outcome gets sent in. # Instead, we'll arrange to throw `exc` in directly, # which works for at least asyncio and curio. 
- runner.reschedule(task, exc) + # TODO: remove [unused-ignore] when Outcome is typed + runner.reschedule(task, exc) # type: ignore[arg-type, unused-ignore] task._next_send_fn = task.coro.throw # prevent long-lived reference # TODO: develop test for this deletion @@ -2461,15 +2600,15 @@ def unrolled_run( ################################################################ -class _TaskStatusIgnored: +class _TaskStatusIgnored(TaskStatus[Any]): def __repr__(self) -> str: return "TASK_STATUS_IGNORED" - def started(self, value: object = None) -> None: + def started(self, value: Any = None) -> None: pass -TASK_STATUS_IGNORED: FinalT = _TaskStatusIgnored() +TASK_STATUS_IGNORED: FinalT[TaskStatus[Any]] = _TaskStatusIgnored() def current_task() -> Task: @@ -2566,13 +2705,25 @@ async def checkpoint_if_cancelled() -> None: if sys.platform == "win32": from ._generated_io_windows import * - from ._io_windows import WindowsIOManager as TheIOManager + from ._io_windows import ( + EventResult as EventResult, + WindowsIOManager as TheIOManager, + _WindowsStatistics as IOStatistics, + ) elif sys.platform == "linux" or (not TYPE_CHECKING and hasattr(select, "epoll")): from ._generated_io_epoll import * - from ._io_epoll import EpollIOManager as TheIOManager + from ._io_epoll import ( + EpollIOManager as TheIOManager, + EventResult as EventResult, + _EpollStatistics as IOStatistics, + ) elif TYPE_CHECKING or hasattr(select, "kqueue"): from ._generated_io_kqueue import * - from ._io_kqueue import KqueueIOManager as TheIOManager + from ._io_kqueue import ( + EventResult as EventResult, + KqueueIOManager as TheIOManager, + _KqueueStatistics as IOStatistics, + ) else: # pragma: no cover raise NotImplementedError("unsupported platform") diff --git a/trio/_core/_thread_cache.py b/trio/_core/_thread_cache.py index d8f5a94c5b..8381153576 100644 --- a/trio/_core/_thread_cache.py +++ b/trio/_core/_thread_cache.py @@ -118,7 +118,7 @@ def darwin_namefunc( class WorkerThread(Generic[RetT]): def __init__(self, thread_cache: ThreadCache) -> None: - self._job: tuple[ # type: ignore[no-any-unimported] + self._job: tuple[ Callable[[], RetT], Callable[[outcome.Outcome[RetT]], object], str | None, @@ -198,7 +198,7 @@ class ThreadCache: def __init__(self) -> None: self._idle_workers: dict[WorkerThread[Any], None] = {} - def start_thread_soon( # type: ignore[no-any-unimported] + def start_thread_soon( self, fn: Callable[[], RetT], deliver: Callable[[outcome.Outcome[RetT]], object], @@ -216,7 +216,7 @@ def start_thread_soon( # type: ignore[no-any-unimported] THREAD_CACHE = ThreadCache() -def start_thread_soon( # type: ignore[no-any-unimported] +def start_thread_soon( fn: Callable[[], RetT], deliver: Callable[[outcome.Outcome[RetT]], object], name: str | None = None, diff --git a/trio/_dtls.py b/trio/_dtls.py index b3ed0fd883..08b7672a2f 100644 --- a/trio/_dtls.py +++ b/trio/_dtls.py @@ -42,7 +42,6 @@ from OpenSSL.SSL import Context from typing_extensions import Self, TypeAlias - from trio.lowlevel import TaskStatus from trio.socket import Address, _SocketType MAX_UDP_PACKET_SIZE = 65527 @@ -1267,7 +1266,7 @@ async def serve( ssl_context: Context, async_fn: Callable[..., Awaitable[object]], *args: Any, - task_status: TaskStatus = trio.TASK_STATUS_IGNORED, # type: ignore[has-type] + task_status: trio.TaskStatus[None] = trio.TASK_STATUS_IGNORED, ) -> None: """Listen for incoming connections, and spawn a handler for each using an internal nursery. 
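The serve-style signatures typed in these hunks all follow the same pattern: the server reports its bound listeners through ``task_status.started()``, so ``await nursery.start(...)`` evaluates to ``list[trio.SocketListener]``. A hedged sketch (port 0 asks the OS for any free port):

import trio

async def echo_handler(stream: trio.SocketStream) -> None:
    async for chunk in stream:
        await stream.send_all(chunk)

async def demo_serve() -> None:
    async with trio.open_nursery() as nursery:
        listeners = await nursery.start(trio.serve_tcp, echo_handler, 0)
        print("listening on", listeners[0].socket.getsockname())
        nursery.cancel_scope.cancel()

trio.run(demo_serve)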
diff --git a/trio/_highlevel_open_tcp_listeners.py b/trio/_highlevel_open_tcp_listeners.py index 0d5f630495..e6840eae97 100644 --- a/trio/_highlevel_open_tcp_listeners.py +++ b/trio/_highlevel_open_tcp_listeners.py @@ -6,7 +6,7 @@ from math import inf import trio -from trio.lowlevel import TaskStatus +from trio import TaskStatus from . import socket as tsocket @@ -171,7 +171,7 @@ async def serve_tcp( host: str | bytes | None = None, backlog: int | float | None = None, handler_nursery: trio.Nursery | None = None, - task_status: TaskStatus = trio.TASK_STATUS_IGNORED, # type: ignore[assignment] # default has type "_TaskStatusIgnored", argument has type "TaskStatus" + task_status: TaskStatus[list[trio.SocketListener]] = trio.TASK_STATUS_IGNORED, ) -> None: """Listen for incoming TCP connections, and for each one start a task running ``handler(stream)``. diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index 1b1e8df8da..c0da975fb4 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -9,6 +9,7 @@ import sys from pathlib import Path from types import ModuleType +from typing import Protocol import attrs import pytest @@ -23,6 +24,12 @@ mypy_cache_updated = False +try: # If installed, check both versions of this class. + from typing_extensions import Protocol as Protocol_ext +except ImportError: # pragma: no cover + Protocol_ext = Protocol + + def _ensure_mypy_cache_updated(): # This pollutes the `empty` dir. Should this be changed? from mypy.api import run @@ -311,12 +318,15 @@ def lookup_symbol(symbol): "__annotations__", "__attrs_attrs__", "__attrs_own_setattr__", + "__callable_proto_members_only__", "__class_getitem__", + "__final__", "__getstate__", "__match_args__", "__order__", "__orig_bases__", "__parameters__", + "__protocol_attrs__", "__setstate__", "__slots__", "__weakref__", @@ -484,6 +494,9 @@ def test_classes_are_final(): # point of ABCs if inspect.isabstract(class_): continue + # Same with protocols, but only direct children. + if Protocol in class_.__bases__ or Protocol_ext in class_.__bases__: + continue # Exceptions are allowed to be subclassed, because exception # subclassing isn't used to inherit behavior. 
if issubclass(class_, BaseException): diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index ec345facf3..16acc7f39b 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9250398724082934, + "completenessScore": 0.9490445859872612, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 580, - "withUnknownType": 47 + "withKnownType": 596, + "withUnknownType": 32 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -46,17 +46,11 @@ ], "otherSymbolCounts": { "withAmbiguousType": 3, - "withKnownType": 602, - "withUnknownType": 61 + "withKnownType": 627, + "withUnknownType": 50 }, "packageName": "trio", "symbols": [ - "trio._core._run.Nursery.start", - "trio._core._run.Nursery.start_soon", - "trio._core._run.TaskStatus.__repr__", - "trio._core._run.TaskStatus.started", - "trio._dtls.DTLSChannel.__init__", - "trio._dtls.DTLSEndpoint.serve", "trio._highlevel_socket.SocketStream.getsockopt", "trio._highlevel_socket.SocketStream.send_all", "trio._highlevel_socket.SocketStream.setsockopt", @@ -86,32 +80,22 @@ "trio._subprocess.Process.send_signal", "trio._subprocess.Process.terminate", "trio._subprocess.Process.wait", - "trio.current_time", "trio.from_thread.run", "trio.from_thread.run_sync", "trio.lowlevel.cancel_shielded_checkpoint", - "trio.lowlevel.current_clock", - "trio.lowlevel.current_root_task", - "trio.lowlevel.current_statistics", - "trio.lowlevel.current_trio_token", "trio.lowlevel.notify_closing", "trio.lowlevel.open_process", "trio.lowlevel.permanently_detach_coroutine_object", "trio.lowlevel.reattach_detached_coroutine_object", - "trio.lowlevel.reschedule", - "trio.lowlevel.spawn_system_task", - "trio.lowlevel.start_guest_run", "trio.lowlevel.temporarily_detach_coroutine_object", "trio.lowlevel.wait_readable", "trio.lowlevel.wait_writable", "trio.open_ssl_over_tcp_listeners", "trio.open_ssl_over_tcp_stream", "trio.open_unix_socket", - "trio.run", "trio.run_process", "trio.serve_listeners", "trio.serve_ssl_over_tcp", - "trio.serve_tcp", "trio.testing._memory_streams.MemoryReceiveStream.__init__", "trio.testing._memory_streams.MemoryReceiveStream.aclose", "trio.testing._memory_streams.MemoryReceiveStream.close", @@ -142,7 +126,6 @@ "trio.testing.memory_stream_pump", "trio.testing.open_stream_to_socket_listener", "trio.testing.trio_test", - "trio.testing.wait_all_tasks_blocked", "trio.tests.TestsDeprecationWrapper", "trio.to_thread.current_default_thread_limiter" ] diff --git a/trio/_tools/gen_exports.py b/trio/_tools/gen_exports.py index f3ed2e26e7..9d78cd5bd7 100755 --- a/trio/_tools/gen_exports.py +++ b/trio/_tools/gen_exports.py @@ -158,6 +158,13 @@ def gen_public_wrappers_source(file: File) -> str: if is_cm: # pragma: no cover func = func.replace("->Iterator", "->ContextManager") + # TODO: hacky workaround until we run mypy without `-m`, which breaks imports + # enough that it cannot figure out the type of _NO_SEND + if file.path.stem == "_run" and func.startswith( + "def reschedule" + ): # pragma: no cover + func = func.replace("None:\n", "None: # type: ignore[has-type]\n") + # Create export function body template = TEMPLATE.format( " await " if isinstance(method, ast.AsyncFunctionDef) else " ", @@ -247,7 +254,15 @@ def main() -> None: # pragma: no cover IMPORTS_RUN = """\ -from ._run import _NO_SEND +from collections.abc import Awaitable, Callable +from typing import Any + +from outcome import Outcome +import 
contextvars + +from ._run import _NO_SEND, RunStatistics, Task +from ._entry_queue import TrioToken +from .._abc import Clock """ IMPORTS_INSTRUMENT = """\ from ._instrumentation import Instrument diff --git a/trio/_util.py b/trio/_util.py index ba56c18385..0329a12fa9 100644 --- a/trio/_util.py +++ b/trio/_util.py @@ -102,7 +102,7 @@ def is_main_thread() -> bool: # TODO: Use TypeVarTuple here. def coroutine_or_error( async_fn: t.Callable[..., t.Awaitable[RetT]], *args: t.Any -) -> t.Awaitable[RetT]: +) -> collections.abc.Coroutine[object, t.NoReturn, RetT]: def _return_value_looks_like_wrong_library(value: object) -> bool: # Returned by legacy @asyncio.coroutine functions, which includes # a surprising proportion of asyncio builtins. diff --git a/trio/lowlevel.py b/trio/lowlevel.py index c66e22b60e..25e64975e2 100644 --- a/trio/lowlevel.py +++ b/trio/lowlevel.py @@ -13,9 +13,9 @@ ParkingLot as ParkingLot, ParkingLotStatistics as ParkingLotStatistics, RaiseCancelT as RaiseCancelT, + RunStatistics as RunStatistics, RunVar as RunVar, Task as Task, - TaskStatus as TaskStatus, TrioToken as TrioToken, UnboundedQueue as UnboundedQueue, UnboundedQueueStatistics as UnboundedQueueStatistics, From d6d650f921d9fe9dec09accded23bb60b86b8336 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Mon, 14 Aug 2023 07:25:37 -0500 Subject: [PATCH 132/162] Add typing for `_core/_multierror.py` (#2742) Add typing for `_core/_multierror.py` --------- Co-authored-by: CoolCat467 Co-authored-by: John Litborn <11260241+jakkdl@users.noreply.github.com> --- pyproject.toml | 3 +- trio/_core/_multierror.py | 169 +++++++++++++++++++++++++------------- 2 files changed, 116 insertions(+), 56 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3121799cb7..d93cb382ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,8 +57,9 @@ module = [ "trio._core._io_epoll", "trio._core._io_kqueue", "trio._core._local", - "trio._core._unbounded_queue", + "trio._core._multierror", "trio._core._thread_cache", + "trio._core._unbounded_queue", "trio._core._run", "trio._deprecate", "trio._dtls", diff --git a/trio/_core/_multierror.py b/trio/_core/_multierror.py index 3c6ebb789f..6e4cb8b923 100644 --- a/trio/_core/_multierror.py +++ b/trio/_core/_multierror.py @@ -2,7 +2,9 @@ import sys import warnings -from typing import TYPE_CHECKING +from collections.abc import Callable, Sequence +from types import TracebackType +from typing import TYPE_CHECKING, Any, cast, overload import attr @@ -14,13 +16,15 @@ from traceback import print_exception if TYPE_CHECKING: - from types import TracebackType + from typing_extensions import Self ################################################################ # MultiError ################################################################ -def _filter_impl(handler, root_exc): +def _filter_impl( + handler: Callable[[BaseException], BaseException | None], root_exc: BaseException +) -> BaseException | None: # We have a tree of MultiError's, like: # # MultiError([ @@ -79,7 +83,9 @@ def _filter_impl(handler, root_exc): # Filters a subtree, ignoring tracebacks, while keeping a record of # which MultiErrors were preserved unchanged - def filter_tree(exc, preserved): + def filter_tree( + exc: MultiError | BaseException, preserved: set[int] + ) -> MultiError | BaseException | None: if isinstance(exc, MultiError): new_exceptions = [] changed = False @@ -103,7 +109,9 @@ def filter_tree(exc, preserved): new_exc.__context__ = exc return new_exc - def push_tb_down(tb, exc, 
preserved): + def push_tb_down( + tb: TracebackType | None, exc: BaseException, preserved: set[int] + ) -> None: if id(exc) in preserved: return new_tb = concat_tb(tb, exc.__traceback__) @@ -114,7 +122,7 @@ def push_tb_down(tb, exc, preserved): else: exc.__traceback__ = new_tb - preserved = set() + preserved: set[int] = set() new_root_exc = filter_tree(root_exc, preserved) push_tb_down(None, root_exc, preserved) # Delete the local functions to avoid a reference cycle (see @@ -130,9 +138,9 @@ def push_tb_down(tb, exc, preserved): # frame show up in the traceback; otherwise, we leave no trace.) @attr.s(frozen=True) class MultiErrorCatcher: - _handler = attr.ib() + _handler: Callable[[BaseException], BaseException | None] = attr.ib() - def __enter__(self): + def __enter__(self) -> None: pass def __exit__( @@ -167,7 +175,13 @@ def __exit__( return False -class MultiError(BaseExceptionGroup): +if TYPE_CHECKING: + _BaseExceptionGroup = BaseExceptionGroup[BaseException] +else: + _BaseExceptionGroup = BaseExceptionGroup + + +class MultiError(_BaseExceptionGroup): """An exception that contains other exceptions; also known as an "inception". @@ -190,7 +204,9 @@ class MultiError(BaseExceptionGroup): """ - def __init__(self, exceptions, *, _collapse=True): + def __init__( + self, exceptions: Sequence[BaseException], *, _collapse: bool = True + ) -> None: self.collapse = _collapse # Avoid double initialization when _collapse is True and exceptions[0] returned @@ -201,7 +217,9 @@ def __init__(self, exceptions, *, _collapse=True): super().__init__("multiple tasks failed", exceptions) - def __new__(cls, exceptions, *, _collapse=True): + def __new__( # type: ignore[misc] # mypy says __new__ must return a class instance + cls, exceptions: Sequence[BaseException], *, _collapse: bool = True + ) -> NonBaseMultiError | Self | BaseException: exceptions = list(exceptions) for exc in exceptions: if not isinstance(exc, BaseException): @@ -218,33 +236,54 @@ def __new__(cls, exceptions, *, _collapse=True): # In an earlier version of the code, we didn't define __init__ and # simply set the `exceptions` attribute directly on the new object. # However, linters expect attributes to be initialized in __init__. + from_class: type[Self] | type[NonBaseMultiError] = cls if all(isinstance(exc, Exception) for exc in exceptions): - cls = NonBaseMultiError + from_class = NonBaseMultiError - return super().__new__(cls, "multiple tasks failed", exceptions) + # Ignoring arg-type: 'Argument 3 to "__new__" of "BaseExceptionGroup" has incompatible type "list[BaseException]"; expected "Sequence[_BaseExceptionT_co]"' + # We have checked that exceptions is indeed a list of BaseException objects, this is fine. + new_obj = super().__new__(from_class, "multiple tasks failed", exceptions) # type: ignore[arg-type] + assert isinstance(new_obj, (cls, NonBaseMultiError)) + return new_obj - def __reduce__(self): + def __reduce__( + self, + ) -> tuple[object, tuple[type[Self], list[BaseException]], dict[str, bool]]: return ( self.__new__, (self.__class__, list(self.exceptions)), {"collapse": self.collapse}, ) - def __str__(self): + def __str__(self) -> str: return ", ".join(repr(exc) for exc in self.exceptions) - def __repr__(self): + def __repr__(self) -> str: return f"" - def derive(self, __excs): + @overload + def derive(self, excs: Sequence[Exception], /) -> NonBaseMultiError: + ... + + @overload + def derive(self, excs: Sequence[BaseException], /) -> MultiError: + ... 
+ + def derive( + self, excs: Sequence[Exception | BaseException], / + ) -> NonBaseMultiError | MultiError: # We use _collapse=False here to get ExceptionGroup semantics, since derive() # is part of the PEP 654 API - exc = MultiError(__excs, _collapse=False) + exc = MultiError(excs, _collapse=False) exc.collapse = self.collapse return exc @classmethod - def filter(cls, handler, root_exc): + def filter( + cls, + handler: Callable[[BaseException], BaseException | None], + root_exc: BaseException, + ) -> BaseException | None: """Apply the given ``handler`` to all the exceptions in ``root_exc``. Args: @@ -268,7 +307,9 @@ def filter(cls, handler, root_exc): return _filter_impl(handler, root_exc) @classmethod - def catch(cls, handler): + def catch( + cls, handler: Callable[[BaseException], BaseException | None] + ) -> MultiErrorCatcher: """Return a context manager that catches and re-throws exceptions after running :meth:`filter` on them. @@ -286,8 +327,14 @@ def catch(cls, handler): return MultiErrorCatcher(handler) -class NonBaseMultiError(MultiError, ExceptionGroup): - pass +if TYPE_CHECKING: + _ExceptionGroup = ExceptionGroup[Exception] +else: + _ExceptionGroup = ExceptionGroup + + +class NonBaseMultiError(MultiError, _ExceptionGroup): + __slots__ = () # Clean up exception printing: @@ -316,30 +363,6 @@ class NonBaseMultiError(MultiError, ExceptionGroup): try: import tputil except ImportError: - have_tproxy = False -else: - have_tproxy = True - -if have_tproxy: - # http://doc.pypy.org/en/latest/objspace-proxies.html - def copy_tb(base_tb, tb_next): - def controller(operation): - # Rationale for pragma: I looked fairly carefully and tried a few - # things, and AFAICT it's not actually possible to get any - # 'opname' that isn't __getattr__ or __getattribute__. So there's - # no missing test we could add, and no value in coverage nagging - # us about adding one. - if operation.opname in [ - "__getattribute__", - "__getattr__", - ]: # pragma: no cover - if operation.args[0] == "tb_next": - return tb_next - return operation.delegate() - - return tputil.make_proxy(controller, type(base_tb), base_tb) - -else: # ctypes it is import ctypes @@ -359,12 +382,13 @@ class CTraceback(ctypes.Structure): ("tb_lineno", ctypes.c_int), ] - def copy_tb(base_tb, tb_next): + def copy_tb(base_tb: TracebackType, tb_next: TracebackType | None) -> TracebackType: # TracebackType has no public constructor, so allocate one the hard way try: raise ValueError except ValueError as exc: new_tb = exc.__traceback__ + assert new_tb is not None c_new_tb = CTraceback.from_address(id(new_tb)) # At the C level, tb_next either pointer to the next traceback or is @@ -377,14 +401,14 @@ def copy_tb(base_tb, tb_next): # which it already is, so we're done. 
Otherwise, we have to actually # do some work: if tb_next is not None: - _ctypes.Py_INCREF(tb_next) + _ctypes.Py_INCREF(tb_next) # type: ignore[attr-defined] c_new_tb.tb_next = id(tb_next) assert c_new_tb.tb_frame is not None - _ctypes.Py_INCREF(base_tb.tb_frame) + _ctypes.Py_INCREF(base_tb.tb_frame) # type: ignore[attr-defined] old_tb_frame = new_tb.tb_frame c_new_tb.tb_frame = id(base_tb.tb_frame) - _ctypes.Py_DECREF(old_tb_frame) + _ctypes.Py_DECREF(old_tb_frame) # type: ignore[attr-defined] c_new_tb.tb_lasti = base_tb.tb_lasti c_new_tb.tb_lineno = base_tb.tb_lineno @@ -396,8 +420,33 @@ def copy_tb(base_tb, tb_next): # see test_MultiError_catch_doesnt_create_cyclic_garbage del new_tb, old_tb_frame +else: + # http://doc.pypy.org/en/latest/objspace-proxies.html + def copy_tb(base_tb: TracebackType, tb_next: TracebackType | None) -> TracebackType: + # Mypy refuses to believe that ProxyOperation can be imported properly + # TODO: will need no-any-unimported if/when that's toggled on + def controller(operation: tputil.ProxyOperation) -> Any | None: + # Rationale for pragma: I looked fairly carefully and tried a few + # things, and AFAICT it's not actually possible to get any + # 'opname' that isn't __getattr__ or __getattribute__. So there's + # no missing test we could add, and no value in coverage nagging + # us about adding one. + if operation.opname in [ + "__getattribute__", + "__getattr__", + ]: # pragma: no cover + if operation.args[0] == "tb_next": + return tb_next + return operation.delegate() # Deligate is reverting to original behaviour + + return cast( + TracebackType, tputil.make_proxy(controller, type(base_tb), base_tb) + ) # Returns proxy to traceback + -def concat_tb(head, tail): +def concat_tb( + head: TracebackType | None, tail: TracebackType | None +) -> TracebackType | None: # We have to use an iterative algorithm here, because in the worst case # this might be a RecursionError stack that is by definition too deep to # process by recursion! 
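The hunk above only shows the new, annotated signature of ``concat_tb``; the comment explains why its body must be iterative: ``head`` may be the traceback chain of a ``RecursionError``, which is by definition too deep to walk recursively. A minimal sketch of such an iterative concatenation (an illustration building on the module's ``copy_tb``, not the patch's exact body, which lies outside this hunk)::

    from types import TracebackType

    def concat_tb_sketch(
        head: TracebackType | None, tail: TracebackType | None
    ) -> TracebackType | None:
        # Walk head's frames with a loop instead of recursion.
        head_tbs = []
        pointer = head
        while pointer is not None:
            head_tbs.append(pointer)
            pointer = pointer.tb_next
        # Stitch copies of those frames onto the front of tail, last one
        # first, so the result is head's frames followed by tail's frames.
        current_head = tail
        for head_tb in reversed(head_tbs):
            current_head = copy_tb(head_tb, tb_next=current_head)
        return current_head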
@@ -429,7 +478,13 @@ def concat_tb(head, tail): ) else: - def trio_show_traceback(self, etype, value, tb, tb_offset=None): + def trio_show_traceback( + self: IPython.core.interactiveshell.InteractiveShell, + etype: type[BaseException], + value: BaseException, + tb: TracebackType, + tb_offset: int | None = None, + ) -> None: # XX it would be better to integrate with IPython's fancy # exception formatting stuff (and not ignore tb_offset) print_exception(value) @@ -460,10 +515,14 @@ def trio_show_traceback(self, etype, value, tb, tb_offset=None): assert sys.excepthook is apport_python_hook.apport_excepthook - def replacement_excepthook(etype, value, tb): - sys.stderr.write("".join(format_exception(etype, value, tb))) + def replacement_excepthook( + etype: type[BaseException], value: BaseException, tb: TracebackType | None + ) -> None: + # This does work, it's an overloaded function + sys.stderr.write("".join(format_exception(etype, value, tb))) # type: ignore[arg-type] fake_sys = ModuleType("trio_fake_sys") fake_sys.__dict__.update(sys.__dict__) - fake_sys.__excepthook__ = replacement_excepthook # type: ignore + # Fake does have __excepthook__ after __dict__ update, but type checkers don't recognize this + fake_sys.__excepthook__ = replacement_excepthook # type: ignore[attr-defined] apport_python_hook.sys = fake_sys From dcec3049572a69a104dad27972780834c859d06f Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Thu, 17 Aug 2023 18:37:41 +1000 Subject: [PATCH 133/162] Add types to `_threads` module (#2749) * Add types to _threads module --------- Co-authored-by: jakkdl --- docs/source/conf.py | 2 +- pyproject.toml | 1 + trio/_tests/verify_types.json | 11 ++-- trio/_threads.py | 100 ++++++++++++++++++++++++---------- 4 files changed, 76 insertions(+), 38 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 06e4a7e6be..4561c8cfe3 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -66,7 +66,7 @@ ("py:class", "trio._core._run.StatusT_co"), ("py:class", "trio._core._run.StatusT_contra"), ("py:class", "trio._core._run.RetT"), - ("py:class", "trio._threads.T"), + ("py:class", "trio._threads.RetT"), ("py:class", "P.args"), ("py:class", "P.kwargs"), ("py:class", "RetT"), diff --git a/pyproject.toml b/pyproject.toml index d93cb382ba..7f2a32f810 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,6 +68,7 @@ module = [ "trio._ki", "trio._socket", "trio._sync", + "trio._threads", "trio._tools.gen_exports", "trio._util", ] diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 16acc7f39b..397860bcff 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9490445859872612, + "completenessScore": 0.9538216560509554, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 596, - "withUnknownType": 32 + "withKnownType": 599, + "withUnknownType": 29 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -80,8 +80,6 @@ "trio._subprocess.Process.send_signal", "trio._subprocess.Process.terminate", "trio._subprocess.Process.wait", - "trio.from_thread.run", - "trio.from_thread.run_sync", "trio.lowlevel.cancel_shielded_checkpoint", "trio.lowlevel.notify_closing", "trio.lowlevel.open_process", @@ -126,8 +124,7 @@ "trio.testing.memory_stream_pump", "trio.testing.open_stream_to_socket_listener", "trio.testing.trio_test", - "trio.tests.TestsDeprecationWrapper", - "trio.to_thread.current_default_thread_limiter" + 
"trio.tests.TestsDeprecationWrapper" ] } } diff --git a/trio/_threads.py b/trio/_threads.py index 3fbab05750..fdddc3d2e8 100644 --- a/trio/_threads.py +++ b/trio/_threads.py @@ -5,8 +5,9 @@ import inspect import queue as stdlib_queue import threading +from collections.abc import Awaitable, Callable from itertools import count -from typing import Any, Callable, Optional, TypeVar +from typing import TypeVar import attr import outcome @@ -25,10 +26,17 @@ from ._sync import CapacityLimiter from ._util import coroutine_or_error -T = TypeVar("T") +RetT = TypeVar("RetT") +Ret2T = TypeVar("Ret2T") -# Global due to Threading API, thread local storage for trio token -TOKEN_LOCAL = threading.local() + +class _TokenLocal(threading.local): + """Global due to Threading API, thread local storage for trio token.""" + + token: TrioToken + + +TOKEN_LOCAL = _TokenLocal() _limiter_local: RunVar[CapacityLimiter] = RunVar("limiter") # I pulled this number out of the air; it isn't based on anything. Probably we @@ -37,7 +45,7 @@ _thread_counter = count() -def current_default_thread_limiter(): +def current_default_thread_limiter() -> CapacityLimiter: """Get the default `~trio.CapacityLimiter` used by `trio.to_thread.run_sync`. @@ -59,17 +67,17 @@ def current_default_thread_limiter(): # keep track of who's holding the CapacityLimiter's token. @attr.s(frozen=True, eq=False, hash=False) class ThreadPlaceholder: - name = attr.ib() + name: str = attr.ib() -@enable_ki_protection -async def to_thread_run_sync( - sync_fn: Callable[..., T], - *args: Any, - thread_name: Optional[str] = None, +@enable_ki_protection # Decorator used on function with Coroutine[Any, Any, RetT] +async def to_thread_run_sync( # type: ignore[misc] + sync_fn: Callable[..., RetT], + *args: object, + thread_name: str | None = None, cancellable: bool = False, limiter: CapacityLimiter | None = None, -) -> T: +) -> RetT: """Convert a blocking operation into an async operation using a thread. These two lines are equivalent:: @@ -167,14 +175,14 @@ async def to_thread_run_sync( # This function gets scheduled into the Trio run loop to deliver the # thread's result. - def report_back_in_trio_thread_fn(result): - def do_release_then_return_result(): + def report_back_in_trio_thread_fn(result: outcome.Outcome[RetT]) -> None: + def do_release_then_return_result() -> RetT: # release_on_behalf_of is an arbitrary user-defined method, so it # might raise an error. If it does, we want that error to # replace the regular return value, and if the regular return was # already an exception then we want them to chain. try: - return result.unwrap() + return result.unwrap() # type: ignore[no-any-return] # Until outcome is typed finally: limiter.release_on_behalf_of(placeholder) @@ -187,7 +195,7 @@ def do_release_then_return_result(): if thread_name is None: thread_name = f"{getattr(sync_fn, '__name__', None)} from {trio.lowlevel.current_task().name}" - def worker_fn(): + def worker_fn() -> RetT: current_async_library_cvar.set(None) TOKEN_LOCAL.token = current_trio_token try: @@ -206,9 +214,10 @@ def worker_fn(): del TOKEN_LOCAL.token context = contextvars.copy_context() - contextvars_aware_worker_fn = functools.partial(context.run, worker_fn) + # Partial confuses type checkers, coerce to a callable. 
+ contextvars_aware_worker_fn: Callable[[], RetT] = functools.partial(context.run, worker_fn) # type: ignore[assignment] - def deliver_worker_fn_result(result): + def deliver_worker_fn_result(result: outcome.Outcome[RetT]) -> None: try: current_trio_token.run_sync_soon(report_back_in_trio_thread_fn, result) except trio.RunFinishedError: @@ -237,17 +246,32 @@ def abort(_: RaiseCancelT) -> trio.lowlevel.Abort: return await trio.lowlevel.wait_task_rescheduled(abort) # type: ignore[no-any-return] -def _run_fn_as_system_task(cb, fn, *args, context, trio_token=None): +# We use two typevars here, because cb can transform from one to the other any way it likes. +def _run_fn_as_system_task( + cb: Callable[ + [ + stdlib_queue.SimpleQueue[outcome.Outcome[Ret2T]], + Callable[..., RetT], + tuple[object, ...], + ], + object, + ], + fn: Callable[..., RetT], + *args: object, + context: contextvars.Context, + trio_token: TrioToken | None = None, + # Outcome isn't typed, so Ret2T is used only in the return type. +) -> Ret2T: # type: ignore[type-var] """Helper function for from_thread.run and from_thread.run_sync. Since this internally uses TrioToken.run_sync_soon, all warnings about raised exceptions canceling all tasks should be noted. """ - if trio_token and not isinstance(trio_token, TrioToken): + if trio_token is not None and not isinstance(trio_token, TrioToken): raise RuntimeError("Passed kwarg trio_token is not of type TrioToken") - if not trio_token: + if trio_token is None: try: trio_token = TOKEN_LOCAL.token except AttributeError: @@ -263,12 +287,16 @@ def _run_fn_as_system_task(cb, fn, *args, context, trio_token=None): else: raise RuntimeError("this is a blocking function; call it from a thread") - q = stdlib_queue.SimpleQueue() + q: stdlib_queue.SimpleQueue[outcome.Outcome[Ret2T]] = stdlib_queue.SimpleQueue() trio_token.run_sync_soon(context.run, cb, q, fn, args) - return q.get().unwrap() + return q.get().unwrap() # type: ignore[no-any-return] # Until outcome is typed -def from_thread_run(afn, *args, trio_token=None): +def from_thread_run( + afn: Callable[..., Awaitable[RetT]], + *args: object, + trio_token: TrioToken | None = None, +) -> RetT: """Run the given async function in the parent Trio thread, blocking until it is complete. @@ -303,13 +331,17 @@ def from_thread_run(afn, *args, trio_token=None): to enter Trio. """ - def callback(q, afn, args): + def callback( + q: stdlib_queue.SimpleQueue[outcome.Outcome[RetT]], + afn: Callable[..., Awaitable[RetT]], + args: tuple[object, ...], + ) -> None: @disable_ki_protection - async def unprotected_afn(): + async def unprotected_afn() -> RetT: coro = coroutine_or_error(afn, *args) return await coro - async def await_in_trio_thread_task(): + async def await_in_trio_thread_task() -> None: q.put_nowait(await outcome.acapture(unprotected_afn)) context = contextvars.copy_context() @@ -332,7 +364,11 @@ async def await_in_trio_thread_task(): ) -def from_thread_run_sync(fn, *args, trio_token=None): +def from_thread_run_sync( + fn: Callable[..., RetT], + *args: tuple[object, ...], + trio_token: TrioToken | None = None, +) -> RetT: """Run the given sync function in the parent Trio thread, blocking until it is complete. @@ -363,11 +399,15 @@ def from_thread_run_sync(fn, *args, trio_token=None): to enter Trio. 
""" - def callback(q, fn, args): + def callback( + q: stdlib_queue.SimpleQueue[outcome.Outcome[RetT]], + fn: Callable[..., RetT], + args: tuple[object, ...], + ) -> None: current_async_library_cvar.set("trio") @disable_ki_protection - def unprotected_fn(): + def unprotected_fn() -> RetT: ret = fn(*args) if inspect.iscoroutine(ret): From 749bb69012c45fd0e0ff8bd73040d8a59458f99f Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Thu, 17 Aug 2023 19:07:01 +1000 Subject: [PATCH 134/162] Add types to `trio._subprocess` (#2753) * Add type hints to _subprocess and _subprocess.platform.* --------- Co-authored-by: jakkdl --- docs/source/reference-io.rst | 6 +- pyproject.toml | 5 + trio/_subprocess.py | 284 ++++++++++++++++++++++----- trio/_subprocess_platform/kqueue.py | 4 +- trio/_subprocess_platform/waitid.py | 14 +- trio/_subprocess_platform/windows.py | 3 +- trio/_tests/verify_types.json | 26 +-- 7 files changed, 262 insertions(+), 80 deletions(-) diff --git a/docs/source/reference-io.rst b/docs/source/reference-io.rst index 9207afb41b..e270033b46 100644 --- a/docs/source/reference-io.rst +++ b/docs/source/reference-io.rst @@ -731,7 +731,11 @@ task and interact with it while it's running: .. autofunction:: trio.run_process -.. autoclass:: trio.Process +.. autoclass:: trio._subprocess.HasFileno(Protocol) + + .. automethod:: fileno + +.. autoclass:: trio.Process() .. autoattribute:: returncode diff --git a/pyproject.toml b/pyproject.toml index 7f2a32f810..566e9ec49f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,6 +67,11 @@ module = [ "trio._highlevel_open_tcp_stream", "trio._ki", "trio._socket", + "trio._subprocess", + "trio._subprocess_platform", + "trio._subprocess_platform.kqueue", + "trio._subprocess_platform.waitid", + "trio._subprocess_platform.windows", "trio._sync", "trio._threads", "trio._tools.gen_exports", diff --git a/trio/_subprocess.py b/trio/_subprocess.py index 1f8d0a8253..7cf990fa53 100644 --- a/trio/_subprocess.py +++ b/trio/_subprocess.py @@ -1,15 +1,20 @@ +from __future__ import annotations + import os +import signal import subprocess import sys import warnings +from collections.abc import Awaitable, Callable, Mapping, Sequence from contextlib import ExitStack from functools import partial -from typing import TYPE_CHECKING, Optional +from io import TextIOWrapper +from typing import TYPE_CHECKING, Final, Literal, Protocol, Union, overload import trio from ._abc import AsyncResource, ReceiveStream, SendStream -from ._core import ClosedResourceError +from ._core import ClosedResourceError, TaskStatus from ._deprecate import deprecated from ._highlevel_generic import StapledStream from ._subprocess_platform import ( @@ -20,6 +25,14 @@ from ._sync import Lock from ._util import NoPublicConstructor +if TYPE_CHECKING: + from typing_extensions import TypeAlias + + +# Only subscriptable in 3.9+ +StrOrBytesPath: TypeAlias = Union[str, bytes, "os.PathLike[str]", "os.PathLike[bytes]"] + + # Linux-specific, but has complex lifetime management stuff so we hard-code it # here instead of hiding it behind the _subprocess_platform abstraction can_try_pidfd_open: bool @@ -65,6 +78,13 @@ def pidfd_open(fd: int, flags: int) -> int: can_try_pidfd_open = False +class HasFileno(Protocol): + """Represents any file-like object that has a file descriptor.""" + + def fileno(self) -> int: + ... + + class Process(AsyncResource, metaclass=NoPublicConstructor): r"""A child process. Like :class:`subprocess.Popen`, but async. 
@@ -107,32 +127,38 @@ class Process(AsyncResource, metaclass=NoPublicConstructor): available; otherwise this will be None. """ - - universal_newlines = False - encoding = None - errors = None + # We're always in binary mode. + universal_newlines: Final = False + encoding: Final = None + errors: Final = None # Available for the per-platform wait_child_exiting() implementations # to stash some state; waitid platforms use this to avoid spawning # arbitrarily many threads if wait() keeps getting cancelled. - _wait_for_exit_data = None - - def __init__(self, popen, stdin, stdout, stderr): + _wait_for_exit_data: object = None + + def __init__( + self, + popen: subprocess.Popen[bytes], + stdin: SendStream | None, + stdout: ReceiveStream | None, + stderr: ReceiveStream | None, + ) -> None: self._proc = popen - self.stdin: Optional[SendStream] = stdin - self.stdout: Optional[ReceiveStream] = stdout - self.stderr: Optional[ReceiveStream] = stderr + self.stdin = stdin + self.stdout = stdout + self.stderr = stderr - self.stdio: Optional[StapledStream] = None + self.stdio: StapledStream | None = None if self.stdin is not None and self.stdout is not None: self.stdio = StapledStream(self.stdin, self.stdout) - self._wait_lock = Lock() + self._wait_lock: Lock = Lock() - self._pidfd = None + self._pidfd: TextIOWrapper | None = None if can_try_pidfd_open: try: - fd = pidfd_open(self._proc.pid, 0) + fd: int = pidfd_open(self._proc.pid, 0) except OSError: # Well, we tried, but it didn't work (probably because we're # running on an older kernel, or in an older sandbox, that @@ -144,10 +170,10 @@ def __init__(self, popen, stdin, stdout, stderr): # make sure it'll get closed. self._pidfd = open(fd) - self.args = self._proc.args - self.pid = self._proc.pid + self.args: StrOrBytesPath | Sequence[StrOrBytesPath] = self._proc.args + self.pid: int = self._proc.pid - def __repr__(self): + def __repr__(self) -> str: returncode = self.returncode if returncode is None: status = f"running with PID {self.pid}" @@ -159,7 +185,7 @@ def __repr__(self): return f"" @property - def returncode(self): + def returncode(self) -> int | None: """The exit status of the process (an integer), or ``None`` if it's still running. @@ -186,13 +212,13 @@ def returncode(self): issue=1104, instead="run_process or nursery.start(run_process, ...)", ) - async def __aenter__(self): + async def __aenter__(self) -> Process: return self @deprecated( "0.20.0", issue=1104, instead="run_process or nursery.start(run_process, ...)" ) - async def aclose(self): + async def aclose(self) -> None: """Close any pipes we have to the process (both input and output) and wait for it to exit. @@ -214,13 +240,13 @@ async def aclose(self): with trio.CancelScope(shield=True): await self.wait() - def _close_pidfd(self): + def _close_pidfd(self) -> None: if self._pidfd is not None: trio.lowlevel.notify_closing(self._pidfd.fileno()) self._pidfd.close() self._pidfd = None - async def wait(self): + async def wait(self) -> int: """Block until the process exits. 
Returns: @@ -230,7 +256,7 @@ async def wait(self): if self.poll() is None: if self._pidfd is not None: try: - await trio.lowlevel.wait_readable(self._pidfd) + await trio.lowlevel.wait_readable(self._pidfd.fileno()) except ClosedResourceError: # something else (probably a call to poll) already closed the # pidfd @@ -248,7 +274,7 @@ async def wait(self): assert self._proc.returncode is not None return self._proc.returncode - def poll(self): + def poll(self) -> int | None: """Returns the exit status of the process (an integer), or ``None`` if it's still running. @@ -260,7 +286,7 @@ def poll(self): """ return self.returncode - def send_signal(self, sig): + def send_signal(self, sig: signal.Signals | int) -> None: """Send signal ``sig`` to the process. On UNIX, ``sig`` may be any signal defined in the @@ -270,7 +296,7 @@ def send_signal(self, sig): """ self._proc.send_signal(sig) - def terminate(self): + def terminate(self) -> None: """Terminate the process, politely if possible. On UNIX, this is equivalent to @@ -281,7 +307,7 @@ def terminate(self): """ self._proc.terminate() - def kill(self): + def kill(self) -> None: """Immediately terminate the process. On UNIX, this is equivalent to @@ -294,8 +320,13 @@ def kill(self): self._proc.kill() -async def open_process( - command, *, stdin=None, stdout=None, stderr=None, **options +async def _open_process( + command: list[str] | str, + *, + stdin: int | HasFileno | None = None, + stdout: int | HasFileno | None = None, + stderr: int | HasFileno | None = None, + **options: object, ) -> Process: r"""Execute a child program in a new process. @@ -366,9 +397,9 @@ async def open_process( "on UNIX systems" ) - trio_stdin: Optional[ClosableSendStream] = None - trio_stdout: Optional[ClosableReceiveStream] = None - trio_stderr: Optional[ClosableReceiveStream] = None + trio_stdin: ClosableSendStream | None = None + trio_stdout: ClosableReceiveStream | None = None + trio_stderr: ClosableReceiveStream | None = None # Close the parent's handle for each child side of a pipe; we want the child to # have the only copy, so that when it exits we can read EOF on our side. The # trio ends of pipes will be transferred to the Process object, which will be @@ -414,14 +445,14 @@ async def open_process( return Process._create(popen, trio_stdin, trio_stdout, trio_stderr) -async def _windows_deliver_cancel(p): +async def _windows_deliver_cancel(p: Process) -> None: try: p.terminate() except OSError as exc: warnings.warn(RuntimeWarning(f"TerminateProcess on {p!r} failed with: {exc!r}")) -async def _posix_deliver_cancel(p): +async def _posix_deliver_cancel(p: Process) -> None: try: p.terminate() await trio.sleep(5) @@ -439,17 +470,18 @@ async def _posix_deliver_cancel(p): ) -async def run_process( - command, +# Use a private name, so we can declare platform-specific stubs below. +async def _run_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], *, - stdin=b"", - capture_stdout=False, - capture_stderr=False, - check=True, - deliver_cancel=None, - task_status=trio.TASK_STATUS_IGNORED, - **options, -): + stdin: bytes | bytearray | memoryview | int | HasFileno | None = b"", + capture_stdout: bool = False, + capture_stderr: bool = False, + check: bool = True, + deliver_cancel: Callable[[Process], Awaitable[object]] | None = None, + task_status: TaskStatus[Process] = trio.TASK_STATUS_IGNORED, + **options: object, +) -> subprocess.CompletedProcess[bytes]: """Run ``command`` in a subprocess and wait for it to complete. This function can be called in two different ways. 
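The re-typed ``_run_process`` signature above pins ``deliver_cancel`` down as an optional async callable that receives the ``Process``, and ``stdin`` as bytes, a file descriptor, or a ``HasFileno`` object. A hedged usage sketch consistent with those annotations — the command, grace period, and timeout are arbitrary illustration values, and the cancel handler simply mirrors the default POSIX behaviour shown earlier in this diff::

    import trio

    async def terminate_then_kill(process: trio.Process) -> None:
        process.terminate()   # ask politely first
        await trio.sleep(1)   # arbitrary grace period
        process.kill()        # then force the issue

    async def main() -> None:
        with trio.move_on_after(2):  # cancellation source for the example
            await trio.run_process(
                ["sleep", "60"],  # hypothetical long-running command
                deliver_cancel=terminate_then_kill,
            )

    trio.run(main)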
@@ -687,23 +719,28 @@ async def my_deliver_cancel(process): assert os.name == "posix" deliver_cancel = _posix_deliver_cancel - stdout_chunks = [] - stderr_chunks = [] + stdout_chunks: list[bytes | bytearray] = [] + stderr_chunks: list[bytes | bytearray] = [] - async def feed_input(stream): + async def feed_input(stream: SendStream) -> None: async with stream: try: + assert input is not None await stream.send_all(input) except trio.BrokenResourceError: pass - async def read_output(stream, chunks): + async def read_output( + stream: ReceiveStream, + chunks: list[bytes | bytearray], + ) -> None: async with stream: async for chunk in stream: chunks.append(chunk) async with trio.open_nursery() as nursery: - proc = await open_process(command, **options) + # options needs a complex TypedDict. The overload error only occurs on Unix. + proc = await open_process(command, **options) # type: ignore[arg-type, call-overload, unused-ignore] try: if input is not None: nursery.start_soon(feed_input, proc.stdin) @@ -722,7 +759,7 @@ async def read_output(stream, chunks): with trio.CancelScope(shield=True): killer_cscope = trio.CancelScope(shield=True) - async def killer(): + async def killer() -> None: with killer_cscope: await deliver_cancel(proc) @@ -739,4 +776,147 @@ async def killer(): proc.returncode, proc.args, output=stdout, stderr=stderr ) else: + assert proc.returncode is not None return subprocess.CompletedProcess(proc.args, proc.returncode, stdout, stderr) + + +# There's a lot of duplication here because type checkers don't +# have a good way to represent overloads that differ only +# slightly. A cheat sheet: +# - on Windows, command is Union[str, Sequence[str]]; +# on Unix, command is str if shell=True and Sequence[str] otherwise +# - on Windows, there are startupinfo and creationflags options; +# on Unix, there are preexec_fn, restore_signals, start_new_session, and pass_fds +# - run_process() has the signature of open_process() plus arguments +# capture_stdout, capture_stderr, check, deliver_cancel, and the ability to pass +# bytes as stdin + +if TYPE_CHECKING: + if sys.platform == "win32": + + async def open_process( + command: Union[StrOrBytesPath, Sequence[StrOrBytesPath]], + *, + stdin: int | HasFileno | None = None, + stdout: int | HasFileno | None = None, + stderr: int | HasFileno | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + startupinfo: subprocess.STARTUPINFO | None = None, + creationflags: int = 0, + ) -> trio.Process: + ... + + async def run_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + task_status: TaskStatus[Process] = trio.TASK_STATUS_IGNORED, + stdin: bytes | bytearray | memoryview | int | HasFileno | None = None, + capture_stdout: bool = False, + capture_stderr: bool = False, + check: bool = True, + deliver_cancel: Callable[[Process], Awaitable[object]] | None = None, + stdout: int | HasFileno | None = None, + stderr: int | HasFileno | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + startupinfo: subprocess.STARTUPINFO | None = None, + creationflags: int = 0, + ) -> subprocess.CompletedProcess[bytes]: + ... 
+ + else: # Unix + + @overload # type: ignore[no-overload-impl] + async def open_process( + command: StrOrBytesPath, + *, + stdin: int | HasFileno | None = None, + stdout: int | HasFileno | None = None, + stderr: int | HasFileno | None = None, + close_fds: bool = True, + shell: Literal[True], + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + preexec_fn: Callable[[], object] | None = None, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + ) -> trio.Process: + ... + + @overload + async def open_process( + command: Sequence[StrOrBytesPath], + *, + stdin: int | HasFileno | None = None, + stdout: int | HasFileno | None = None, + stderr: int | HasFileno | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + preexec_fn: Callable[[], object] | None = None, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + ) -> trio.Process: + ... + + @overload # type: ignore[no-overload-impl] + async def run_process( + command: StrOrBytesPath, + *, + task_status: TaskStatus[Process] = trio.TASK_STATUS_IGNORED, + stdin: bytes | bytearray | memoryview | int | HasFileno | None = None, + capture_stdout: bool = False, + capture_stderr: bool = False, + check: bool = True, + deliver_cancel: Callable[[Process], Awaitable[object]] | None = None, + stdout: int | HasFileno | None = None, + stderr: int | HasFileno | None = None, + close_fds: bool = True, + shell: Literal[True], + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + preexec_fn: Callable[[], object] | None = None, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + ) -> subprocess.CompletedProcess[bytes]: + ... + + @overload + async def run_process( + command: Sequence[StrOrBytesPath], + *, + task_status: TaskStatus[Process] = trio.TASK_STATUS_IGNORED, + stdin: bytes | bytearray | memoryview | int | HasFileno | None = None, + capture_stdout: bool = False, + capture_stderr: bool = False, + check: bool = True, + deliver_cancel: Callable[[Process], Awaitable[None]] | None = None, + stdout: int | HasFileno | None = None, + stderr: int | HasFileno | None = None, + close_fds: bool = True, + shell: bool = False, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + preexec_fn: Callable[[], object] | None = None, + restore_signals: bool = True, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + ) -> subprocess.CompletedProcess[bytes]: + ... + +else: + # At runtime, use the actual implementations. + open_process = _open_process + open_process.__name__ = open_process.__qualname__ = "open_process" + + run_process = _run_process + run_process.__name__ = run_process.__qualname__ = "run_process" diff --git a/trio/_subprocess_platform/kqueue.py b/trio/_subprocess_platform/kqueue.py index 9839fd046b..efd0562fc2 100644 --- a/trio/_subprocess_platform/kqueue.py +++ b/trio/_subprocess_platform/kqueue.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import select import sys from typing import TYPE_CHECKING @@ -35,7 +37,7 @@ async def wait_child_exiting(process: "_subprocess.Process") -> None: # in Chromium it seems we should still keep the check. 
return - def abort(_): + def abort(_: _core.RaiseCancelT) -> _core.Abort: kqueue.control([make_event(select.KQ_EV_DELETE)], 0) return _core.Abort.SUCCEEDED diff --git a/trio/_subprocess_platform/waitid.py b/trio/_subprocess_platform/waitid.py index ad69017219..2a2ca6719d 100644 --- a/trio/_subprocess_platform/waitid.py +++ b/trio/_subprocess_platform/waitid.py @@ -2,15 +2,19 @@ import math import os import sys +from typing import TYPE_CHECKING from .. import _core, _subprocess from .._sync import CapacityLimiter, Event from .._threads import to_thread_run_sync +assert (sys.platform != "win32" and sys.platform != "darwin") or not TYPE_CHECKING + + try: from os import waitid - def sync_wait_reapable(pid): + def sync_wait_reapable(pid: int) -> None: waitid(os.P_PID, pid, os.WEXITED | os.WNOWAIT) except ImportError: @@ -39,9 +43,9 @@ def sync_wait_reapable(pid): int waitid(int idtype, int id, siginfo_t* result, int options); """ ) - waitid = waitid_ffi.dlopen(None).waitid + waitid_cffi = waitid_ffi.dlopen(None).waitid - def sync_wait_reapable(pid): + def sync_wait_reapable(pid: int) -> None: P_PID = 1 WEXITED = 0x00000004 if sys.platform == "darwin": # pragma: no cover @@ -52,7 +56,7 @@ def sync_wait_reapable(pid): else: WNOWAIT = 0x01000000 result = waitid_ffi.new("siginfo_t *") - while waitid(P_PID, pid, result, WEXITED | WNOWAIT) < 0: + while waitid_cffi(P_PID, pid, result, WEXITED | WNOWAIT) < 0: got_errno = waitid_ffi.errno if got_errno == errno.EINTR: continue @@ -101,7 +105,7 @@ async def wait_child_exiting(process: "_subprocess.Process") -> None: # process. if process._wait_for_exit_data is None: - process._wait_for_exit_data = event = Event() # type: ignore + process._wait_for_exit_data = event = Event() _core.spawn_system_task(_waitid_system_task, process.pid, event) assert isinstance(process._wait_for_exit_data, Event) await process._wait_for_exit_data.wait() diff --git a/trio/_subprocess_platform/windows.py b/trio/_subprocess_platform/windows.py index 958be8675c..1634e74fa7 100644 --- a/trio/_subprocess_platform/windows.py +++ b/trio/_subprocess_platform/windows.py @@ -3,4 +3,5 @@ async def wait_child_exiting(process: "_subprocess.Process") -> None: - await WaitForSingleObject(int(process._proc._handle)) + # _handle is not in Popen stubs, though it is present on Windows. 
+ await WaitForSingleObject(int(process._proc._handle)) # type: ignore[attr-defined] diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index 397860bcff..d3a994933e 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9538216560509554, + "completenessScore": 0.9585987261146497, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 599, - "withUnknownType": 29 + "withKnownType": 602, + "withUnknownType": 26 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -45,9 +45,9 @@ } ], "otherSymbolCounts": { - "withAmbiguousType": 3, - "withKnownType": 627, - "withUnknownType": 50 + "withAmbiguousType": 1, + "withKnownType": 642, + "withUnknownType": 39 }, "packageName": "trio", "symbols": [ @@ -68,21 +68,8 @@ "trio._ssl.SSLStream.transport_stream", "trio._ssl.SSLStream.unwrap", "trio._ssl.SSLStream.wait_send_all_might_not_block", - "trio._subprocess.Process.__init__", - "trio._subprocess.Process.__repr__", - "trio._subprocess.Process.args", - "trio._subprocess.Process.encoding", - "trio._subprocess.Process.errors", - "trio._subprocess.Process.kill", - "trio._subprocess.Process.pid", - "trio._subprocess.Process.poll", - "trio._subprocess.Process.returncode", - "trio._subprocess.Process.send_signal", - "trio._subprocess.Process.terminate", - "trio._subprocess.Process.wait", "trio.lowlevel.cancel_shielded_checkpoint", "trio.lowlevel.notify_closing", - "trio.lowlevel.open_process", "trio.lowlevel.permanently_detach_coroutine_object", "trio.lowlevel.reattach_detached_coroutine_object", "trio.lowlevel.temporarily_detach_coroutine_object", @@ -91,7 +78,6 @@ "trio.open_ssl_over_tcp_listeners", "trio.open_ssl_over_tcp_stream", "trio.open_unix_socket", - "trio.run_process", "trio.serve_listeners", "trio.serve_ssl_over_tcp", "trio.testing._memory_streams.MemoryReceiveStream.__init__", From ba605c2c648464c63536a54efcf1cafe56d1c13d Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Thu, 17 Aug 2023 19:43:51 +1000 Subject: [PATCH 135/162] Add a Sphinx extension to specially handle typevars in docstrings (#2752) * Add a new sphinx extension to handle typevars in docstrings --------- Co-authored-by: jakkdl Co-authored-by: John Litborn <11260241+jakkdl@users.noreply.github.com> --- docs/source/_static/hackrtd.css | 5 ++ docs/source/conf.py | 18 +----- docs/source/typevars.py | 103 ++++++++++++++++++++++++++++++++ 3 files changed, 109 insertions(+), 17 deletions(-) create mode 100644 docs/source/typevars.py diff --git a/docs/source/_static/hackrtd.css b/docs/source/_static/hackrtd.css index e75a889f69..48401f2389 100644 --- a/docs/source/_static/hackrtd.css +++ b/docs/source/_static/hackrtd.css @@ -12,6 +12,11 @@ pre { background-color: #ffe13b; } +/* Make typevar/paramspec names distinguishable from classes. */ +.typevarref { + text-decoration: dashed underline; +} + /* Add a snakey triskelion ornament to
* https://stackoverflow.com/questions/8862344/css-hr-with-ornament/18541258#18541258 * but only do it to
s in the content box, b/c the RTD popup control panel diff --git a/docs/source/conf.py b/docs/source/conf.py index 4561c8cfe3..4c6e42b709 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -53,23 +53,6 @@ ("py:exc", "Anything else"), ("py:class", "async function"), ("py:class", "sync function"), - # https://github.com/sphinx-doc/sphinx/issues/7722 - # TODO: why do these need to be spelled out? - ("py:class", "trio._abc.ReceiveType"), - ("py:class", "trio._abc.SendType"), - ("py:class", "trio._abc.T"), - ("py:obj", "trio._abc.ReceiveType"), - ("py:obj", "trio._abc.SendType"), - ("py:obj", "trio._abc.T"), - ("py:obj", "trio._abc.T_resource"), - ("py:class", "trio._core._run.StatusT"), - ("py:class", "trio._core._run.StatusT_co"), - ("py:class", "trio._core._run.StatusT_contra"), - ("py:class", "trio._core._run.RetT"), - ("py:class", "trio._threads.RetT"), - ("py:class", "P.args"), - ("py:class", "P.kwargs"), - ("py:class", "RetT"), # why aren't these found in stdlib? ("py:class", "types.FrameType"), # TODO: figure out if you can link this to SSL @@ -139,6 +122,7 @@ def setup(app): "sphinxcontrib_trio", "sphinxcontrib.jquery", "local_customization", + "typevars", ] intersphinx_mapping = { diff --git a/docs/source/typevars.py b/docs/source/typevars.py new file mode 100644 index 0000000000..ab492b98b8 --- /dev/null +++ b/docs/source/typevars.py @@ -0,0 +1,103 @@ +"""Transform references to typevars to avoid missing reference errors. + +See https://github.com/sphinx-doc/sphinx/issues/7722 also. +""" +from __future__ import annotations + +import re +from pathlib import Path + +from sphinx.addnodes import Element, pending_xref +from sphinx.application import Sphinx +from sphinx.environment import BuildEnvironment +from sphinx.errors import NoUri + +import trio + + +def identify_typevars(trio_folder: Path) -> None: + """Record all typevars in trio.""" + for filename in trio_folder.rglob("*.py"): + with open(filename, encoding="utf8") as f: + for line in f: + # A simple regex should be sufficient to find them all, no need to actually parse. + match = re.search( + r"\b(TypeVar|TypeVarTuple|ParamSpec)\(['\"]([^'\"]+)['\"]", + line, + ) + if match is not None: + relative = "trio" / filename.relative_to(trio_folder) + relative = relative.with_suffix("") + if relative.name == "__init__": # Package, remove. + relative = relative.parent + kind = match.group(1) + name = match.group(2) + typevars_qualified[f'{".".join(relative.parts)}.{name}'] = kind + existing = typevars_named.setdefault(name, kind) + if existing != kind: + print("Mismatch: {} = {}, {}", name, existing, kind) + + +# All our typevars, so we can suppress reference errors for them. +typevars_qualified: dict[str, str] = {} +typevars_named: dict[str, str] = {} + + +def lookup_reference( + app: Sphinx, + env: BuildEnvironment, + node: pending_xref, + contnode: Element, +) -> Element | None: + """Handle missing references.""" + # If this is a typing_extensions object, redirect to typing. + # Most things there are backports, so the stdlib docs should have an entry. + target: str = node["reftarget"] + if target.startswith("typing_extensions."): + new_node = node.copy() + new_node["reftarget"] = f"typing.{target[18:]}" + # This fires off this same event, with our new modified node in order to fetch the right + # URL to use. 
+ return app.emit_firstresult( + "missing-reference", + env, + new_node, + contnode, + allowed_exceptions=(NoUri,), + ) + + try: + typevar_type = typevars_qualified[target] + except KeyError: + # Imports might mean the typevar was defined in a different module or something. + # Fall back to checking just by name. + dot = target.rfind(".") + stem = target[dot + 1 :] if dot >= 0 else target + try: + typevar_type = typevars_named[stem] + except KeyError: + # Let other handlers deal with this name, it's not a typevar. + return None + + # Found a typevar. Redirect to the stdlib docs for that kind of var. + new_node = node.copy() + new_node["reftarget"] = f"typing.{typevar_type}" + new_node = app.emit_firstresult( + "missing-reference", + env, + new_node, + contnode, + allowed_exceptions=(NoUri,), + ) + reftitle = new_node["reftitle"] + # Is normally "(in Python 3.XX)", make it say typevar/paramspec/etc + paren = "(" if reftitle.startswith("(") else "" + new_node["reftitle"] = f"{paren}{typevar_type}, {reftitle.lstrip('(')}" + # Add a CSS class, for restyling. + new_node["classes"].append("typevarref") + return new_node + + +def setup(app: Sphinx) -> None: + identify_typevars(Path(trio.__file__).parent) + app.connect("missing-reference", lookup_reference, -10) From 4c38ba91df15ae374d0af194dcf9b6d342aab2f4 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Thu, 17 Aug 2023 12:11:39 +0200 Subject: [PATCH 136/162] add types to _core/_traps (#2751) * type _core/_traps --- docs/source/conf.py | 6 +++--- pyproject.toml | 6 ++++-- trio/_core/_traps.py | 31 ++++++++++++++++++++----------- trio/_tests/verify_types.json | 10 +++------- 4 files changed, 30 insertions(+), 23 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 4c6e42b709..b6d5e63043 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -44,7 +44,6 @@ nitpick_ignore = [ ("py:class", "CapacityLimiter-like object"), ("py:class", "bytes-like"), - ("py:class", "None"), # Was removed but still shows up in changelog ("py:class", "trio.lowlevel.RunLocal"), # trio.abc is documented at random places scattered throughout the docs @@ -55,8 +54,6 @@ ("py:class", "sync function"), # why aren't these found in stdlib? 
("py:class", "types.FrameType"), - # TODO: figure out if you can link this to SSL - ("py:class", "Context"), # TODO: temporary type ("py:class", "_SocketType"), # these are not defined in https://docs.python.org/3/objects.inv @@ -74,6 +71,9 @@ # aliasing doesn't actually fix the warning for types.FrameType, but displaying # "types.FrameType" is more helpful than just "frame" "FrameType": "types.FrameType", + # unaliasing these makes intersphinx able to resolve them + "Outcome": "outcome.Outcome", + "Context": "OpenSSL.SSL.Context", } diff --git a/pyproject.toml b/pyproject.toml index 566e9ec49f..073e2508d1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,16 +51,18 @@ module = [ "trio._abc", "trio._core._asyncgens", "trio._core._entry_queue", - "trio._core._generated_run", "trio._core._generated_io_epoll", "trio._core._generated_io_kqueue", + "trio._core._generated_run", "trio._core._io_epoll", "trio._core._io_kqueue", "trio._core._local", "trio._core._multierror", + "trio._core._run", "trio._core._thread_cache", + "trio._core._traps", + "trio._core._unbounded_queue", "trio._core._unbounded_queue", - "trio._core._run", "trio._deprecate", "trio._dtls", "trio._file_io", diff --git a/trio/_core/_traps.py b/trio/_core/_traps.py index 08a8ceac01..760c46bc51 100644 --- a/trio/_core/_traps.py +++ b/trio/_core/_traps.py @@ -1,14 +1,21 @@ -# These are the only functions that ever yield back to the task runner. +"""These are the only functions that ever yield back to the task runner.""" +from __future__ import annotations import enum import types -from typing import Any, Callable, NoReturn +from typing import TYPE_CHECKING, Any, Callable, NoReturn import attr import outcome from . import _run +if TYPE_CHECKING: + from outcome import Outcome + from typing_extensions import TypeAlias + + from ._run import Task + # Helper for the bottommost 'yield'. You can't use 'yield' inside an async # function, but you can inside a generator, and if you decorate your generator @@ -18,7 +25,7 @@ # tracking machinery. Since our traps are public APIs, we make them real async # functions, and then this helper takes care of the actual yield: @types.coroutine -def _async_yield(obj): +def _async_yield(obj: Any) -> Any: # type: ignore[misc] return (yield obj) @@ -28,7 +35,7 @@ class CancelShieldedCheckpoint: pass -async def cancel_shielded_checkpoint(): +async def cancel_shielded_checkpoint() -> None: """Introduce a schedule point, but not a cancel point. This is *not* a :ref:`checkpoint `, but it is half of a @@ -41,7 +48,7 @@ async def cancel_shielded_checkpoint(): await trio.lowlevel.checkpoint() """ - return (await _async_yield(CancelShieldedCheckpoint)).unwrap() + (await _async_yield(CancelShieldedCheckpoint)).unwrap() # Return values for abort functions @@ -62,10 +69,10 @@ class Abort(enum.Enum): # Not exported in the trio._core namespace, but imported directly by _run. @attr.s(frozen=True) class WaitTaskRescheduled: - abort_func = attr.ib() + abort_func: Callable[[RaiseCancelT], Abort] = attr.ib() -RaiseCancelT = Callable[[], NoReturn] # TypeAlias +RaiseCancelT: TypeAlias = Callable[[], NoReturn] # Should always return the type a Task "expects", unless you willfully reschedule it @@ -175,10 +182,10 @@ def abort(inner_raise_cancel): # Not exported in the trio._core namespace, but imported directly by _run. 
@attr.s(frozen=True) class PermanentlyDetachCoroutineObject: - final_outcome = attr.ib() + final_outcome: Outcome = attr.ib() -async def permanently_detach_coroutine_object(final_outcome): +async def permanently_detach_coroutine_object(final_outcome: Outcome) -> Any: """Permanently detach the current task from the Trio scheduler. Normally, a Trio task doesn't exit until its coroutine object exits. When @@ -209,7 +216,9 @@ async def permanently_detach_coroutine_object(final_outcome): return await _async_yield(PermanentlyDetachCoroutineObject(final_outcome)) -async def temporarily_detach_coroutine_object(abort_func): +async def temporarily_detach_coroutine_object( + abort_func: Callable[[RaiseCancelT], Abort] +) -> Any: """Temporarily detach the current coroutine object from the Trio scheduler. @@ -245,7 +254,7 @@ async def temporarily_detach_coroutine_object(abort_func): return await _async_yield(WaitTaskRescheduled(abort_func)) -async def reattach_detached_coroutine_object(task, yield_value): +async def reattach_detached_coroutine_object(task: Task, yield_value: object) -> None: """Reattach a coroutine object that was detached using :func:`temporarily_detach_coroutine_object`. diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index d3a994933e..c5e9c4dc66 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9585987261146497, + "completenessScore": 0.964968152866242, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 602, - "withUnknownType": 26 + "withKnownType": 606, + "withUnknownType": 22 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -68,11 +68,7 @@ "trio._ssl.SSLStream.transport_stream", "trio._ssl.SSLStream.unwrap", "trio._ssl.SSLStream.wait_send_all_might_not_block", - "trio.lowlevel.cancel_shielded_checkpoint", "trio.lowlevel.notify_closing", - "trio.lowlevel.permanently_detach_coroutine_object", - "trio.lowlevel.reattach_detached_coroutine_object", - "trio.lowlevel.temporarily_detach_coroutine_object", "trio.lowlevel.wait_readable", "trio.lowlevel.wait_writable", "trio.open_ssl_over_tcp_listeners", From a316034efbc8304209c1554f4cd132748fd83ddd Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Fri, 18 Aug 2023 10:35:33 +0200 Subject: [PATCH 137/162] Fix various files so the entire trio/ directory can be type-checked (#2763) * Edit various files so the entire trio/ directory can be type-checked without error * Use the correct type here * Fix wrong function name * This is a StapledStream * Specify generic type for ContextVar * Run linters * Test double-binding FakeNet sockets, for coverage * Apply suggestions from CoolCat * Import MultiError directly to bypass deprecation warnings --------- Co-authored-by: Spencer Brown --- check.sh | 6 +- pyproject.toml | 100 ++++++++---------- trio/_core/_generated_run.py | 2 +- trio/_core/_io_common.py | 8 +- trio/_core/_ki.py | 8 +- trio/_core/_parking_lot.py | 2 +- trio/_core/_tests/test_io.py | 13 ++- trio/_core/_tests/test_ki.py | 10 +- trio/_core/_tests/test_multierror.py | 2 +- .../apport_excepthook.py | 6 +- .../ipython_custom_exc.py | 6 +- .../simple_excepthook.py | 6 +- trio/_core/_tests/test_run.py | 4 +- trio/_core/_wakeup_socketpair.py | 16 +-- trio/_path.py | 8 +- trio/_tests/check_type_completeness.py | 2 + trio/_tests/test_contextvars.py | 24 +++-- trio/_tests/test_dtls.py | 4 +- trio/_tests/test_exports.py | 
11 +- trio/_tests/test_fakenet.py | 15 ++- trio/_tests/test_highlevel_serve_listeners.py | 4 +- trio/_tests/test_socket.py | 2 +- trio/_tests/test_subprocess.py | 14 ++- trio/_tests/test_threads.py | 14 ++- trio/_tests/test_tracing.py | 10 +- trio/_tests/test_unix_pipes.py | 8 +- trio/_tools/gen_exports.py | 7 -- trio/_unix_pipes.py | 3 + trio/py.typed | 0 trio/testing/_fake_net.py | 41 ++++--- 30 files changed, 207 insertions(+), 149 deletions(-) create mode 100644 trio/py.typed diff --git a/check.sh b/check.sh index a0efa531b6..ace193a62a 100755 --- a/check.sh +++ b/check.sh @@ -27,9 +27,9 @@ fi flake8 trio/ || EXIT_STATUS=$? # Run mypy on all supported platforms -mypy -m trio -m trio.testing --platform linux || EXIT_STATUS=$? -mypy -m trio -m trio.testing --platform darwin || EXIT_STATUS=$? # tests FreeBSD too -mypy -m trio -m trio.testing --platform win32 || EXIT_STATUS=$? +mypy trio --platform linux || EXIT_STATUS=$? +mypy trio --platform darwin || EXIT_STATUS=$? # tests FreeBSD too +mypy trio --platform win32 || EXIT_STATUS=$? # Check pip compile is consistent pip-compile test-requirements.in diff --git a/pyproject.toml b/pyproject.toml index 073e2508d1..a212393452 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,70 +34,58 @@ warn_redundant_casts = true warn_return_any = true # Avoid subtle backsliding -#disallow_any_decorated = true -#disallow_incomplete_defs = true -#disallow_subclassing_any = true +disallow_any_decorated = true +disallow_any_generics = true +disallow_any_unimported = false # Enable once Outcome has stubs. +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_decorators = true +disallow_untyped_defs = true -# Enable gradually / for new modules +# Enable once other problems are dealt with check_untyped_defs = false disallow_untyped_calls = false -disallow_untyped_defs = false -# DO NOT use `ignore_errors`; it doesn't apply -# downstream and users have to deal with them. -[[tool.mypy.overrides]] -# Fully typed, enable stricter checks -module = [ - "trio._abc", - "trio._core._asyncgens", - "trio._core._entry_queue", - "trio._core._generated_io_epoll", - "trio._core._generated_io_kqueue", - "trio._core._generated_run", - "trio._core._io_epoll", - "trio._core._io_kqueue", - "trio._core._local", - "trio._core._multierror", - "trio._core._run", - "trio._core._thread_cache", - "trio._core._traps", - "trio._core._unbounded_queue", - "trio._core._unbounded_queue", - "trio._deprecate", - "trio._dtls", - "trio._file_io", - "trio._highlevel_open_tcp_stream", - "trio._ki", - "trio._socket", - "trio._subprocess", - "trio._subprocess_platform", - "trio._subprocess_platform.kqueue", - "trio._subprocess_platform.waitid", - "trio._subprocess_platform.windows", - "trio._sync", - "trio._threads", - "trio._tools.gen_exports", - "trio._util", -] -disallow_incomplete_defs = true -disallow_untyped_defs = true -disallow_untyped_decorators = true -disallow_any_generics = true -disallow_any_decorated = true -disallow_any_unimported = false # Enable once outcome has stubs. -disallow_subclassing_any = true +# files not yet fully typed [[tool.mypy.overrides]] -# Needs to use Any due to some complex introspection. 
module = [ - "trio._path", +# 2747 +"trio/testing/_network", +"trio/testing/_trio_test", +"trio/testing/_checkpoints", +"trio/testing/_check_streams", +"trio/testing/_memory_streams", +# 2745 +"trio/_ssl", +# 2756 +"trio/_highlevel_open_unix_stream", +"trio/_highlevel_serve_listeners", +"trio/_highlevel_ssl_helpers", +"trio/_highlevel_socket", +# 2755 +"trio/_core/_windows_cffi", +"trio/_wait_for_object", +# 2761 +"trio/_core/_generated_io_windows", +"trio/_core/_io_windows", + + +"trio/_signals", + +# internal +"trio/_windows_pipes", + +# tests +"trio/_core/_tests/*", +"trio/_tests/*", +"trio/testing/_fake_net", # 30 ] -disallow_incomplete_defs = true -disallow_untyped_defs = true -#disallow_any_generics = true -#disallow_any_decorated = true -disallow_any_unimported = true -disallow_subclassing_any = true +disallow_any_decorated = false +disallow_any_generics = false +disallow_any_unimported = false +disallow_incomplete_defs = false +disallow_untyped_defs = false [tool.pytest.ini_options] addopts = ["--strict-markers", "--strict-config"] diff --git a/trio/_core/_generated_run.py b/trio/_core/_generated_run.py index 35ecd45a1b..bd5abbd639 100644 --- a/trio/_core/_generated_run.py +++ b/trio/_core/_generated_run.py @@ -88,7 +88,7 @@ def current_root_task() ->(Task | None): raise RuntimeError("must be called from async context") -def reschedule(task: Task, next_send: Outcome[Any]=_NO_SEND) ->None: # type: ignore[has-type] +def reschedule(task: Task, next_send: Outcome[Any]=_NO_SEND) ->None: """Reschedule the given task with the given :class:`outcome.Outcome`. diff --git a/trio/_core/_io_common.py b/trio/_core/_io_common.py index b141474fda..c1af293278 100644 --- a/trio/_core/_io_common.py +++ b/trio/_core/_io_common.py @@ -1,12 +1,18 @@ +from __future__ import annotations + import copy +from typing import TYPE_CHECKING import outcome from .. 
import _core +if TYPE_CHECKING: + from ._io_epoll import EpollWaiters + # Utility function shared between _io_epoll and _io_windows -def wake_all(waiters, exc): +def wake_all(waiters: EpollWaiters, exc: BaseException) -> None: try: current_task = _core.current_task() except RuntimeError: diff --git a/trio/_core/_ki.py b/trio/_core/_ki.py index 8ae83c287a..10172e4989 100644 --- a/trio/_core/_ki.py +++ b/trio/_core/_ki.py @@ -121,7 +121,7 @@ def currently_ki_protected() -> bool: # see python-trio/async_generator/async_generator/_impl.py def legacy_isasyncgenfunction( obj: object, -) -> TypeGuard[Callable[..., types.AsyncGeneratorType]]: +) -> TypeGuard[Callable[..., types.AsyncGeneratorType[object, object]]]: return getattr(obj, "_async_gen_function", None) == id(obj) @@ -196,7 +196,9 @@ def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT: @attr.s class KIManager: - handler = attr.ib(default=None) + handler: Callable[[int, types.FrameType | None], None] | None = attr.ib( + default=None + ) def install( self, @@ -221,7 +223,7 @@ def handler(signum: int, frame: types.FrameType | None) -> None: self.handler = handler signal.signal(signal.SIGINT, handler) - def close(self): + def close(self) -> None: if self.handler is not None: if signal.getsignal(signal.SIGINT) is self.handler: signal.signal(signal.SIGINT, signal.default_int_handler) diff --git a/trio/_core/_parking_lot.py b/trio/_core/_parking_lot.py index 74708433da..6510745e5b 100644 --- a/trio/_core/_parking_lot.py +++ b/trio/_core/_parking_lot.py @@ -139,7 +139,7 @@ async def park(self) -> None: self._parked[task] = None task.custom_sleep_data = self - def abort_fn(_): + def abort_fn(_: _core.RaiseCancelT) -> _core.Abort: del task.custom_sleep_data._parked[task] return _core.Abort.SUCCEEDED diff --git a/trio/_core/_tests/test_io.py b/trio/_core/_tests/test_io.py index 21a954941c..65b9b82bcf 100644 --- a/trio/_core/_tests/test_io.py +++ b/trio/_core/_tests/test_io.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import random import socket as stdlib_socket +from collections.abc import Callable from contextlib import suppress import pytest @@ -47,15 +50,15 @@ def fileno_wrapper(fileobj): return fileno_wrapper -wait_readable_options = [trio.lowlevel.wait_readable] -wait_writable_options = [trio.lowlevel.wait_writable] -notify_closing_options = [trio.lowlevel.notify_closing] +wait_readable_options: list[Callable] = [trio.lowlevel.wait_readable] +wait_writable_options: list[Callable] = [trio.lowlevel.wait_writable] +notify_closing_options: list[Callable] = [trio.lowlevel.notify_closing] -for options_list in [ +for options_list in ( wait_readable_options, wait_writable_options, notify_closing_options, -]: +): options_list += [using_fileno(f) for f in options_list] # Decorators that feed in different settings for wait_readable / wait_writable diff --git a/trio/_core/_tests/test_ki.py b/trio/_core/_tests/test_ki.py index fdbada4624..b6eef68e22 100644 --- a/trio/_core/_tests/test_ki.py +++ b/trio/_core/_tests/test_ki.py @@ -1,7 +1,10 @@ +from __future__ import annotations + import contextlib import inspect import signal import threading +from typing import TYPE_CHECKING import outcome import pytest @@ -16,6 +19,9 @@ from ..._util import signal_raise from ...testing import wait_all_tasks_blocked +if TYPE_CHECKING: + from ..._core import Abort, RaiseCancelT + def ki_self(): signal_raise(signal.SIGINT) @@ -375,7 +381,7 @@ async def main(): ki_self() task = _core.current_task() - def abort(_): + def abort(_: RaiseCancelT) -> Abort: 
_core.reschedule(task, outcome.Value(1)) return _core.Abort.FAILED @@ -394,7 +400,7 @@ async def main(): ki_self() task = _core.current_task() - def abort(raise_cancel): + def abort(raise_cancel: RaiseCancelT) -> Abort: result = outcome.capture(raise_cancel) _core.reschedule(task, result) return _core.Abort.FAILED diff --git a/trio/_core/_tests/test_multierror.py b/trio/_core/_tests/test_multierror.py index 7a8bd2f9a8..52e5e39d1b 100644 --- a/trio/_core/_tests/test_multierror.py +++ b/trio/_core/_tests/test_multierror.py @@ -555,7 +555,7 @@ def test_apport_excepthook_monkeypatch_interaction(): @pytest.mark.parametrize("protocol", range(0, pickle.HIGHEST_PROTOCOL + 1)) -def test_pickle_multierror(protocol) -> None: +def test_pickle_multierror(protocol: int) -> None: # use trio.MultiError to make sure that pickle works through the deprecation layer import trio diff --git a/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py b/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py index 3e1d23ca8e..0e46f37e17 100644 --- a/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py +++ b/trio/_core/_tests/test_multierror_scripts/apport_excepthook.py @@ -3,13 +3,13 @@ # make sure it's on sys.path. import sys -import _common +import _common # isort: split sys.path.append("/usr/lib/python3/dist-packages") import apport_python_hook apport_python_hook.install() -import trio +from trio._core._multierror import MultiError # Bypass deprecation warnings -raise trio.MultiError([KeyError("key_error"), ValueError("value_error")]) +raise MultiError([KeyError("key_error"), ValueError("value_error")]) diff --git a/trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py b/trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py index 80e42b6a2c..7ccb341dc9 100644 --- a/trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py +++ b/trio/_core/_tests/test_multierror_scripts/ipython_custom_exc.py @@ -3,7 +3,7 @@ # about it. import sys -import _common +import _common # isort: split def custom_excepthook(*args): @@ -29,8 +29,8 @@ def custom_exc_hook(etype, value, tb, tb_offset=None): ip.set_custom_exc((SomeError,), custom_exc_hook) -import trio +from trio._core._multierror import MultiError # Bypass deprecation warnings. 
# The custom excepthook should run, because Trio was polite and didn't # override it -raise trio.MultiError([ValueError(), KeyError()]) +raise MultiError([ValueError(), KeyError()]) diff --git a/trio/_core/_tests/test_multierror_scripts/simple_excepthook.py b/trio/_core/_tests/test_multierror_scripts/simple_excepthook.py index 94004525db..65371107bc 100644 --- a/trio/_core/_tests/test_multierror_scripts/simple_excepthook.py +++ b/trio/_core/_tests/test_multierror_scripts/simple_excepthook.py @@ -1,6 +1,6 @@ -import _common +import _common # isort: split -import trio +from trio._core._multierror import MultiError # Bypass deprecation warnings def exc1_fn(): @@ -18,4 +18,4 @@ def exc2_fn(): # This should be printed nicely, because Trio overrode sys.excepthook -raise trio.MultiError([exc1_fn(), exc2_fn()]) +raise MultiError([exc1_fn(), exc2_fn()]) diff --git a/trio/_core/_tests/test_run.py b/trio/_core/_tests/test_run.py index 81c3b73cc4..6d34d8f223 100644 --- a/trio/_core/_tests/test_run.py +++ b/trio/_core/_tests/test_run.py @@ -1954,7 +1954,7 @@ async def test_Nursery_private_init(): def test_Nursery_subclass(): with pytest.raises(TypeError): - class Subclass(_core._run.Nursery): + class Subclass(_core._run.Nursery): # type: ignore[misc] pass @@ -1984,7 +1984,7 @@ class Subclass(_core.Cancelled): def test_CancelScope_subclass(): with pytest.raises(TypeError): - class Subclass(_core.CancelScope): + class Subclass(_core.CancelScope): # type: ignore[misc] pass diff --git a/trio/_core/_wakeup_socketpair.py b/trio/_core/_wakeup_socketpair.py index 51a80ef024..2ad1a023fe 100644 --- a/trio/_core/_wakeup_socketpair.py +++ b/trio/_core/_wakeup_socketpair.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import signal import socket import warnings @@ -7,7 +9,7 @@ class WakeupSocketpair: - def __init__(self): + def __init__(self) -> None: self.wakeup_sock, self.write_sock = socket.socketpair() self.wakeup_sock.setblocking(False) self.write_sock.setblocking(False) @@ -27,26 +29,26 @@ def __init__(self): self.write_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) except OSError: pass - self.old_wakeup_fd = None + self.old_wakeup_fd: int | None = None - def wakeup_thread_and_signal_safe(self): + def wakeup_thread_and_signal_safe(self) -> None: try: self.write_sock.send(b"\x00") except BlockingIOError: pass - async def wait_woken(self): + async def wait_woken(self) -> None: await _core.wait_readable(self.wakeup_sock) self.drain() - def drain(self): + def drain(self) -> None: try: while True: self.wakeup_sock.recv(2**16) except BlockingIOError: pass - def wakeup_on_signals(self): + def wakeup_on_signals(self) -> None: assert self.old_wakeup_fd is None if not is_main_thread(): return @@ -64,7 +66,7 @@ def wakeup_on_signals(self): ) ) - def close(self): + def close(self) -> None: self.wakeup_sock.close() self.write_sock.close() if self.old_wakeup_fd is not None: diff --git a/trio/_path.py b/trio/_path.py index b7e6b16e4a..c2763e03af 100644 --- a/trio/_path.py +++ b/trio/_path.py @@ -116,9 +116,9 @@ async def wrapper(self: Path, *args: P.args, **kwargs: P.kwargs) -> Path: def classmethod_wrapper_factory( cls: AsyncAutoWrapperType, meth_name: str -) -> classmethod: +) -> classmethod: # type: ignore[type-arg] @async_wraps(cls, cls._wraps, meth_name) - async def wrapper(cls: type[Path], *args: Any, **kwargs: Any) -> Path: + async def wrapper(cls: type[Path], *args: Any, **kwargs: Any) -> Path: # type: ignore[misc] # contains Any meth = getattr(cls._wraps, meth_name) func = partial(meth, *args, 
**kwargs) value = await trio.to_thread.run_sync(func) @@ -163,7 +163,7 @@ def generate_forwards(cls, attrs: dict[str, object]) -> None: def generate_wraps(cls, attrs: dict[str, object]) -> None: # generate wrappers for functions of _wraps - wrapper: classmethod | Callable + wrapper: classmethod | Callable[..., object] # type: ignore[type-arg] for attr_name, attr in cls._wraps.__dict__.items(): # .z. exclude cls._wrap_iter if attr_name.startswith("_") or attr_name in attrs: @@ -188,7 +188,7 @@ def generate_magic(cls, attrs: dict[str, object]) -> None: def generate_iter(cls, attrs: dict[str, object]) -> None: # generate wrappers for methods that return iterators - wrapper: Callable + wrapper: Callable[..., object] for attr_name, attr in cls._wraps.__dict__.items(): if attr_name in cls._wrap_iter: wrapper = iter_wrapper_factory(cls, attr_name) diff --git a/trio/_tests/check_type_completeness.py b/trio/_tests/check_type_completeness.py index 7a65a4249e..abaabcf785 100755 --- a/trio/_tests/check_type_completeness.py +++ b/trio/_tests/check_type_completeness.py @@ -1,4 +1,6 @@ #!/usr/bin/env python3 +from __future__ import annotations + # this file is not run as part of the tests, instead it's run standalone from check.sh import argparse import json diff --git a/trio/_tests/test_contextvars.py b/trio/_tests/test_contextvars.py index 63853f5171..ae0c25f876 100644 --- a/trio/_tests/test_contextvars.py +++ b/trio/_tests/test_contextvars.py @@ -1,15 +1,19 @@ +from __future__ import annotations + import contextvars from .. import _core -trio_testing_contextvar = contextvars.ContextVar("trio_testing_contextvar") +trio_testing_contextvar: contextvars.ContextVar[str] = contextvars.ContextVar( + "trio_testing_contextvar" +) -async def test_contextvars_default(): +async def test_contextvars_default() -> None: trio_testing_contextvar.set("main") - record = [] + record: list[str] = [] - async def child(): + async def child() -> None: value = trio_testing_contextvar.get() record.append(value) @@ -18,11 +22,11 @@ async def child(): assert record == ["main"] -async def test_contextvars_set(): +async def test_contextvars_set() -> None: trio_testing_contextvar.set("main") - record = [] + record: list[str] = [] - async def child(): + async def child() -> None: trio_testing_contextvar.set("child") value = trio_testing_contextvar.get() record.append(value) @@ -34,13 +38,13 @@ async def child(): assert value == "main" -async def test_contextvars_copy(): +async def test_contextvars_copy() -> None: trio_testing_contextvar.set("main") context = contextvars.copy_context() trio_testing_contextvar.set("second_main") - record = [] + record: list[str] = [] - async def child(): + async def child() -> None: value = trio_testing_contextvar.get() record.append(value) diff --git a/trio/_tests/test_dtls.py b/trio/_tests/test_dtls.py index b8c32c6d5f..8cb06ccb3d 100644 --- a/trio/_tests/test_dtls.py +++ b/trio/_tests/test_dtls.py @@ -17,10 +17,10 @@ ca = trustme.CA() server_cert = ca.issue_cert("example.com") -server_ctx = SSL.Context(SSL.DTLS_METHOD) +server_ctx = SSL.Context(SSL.DTLS_METHOD) # type: ignore[attr-defined] server_cert.configure_cert(server_ctx) -client_ctx = SSL.Context(SSL.DTLS_METHOD) +client_ctx = SSL.Context(SSL.DTLS_METHOD) # type: ignore[attr-defined] ca.configure_trust(client_ctx) diff --git a/trio/_tests/test_exports.py b/trio/_tests/test_exports.py index c0da975fb4..2f1157db06 100644 --- a/trio/_tests/test_exports.py +++ b/trio/_tests/test_exports.py @@ -1,3 +1,4 @@ +from __future__ import annotations # 
isort: split import __future__ # Regular import, not special! import enum @@ -27,7 +28,7 @@ try: # If installed, check both versions of this class. from typing_extensions import Protocol as Protocol_ext except ImportError: # pragma: no cover - Protocol_ext = Protocol + Protocol_ext = Protocol # type: ignore[assignment] def _ensure_mypy_cache_updated(): @@ -240,7 +241,9 @@ def no_underscores(symbols): ) @pytest.mark.parametrize("module_name", PUBLIC_MODULE_NAMES) @pytest.mark.parametrize("tool", ["jedi", "mypy"]) -def test_static_tool_sees_class_members(tool, module_name, tmpdir) -> None: +def test_static_tool_sees_class_members( + tool: str, module_name: str, tmpdir: Path +) -> None: module = PUBLIC_MODULES[PUBLIC_MODULE_NAMES.index(module_name)] # ignore hidden, but not dunder, symbols @@ -481,7 +484,7 @@ def lookup_symbol(symbol): assert not errors -def test_classes_are_final(): +def test_classes_are_final() -> None: for module in PUBLIC_MODULES: for name, class_ in module.__dict__.items(): if not isinstance(class_, type): @@ -503,7 +506,7 @@ def test_classes_are_final(): continue # These are classes that are conceptually abstract, but # inspect.isabstract returns False for boring reasons. - if class_ in {trio.abc.Instrument, trio.socket.SocketType}: + if class_ is trio.abc.Instrument or class_ is trio.socket.SocketType: continue # Enums have their own metaclass, so we can't use our metaclasses. # And I don't think there's a lot of risk from people subclassing diff --git a/trio/_tests/test_fakenet.py b/trio/_tests/test_fakenet.py index bc691c9db5..d250a105a3 100644 --- a/trio/_tests/test_fakenet.py +++ b/trio/_tests/test_fakenet.py @@ -1,16 +1,18 @@ +import errno + import pytest import trio from trio.testing._fake_net import FakeNet -def fn(): +def fn() -> FakeNet: fn = FakeNet() fn.enable() return fn -async def test_basic_udp(): +async def test_basic_udp() -> None: fn() s1 = trio.socket.socket(type=trio.socket.SOCK_DGRAM) s2 = trio.socket.socket(type=trio.socket.SOCK_DGRAM) @@ -19,6 +21,11 @@ async def test_basic_udp(): ip, port = s1.getsockname() assert ip == "127.0.0.1" assert port != 0 + + with pytest.raises(OSError) as exc: # Cannot rebind. 
+ await s1.bind(("192.0.2.1", 0)) + assert exc.value.errno == errno.EINVAL + await s2.sendto(b"xyz", s1.getsockname()) data, addr = await s1.recvfrom(10) assert data == b"xyz" @@ -29,7 +36,7 @@ async def test_basic_udp(): assert addr == s1.getsockname() -async def test_msg_trunc(): +async def test_msg_trunc() -> None: fn() s1 = trio.socket.socket(type=trio.socket.SOCK_DGRAM) s2 = trio.socket.socket(type=trio.socket.SOCK_DGRAM) @@ -38,7 +45,7 @@ async def test_msg_trunc(): data, addr = await s1.recvfrom(10) -async def test_basic_tcp(): +async def test_basic_tcp() -> None: fn() with pytest.raises(NotImplementedError): trio.socket.socket() diff --git a/trio/_tests/test_highlevel_serve_listeners.py b/trio/_tests/test_highlevel_serve_listeners.py index 4385263899..65804f4222 100644 --- a/trio/_tests/test_highlevel_serve_listeners.py +++ b/trio/_tests/test_highlevel_serve_listeners.py @@ -12,7 +12,9 @@ class MemoryListener(trio.abc.Listener): closed = attr.ib(default=False) accepted_streams = attr.ib(factory=list) - queued_streams = attr.ib(factory=(lambda: trio.open_memory_channel(1))) + queued_streams = attr.ib( + factory=(lambda: trio.open_memory_channel[trio.StapledStream](1)) + ) accept_hook = attr.ib(default=None) async def connect(self): diff --git a/trio/_tests/test_socket.py b/trio/_tests/test_socket.py index e9baff436a..f01b4fde14 100644 --- a/trio/_tests/test_socket.py +++ b/trio/_tests/test_socket.py @@ -360,7 +360,7 @@ async def test_SocketType_basics(): sock.close() -async def test_SocketType_setsockopt(): +async def test_SocketType_setsockopt() -> None: sock = tsocket.socket() with sock as _: # specifying optlen. Not supported on pypy, and I couldn't find diff --git a/trio/_tests/test_subprocess.py b/trio/_tests/test_subprocess.py index 4dfaef4c7f..7986dfd71e 100644 --- a/trio/_tests/test_subprocess.py +++ b/trio/_tests/test_subprocess.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import random import signal @@ -6,6 +8,7 @@ from contextlib import asynccontextmanager from functools import partial from pathlib import Path as SyncPath +from typing import TYPE_CHECKING import pytest @@ -24,8 +27,15 @@ from ..lowlevel import open_process from ..testing import assert_no_checkpoints, wait_all_tasks_blocked +if TYPE_CHECKING: + ... 
+ from signal import Signals + posix = os.name == "posix" -if posix: +SIGKILL: Signals | None +SIGTERM: Signals | None +SIGUSR1: Signals | None +if (not TYPE_CHECKING and posix) or sys.platform != "win32": from signal import SIGKILL, SIGTERM, SIGUSR1 else: SIGKILL, SIGTERM, SIGUSR1 = None, None, None @@ -574,7 +584,7 @@ async def test_for_leaking_fds(): async def test_subprocess_pidfd_unnotified(): noticed_exit = None - async def wait_and_tell(proc) -> None: + async def wait_and_tell(proc: Process) -> None: nonlocal noticed_exit noticed_exit = Event() await proc.wait() diff --git a/trio/_tests/test_threads.py b/trio/_tests/test_threads.py index 21eb7b12e8..a2988ed0ff 100644 --- a/trio/_tests/test_threads.py +++ b/trio/_tests/test_threads.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import contextvars import queue as stdlib_queue import re @@ -170,7 +172,7 @@ async def main(): async def test_named_thread(): ending = " from trio._tests.test_threads.test_named_thread" - def inner(name="inner" + ending) -> threading.Thread: + def inner(name: str = "inner" + ending) -> threading.Thread: assert threading.current_thread().name == name return threading.current_thread() @@ -185,7 +187,7 @@ def f(name: str) -> Callable[[None], threading.Thread]: await to_thread_run_sync(f("None" + ending)) # test that you can set a custom name, and that it's reset afterwards - async def test_thread_name(name: str): + async def test_thread_name(name: str) -> None: thread = await to_thread_run_sync(f(name), thread_name=name) assert re.match("Trio thread [0-9]*", thread.name) @@ -235,7 +237,7 @@ def _get_thread_name(ident: Optional[int] = None) -> Optional[str]: # and most mac machines. So unless the platform is linux it will just skip # in case it fails to fetch the os thread name. 
async def test_named_thread_os(): - def inner(name) -> threading.Thread: + def inner(name: str) -> threading.Thread: os_thread_name = _get_thread_name() if os_thread_name is None and sys.platform != "linux": pytest.skip(f"no pthread OS support on {sys.platform}") @@ -253,7 +255,7 @@ def f(name: str) -> Callable[[None], threading.Thread]: await to_thread_run_sync(f(default), thread_name=None) # test that you can set a custom name, and that it's reset afterwards - async def test_thread_name(name: str, expected: Optional[str] = None): + async def test_thread_name(name: str, expected: Optional[str] = None) -> None: if expected is None: expected = name thread = await to_thread_run_sync(f(expected), thread_name=name) @@ -584,7 +586,9 @@ async def async_fn(): # pragma: no cover await to_thread_run_sync(async_fn) -trio_test_contextvar = contextvars.ContextVar("trio_test_contextvar") +trio_test_contextvar: contextvars.ContextVar[str] = contextvars.ContextVar( + "trio_test_contextvar" +) async def test_trio_to_thread_run_sync_contextvars(): diff --git a/trio/_tests/test_tracing.py b/trio/_tests/test_tracing.py index 07d1ff7609..e5110eaff3 100644 --- a/trio/_tests/test_tracing.py +++ b/trio/_tests/test_tracing.py @@ -1,26 +1,26 @@ import trio -async def coro1(event: trio.Event): +async def coro1(event: trio.Event) -> None: event.set() await trio.sleep_forever() -async def coro2(event: trio.Event): +async def coro2(event: trio.Event) -> None: await coro1(event) -async def coro3(event: trio.Event): +async def coro3(event: trio.Event) -> None: await coro2(event) -async def coro2_async_gen(event: trio.Event): +async def coro2_async_gen(event): yield await trio.lowlevel.checkpoint() yield await coro1(event) yield await trio.lowlevel.checkpoint() -async def coro3_async_gen(event: trio.Event): +async def coro3_async_gen(event: trio.Event) -> None: async for x in coro2_async_gen(event): pass diff --git a/trio/_tests/test_unix_pipes.py b/trio/_tests/test_unix_pipes.py index acee75aafb..0b0d2ceb23 100644 --- a/trio/_tests/test_unix_pipes.py +++ b/trio/_tests/test_unix_pipes.py @@ -1,7 +1,10 @@ +from __future__ import annotations + import errno import os import select import sys +from typing import TYPE_CHECKING import pytest @@ -11,6 +14,9 @@ posix = os.name == "posix" pytestmark = pytest.mark.skipif(not posix, reason="posix only") + +assert not TYPE_CHECKING or sys.platform == "unix" + if posix: from .._unix_pipes import FdStream else: @@ -19,7 +25,7 @@ # Have to use quoted types so import doesn't crash on windows -async def make_pipe() -> "Tuple[FdStream, FdStream]": +async def make_pipe() -> "tuple[FdStream, FdStream]": """Makes a new pair of pipes.""" (r, w) = os.pipe() return FdStream(w), FdStream(r) diff --git a/trio/_tools/gen_exports.py b/trio/_tools/gen_exports.py index 9d78cd5bd7..3c598e8eae 100755 --- a/trio/_tools/gen_exports.py +++ b/trio/_tools/gen_exports.py @@ -158,13 +158,6 @@ def gen_public_wrappers_source(file: File) -> str: if is_cm: # pragma: no cover func = func.replace("->Iterator", "->ContextManager") - # TODO: hacky workaround until we run mypy without `-m`, which breaks imports - # enough that it cannot figure out the type of _NO_SEND - if file.path.stem == "_run" and func.startswith( - "def reschedule" - ): # pragma: no cover - func = func.replace("None:\n", "None: # type: ignore[has-type]\n") - # Create export function body template = TEMPLATE.format( " await " if isinstance(method, ast.AsyncFunctionDef) else " ", diff --git a/trio/_unix_pipes.py b/trio/_unix_pipes.py index 
716550790e..1a389e12dd 100644 --- a/trio/_unix_pipes.py +++ b/trio/_unix_pipes.py @@ -2,6 +2,7 @@ import errno import os +import sys from typing import TYPE_CHECKING import trio @@ -12,6 +13,8 @@ if TYPE_CHECKING: from typing import Final as FinalType +assert not TYPE_CHECKING or sys.platform != "win32" + if os.name != "posix": # We raise an error here rather than gating the import in lowlevel.py # in order to keep jedi static analysis happy. diff --git a/trio/py.typed b/trio/py.typed new file mode 100644 index 0000000000..e69de29bb2 diff --git a/trio/testing/_fake_net.py b/trio/testing/_fake_net.py index b3bdfd85c0..ddf46174f3 100644 --- a/trio/testing/_fake_net.py +++ b/trio/testing/_fake_net.py @@ -19,6 +19,7 @@ from trio._util import Final, NoPublicConstructor if TYPE_CHECKING: + from socket import AddressFamily, SocketKind from types import TracebackType IPAddress = Union[ipaddress.IPv4Address, ipaddress.IPv6Address] @@ -104,7 +105,7 @@ def reply(self, payload): class FakeSocketFactory(trio.abc.SocketFactory): fake_net: "FakeNet" - def socket(self, family: int, type: int, proto: int) -> "FakeSocket": + def socket(self, family: int, type: int, proto: int) -> FakeSocket: # type: ignore[override] return FakeSocket._create(self.fake_net, family, type, proto) @@ -113,22 +114,38 @@ class FakeHostnameResolver(trio.abc.HostnameResolver): fake_net: "FakeNet" async def getaddrinfo( - self, host: str, port: Union[int, str], family=0, type=0, proto=0, flags=0 - ): + self, + host: bytes | str | None, + port: bytes | str | int | None, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] + ]: raise NotImplementedError("FakeNet doesn't do fake DNS yet") - async def getnameinfo(self, sockaddr, flags: int): + async def getnameinfo( + self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int + ) -> tuple[str, str]: raise NotImplementedError("FakeNet doesn't do fake DNS yet") class FakeNet(metaclass=Final): - def __init__(self): + def __init__(self) -> None: # When we need to pick an arbitrary unique ip address/port, use these: self._auto_ipv4_iter = ipaddress.IPv4Network("1.0.0.0/8").hosts() - self._auto_ipv4_iter = ipaddress.IPv6Network("1::/16").hosts() + self._auto_ipv4_iter = ipaddress.IPv6Network("1::/16").hosts() # type: ignore[assignment] self._auto_port_iter = iter(range(50000, 65535)) - self._bound: Dict[UDPBinding, FakeSocket] = {} + self._bound: dict[UDPBinding, FakeSocket] = {} self.route_packet = None @@ -176,9 +193,9 @@ def __init__(self, fake_net: FakeNet, family: int, type: int, proto: int): self._closed = False - self._packet_sender, self._packet_receiver = trio.open_memory_channel( - float("inf") - ) + self._packet_sender, self._packet_receiver = trio.open_memory_channel[ + UDPPacket + ](float("inf")) # This is the source-of-truth for what port etc. 
this socket is bound to self._binding: Optional[UDPBinding] = None @@ -206,7 +223,7 @@ async def _resolve_address_nocp(self, address, *, local): local=local, ) - def _deliver_packet(self, packet: UDPPacket): + def _deliver_packet(self, packet: UDPPacket) -> None: try: self._packet_sender.send_nowait(packet) except trio.BrokenResourceError: @@ -220,7 +237,7 @@ def _deliver_packet(self, packet: UDPPacket): async def bind(self, addr): self._check_closed() if self._binding is not None: - _fake_error(errno.EINVAL) + _fake_err(errno.EINVAL) await trio.lowlevel.checkpoint() ip_str, port = await self._resolve_address_nocp(addr, local=True) ip = ipaddress.ip_address(ip_str) From 5c85e6733e2b6ea8de31aa5e14f00152c02f1060 Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Sat, 19 Aug 2023 01:53:58 +1000 Subject: [PATCH 138/162] Add types to most of `trio.testing` (#2747) * Add types to trio.testing._trio_test * Add types to trio.testing._check_streams * Add types to trio.testing._checkpoints * Add types to trio.testing._network * Add types to trio.testing._memory_streams * Introduce some type aliases to make code more readable * Accept bytearray/memoryview in functions where applicable * Expand the type aliases in trio.testing for docs * fix return type of trio/_core/_run to not be an awaitable, remove type: ignore * Make StapledStream generic, to preserve the type of the component streams * remove testing/* files from the ignorelist in pyproject.toml for mypy --------- Co-authored-by: jakkdl Co-authored-by: John Litborn <11260241+jakkdl@users.noreply.github.com> --- pyproject.toml | 7 -- trio/_core/_run.py | 2 +- trio/_highlevel_generic.py | 18 ++-- trio/_subprocess.py | 2 +- trio/_tests/verify_types.json | 42 ++-------- trio/testing/_check_streams.py | 116 ++++++++++++++++---------- trio/testing/_checkpoints.py | 12 ++- trio/testing/_memory_streams.py | 140 ++++++++++++++++++++------------ trio/testing/_network.py | 6 +- trio/testing/_trio_test.py | 40 ++++++--- 10 files changed, 221 insertions(+), 164 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a212393452..6893927337 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,12 +50,6 @@ disallow_untyped_calls = false # files not yet fully typed [[tool.mypy.overrides]] module = [ -# 2747 -"trio/testing/_network", -"trio/testing/_trio_test", -"trio/testing/_checkpoints", -"trio/testing/_check_streams", -"trio/testing/_memory_streams", # 2745 "trio/_ssl", # 2756 @@ -70,7 +64,6 @@ module = [ "trio/_core/_generated_io_windows", "trio/_core/_io_windows", - "trio/_signals", # internal diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 1ba88da85e..3a4751254f 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -2152,7 +2152,7 @@ def setup_runner( def run( - async_fn: Callable[..., RetT], + async_fn: Callable[..., Awaitable[RetT]], *args: object, clock: Clock | None = None, instruments: Sequence[Instrument] = (), diff --git a/trio/_highlevel_generic.py b/trio/_highlevel_generic.py index e1ac378c6a..4269f90bae 100644 --- a/trio/_highlevel_generic.py +++ b/trio/_highlevel_generic.py @@ -1,16 +1,16 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from typing import Generic, TypeVar import attr import trio from trio._util import Final -if TYPE_CHECKING: - from .abc import SendStream, ReceiveStream, AsyncResource +from .abc import AsyncResource, HalfCloseableStream, ReceiveStream, SendStream -from .abc import HalfCloseableStream +SendStreamT = TypeVar("SendStreamT", bound=SendStream) +ReceiveStreamT = 
TypeVar("ReceiveStreamT", bound=ReceiveStream) async def aclose_forcefully(resource: AsyncResource) -> None: @@ -44,7 +44,11 @@ async def aclose_forcefully(resource: AsyncResource) -> None: @attr.s(eq=False, hash=False) -class StapledStream(HalfCloseableStream, metaclass=Final): +class StapledStream( + HalfCloseableStream, + Generic[SendStreamT, ReceiveStreamT], + metaclass=Final, +): """This class `staples `__ together two unidirectional streams to make single bidirectional stream. @@ -79,8 +83,8 @@ class StapledStream(HalfCloseableStream, metaclass=Final): """ - send_stream: SendStream = attr.ib() - receive_stream: ReceiveStream = attr.ib() + send_stream: SendStreamT = attr.ib() + receive_stream: ReceiveStreamT = attr.ib() async def send_all(self, data: bytes | bytearray | memoryview) -> None: """Calls ``self.send_stream.send_all``.""" diff --git a/trio/_subprocess.py b/trio/_subprocess.py index 7cf990fa53..978f7e6188 100644 --- a/trio/_subprocess.py +++ b/trio/_subprocess.py @@ -149,7 +149,7 @@ def __init__( self.stdout = stdout self.stderr = stderr - self.stdio: StapledStream | None = None + self.stdio: StapledStream[SendStream, ReceiveStream] | None = None if self.stdin is not None and self.stdout is not None: self.stdio = StapledStream(self.stdin, self.stdout) diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index c5e9c4dc66..b61b28a428 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,16 +7,16 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.964968152866242, + "completenessScore": 0.9872611464968153, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 606, - "withUnknownType": 22 + "withKnownType": 620, + "withUnknownType": 8 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, "missingDefaultParamCount": 0, - "missingFunctionDocStringCount": 4, + "missingFunctionDocStringCount": 3, "moduleName": "trio", "modules": [ { @@ -46,8 +46,8 @@ ], "otherSymbolCounts": { "withAmbiguousType": 1, - "withKnownType": 642, - "withUnknownType": 39 + "withKnownType": 662, + "withUnknownType": 19 }, "packageName": "trio", "symbols": [ @@ -76,36 +76,6 @@ "trio.open_unix_socket", "trio.serve_listeners", "trio.serve_ssl_over_tcp", - "trio.testing._memory_streams.MemoryReceiveStream.__init__", - "trio.testing._memory_streams.MemoryReceiveStream.aclose", - "trio.testing._memory_streams.MemoryReceiveStream.close", - "trio.testing._memory_streams.MemoryReceiveStream.close_hook", - "trio.testing._memory_streams.MemoryReceiveStream.put_data", - "trio.testing._memory_streams.MemoryReceiveStream.put_eof", - "trio.testing._memory_streams.MemoryReceiveStream.receive_some", - "trio.testing._memory_streams.MemoryReceiveStream.receive_some_hook", - "trio.testing._memory_streams.MemorySendStream.__init__", - "trio.testing._memory_streams.MemorySendStream.aclose", - "trio.testing._memory_streams.MemorySendStream.close", - "trio.testing._memory_streams.MemorySendStream.close_hook", - "trio.testing._memory_streams.MemorySendStream.get_data", - "trio.testing._memory_streams.MemorySendStream.get_data_nowait", - "trio.testing._memory_streams.MemorySendStream.send_all", - "trio.testing._memory_streams.MemorySendStream.send_all_hook", - "trio.testing._memory_streams.MemorySendStream.wait_send_all_might_not_block", - "trio.testing._memory_streams.MemorySendStream.wait_send_all_might_not_block_hook", - "trio.testing.assert_checkpoints", - "trio.testing.assert_no_checkpoints", - 
"trio.testing.check_half_closeable_stream", - "trio.testing.check_one_way_stream", - "trio.testing.check_two_way_stream", - "trio.testing.lockstep_stream_one_way_pair", - "trio.testing.lockstep_stream_pair", - "trio.testing.memory_stream_one_way_pair", - "trio.testing.memory_stream_pair", - "trio.testing.memory_stream_pump", - "trio.testing.open_stream_to_socket_listener", - "trio.testing.trio_test", "trio.tests.TestsDeprecationWrapper" ] } diff --git a/trio/testing/_check_streams.py b/trio/testing/_check_streams.py index 401b8ef0c2..33947ccc55 100644 --- a/trio/testing/_check_streams.py +++ b/trio/testing/_check_streams.py @@ -2,24 +2,33 @@ from __future__ import annotations import random +from collections.abc import Generator from contextlib import contextmanager -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Awaitable, Callable, Generic, Tuple, TypeVar -from .. import _core -from .._abc import HalfCloseableStream, ReceiveStream, SendStream, Stream +from .. import CancelScope, _core +from .._abc import AsyncResource, HalfCloseableStream, ReceiveStream, SendStream, Stream from .._highlevel_generic import aclose_forcefully from ._checkpoints import assert_checkpoints if TYPE_CHECKING: from types import TracebackType + from typing_extensions import ParamSpec, TypeAlias -class _ForceCloseBoth: - def __init__(self, both): - self._both = list(both) + ArgsT = ParamSpec("ArgsT") - async def __aenter__(self): - return self._both +Res1 = TypeVar("Res1", bound=AsyncResource) +Res2 = TypeVar("Res2", bound=AsyncResource) +StreamMaker: TypeAlias = Callable[[], Awaitable[Tuple[Res1, Res2]]] + + +class _ForceCloseBoth(Generic[Res1, Res2]): + def __init__(self, both: tuple[Res1, Res2]) -> None: + self._first, self._second = both + + async def __aenter__(self) -> tuple[Res1, Res2]: + return self._first, self._second async def __aexit__( self, @@ -28,13 +37,13 @@ async def __aexit__( traceback: TracebackType | None, ) -> None: try: - await aclose_forcefully(self._both[0]) + await aclose_forcefully(self._first) finally: - await aclose_forcefully(self._both[1]) + await aclose_forcefully(self._second) @contextmanager -def _assert_raises(exc): +def _assert_raises(exc: type[BaseException]) -> Generator[None, None, None]: __tracebackhide__ = True try: yield @@ -44,7 +53,10 @@ def _assert_raises(exc): raise AssertionError(f"expected exception: {exc}") -async def check_one_way_stream(stream_maker, clogged_stream_maker): +async def check_one_way_stream( + stream_maker: StreamMaker[SendStream, ReceiveStream], + clogged_stream_maker: StreamMaker[SendStream, ReceiveStream] | None, +) -> None: """Perform a number of generic tests on a custom one-way stream implementation. @@ -67,18 +79,18 @@ async def check_one_way_stream(stream_maker, clogged_stream_maker): assert isinstance(s, SendStream) assert isinstance(r, ReceiveStream) - async def do_send_all(data): - with assert_checkpoints(): - assert await s.send_all(data) is None + async def do_send_all(data: bytes | bytearray | memoryview) -> None: + with assert_checkpoints(): # We're testing that it doesn't return anything. 
+ assert await s.send_all(data) is None # type: ignore[func-returns-value] - async def do_receive_some(*args): + async def do_receive_some(max_bytes: int | None = None) -> bytes | bytearray: with assert_checkpoints(): - return await r.receive_some(*args) + return await r.receive_some(max_bytes) - async def checked_receive_1(expected): + async def checked_receive_1(expected: bytes) -> None: assert await do_receive_some(1) == expected - async def do_aclose(resource): + async def do_aclose(resource: AsyncResource) -> None: with assert_checkpoints(): await resource.aclose() @@ -87,7 +99,7 @@ async def do_aclose(resource): nursery.start_soon(do_send_all, b"x") nursery.start_soon(checked_receive_1, b"x") - async def send_empty_then_y(): + async def send_empty_then_y() -> None: # Streams should tolerate sending b"" without giving it any # special meaning. await do_send_all(b"") @@ -114,7 +126,7 @@ async def send_empty_then_y(): with _assert_raises(ValueError): await r.receive_some(0) with _assert_raises(TypeError): - await r.receive_some(1.5) + await r.receive_some(1.5) # type: ignore[arg-type] # it can also be missing or None async with _core.open_nursery() as nursery: nursery.start_soon(do_send_all, b"x") @@ -133,7 +145,9 @@ async def send_empty_then_y(): # for send_all to wait until receive_some is called to run, though; a # stream doesn't *have* to have any internal buffering. That's why we # start a concurrent receive_some call, then cancel it.) - async def simple_check_wait_send_all_might_not_block(scope): + async def simple_check_wait_send_all_might_not_block( + scope: CancelScope, + ) -> None: with assert_checkpoints(): await s.wait_send_all_might_not_block() scope.cancel() @@ -146,7 +160,7 @@ async def simple_check_wait_send_all_might_not_block(scope): # closing the r side leads to BrokenResourceError on the s side # (eventually) - async def expect_broken_stream_on_send(): + async def expect_broken_stream_on_send() -> None: with _assert_raises(_core.BrokenResourceError): while True: await do_send_all(b"x" * 100) @@ -189,11 +203,11 @@ async def expect_broken_stream_on_send(): async with _ForceCloseBoth(await stream_maker()) as (s, r): # if send-then-graceful-close, receiver gets data then b"" - async def send_then_close(): + async def send_then_close() -> None: await do_send_all(b"y") await do_aclose(s) - async def receive_send_then_close(): + async def receive_send_then_close() -> None: # We want to make sure that if the sender closes the stream before # we read anything, then we still get all the data. But some # streams might block on the do_send_all call. 
So we let the @@ -258,9 +272,13 @@ async def receive_send_then_close(): # https://github.com/python-trio/trio/issues/77 async with _ForceCloseBoth(await stream_maker()) as (s, r): - async def expect_cancelled(afn, *args): + async def expect_cancelled( + afn: Callable[ArgsT, Awaitable[object]], + *args: ArgsT.args, + **kwargs: ArgsT.kwargs, + ) -> None: with _assert_raises(_core.Cancelled): - await afn(*args) + await afn(*args, **kwargs) with _core.CancelScope() as scope: scope.cancel() @@ -288,16 +306,16 @@ async def receive_expecting_closed(): # check wait_send_all_might_not_block, if we can if clogged_stream_maker is not None: async with _ForceCloseBoth(await clogged_stream_maker()) as (s, r): - record = [] + record: list[str] = [] - async def waiter(cancel_scope): + async def waiter(cancel_scope: CancelScope) -> None: record.append("waiter sleeping") with assert_checkpoints(): await s.wait_send_all_might_not_block() record.append("waiter wokeup") cancel_scope.cancel() - async def receiver(): + async def receiver() -> None: # give wait_send_all_might_not_block a chance to block await _core.wait_all_tasks_blocked() record.append("receiver starting") @@ -343,14 +361,14 @@ async def receiver(): # with or without an exception async with _ForceCloseBoth(await clogged_stream_maker()) as (s, r): - async def sender(): + async def sender() -> None: try: with assert_checkpoints(): await s.wait_send_all_might_not_block() except _core.BrokenResourceError: # pragma: no cover pass - async def receiver(): + async def receiver() -> None: await _core.wait_all_tasks_blocked() await aclose_forcefully(r) @@ -369,7 +387,7 @@ async def receiver(): # Check that if a task is blocked in a send-side method, then closing # the send stream causes it to wake up. - async def close_soon(s): + async def close_soon(s: SendStream) -> None: await _core.wait_all_tasks_blocked() await aclose_forcefully(s) @@ -386,7 +404,10 @@ async def close_soon(s): await s.wait_send_all_might_not_block() -async def check_two_way_stream(stream_maker, clogged_stream_maker): +async def check_two_way_stream( + stream_maker: StreamMaker[Stream, Stream], + clogged_stream_maker: StreamMaker[Stream, Stream] | None, +) -> None: """Perform a number of generic tests on a custom two-way stream implementation. 
@@ -401,13 +422,15 @@ async def check_two_way_stream(stream_maker, clogged_stream_maker): """ await check_one_way_stream(stream_maker, clogged_stream_maker) - async def flipped_stream_maker(): - return reversed(await stream_maker()) + async def flipped_stream_maker() -> tuple[Stream, Stream]: + return (await stream_maker())[::-1] + + flipped_clogged_stream_maker: Callable[[], Awaitable[tuple[Stream, Stream]]] | None if clogged_stream_maker is not None: - async def flipped_clogged_stream_maker(): - return reversed(await clogged_stream_maker()) + async def flipped_clogged_stream_maker() -> tuple[Stream, Stream]: + return (await clogged_stream_maker())[::-1] else: flipped_clogged_stream_maker = None @@ -425,7 +448,9 @@ async def flipped_clogged_stream_maker(): i = r.getrandbits(8 * DUPLEX_TEST_SIZE) test_data = i.to_bytes(DUPLEX_TEST_SIZE, "little") - async def sender(s, data, seed): + async def sender( + s: Stream, data: bytes | bytearray | memoryview, seed: int + ) -> None: r = random.Random(seed) m = memoryview(data) while m: @@ -433,7 +458,7 @@ async def sender(s, data, seed): await s.send_all(m[:chunk_size]) m = m[chunk_size:] - async def receiver(s, data, seed): + async def receiver(s: Stream, data: bytes | bytearray, seed: int) -> None: r = random.Random(seed) got = bytearray() while len(got) < len(data): @@ -448,7 +473,7 @@ async def receiver(s, data, seed): nursery.start_soon(receiver, s1, test_data[::-1], 2) nursery.start_soon(receiver, s2, test_data, 3) - async def expect_receive_some_empty(): + async def expect_receive_some_empty() -> None: assert await s2.receive_some(10) == b"" await s2.aclose() @@ -457,7 +482,10 @@ async def expect_receive_some_empty(): nursery.start_soon(s1.aclose) -async def check_half_closeable_stream(stream_maker, clogged_stream_maker): +async def check_half_closeable_stream( + stream_maker: StreamMaker[HalfCloseableStream, HalfCloseableStream], + clogged_stream_maker: StreamMaker[HalfCloseableStream, HalfCloseableStream] | None, +) -> None: """Perform a number of generic tests on a custom half-closeable stream implementation. @@ -476,12 +504,12 @@ async def check_half_closeable_stream(stream_maker, clogged_stream_maker): assert isinstance(s1, HalfCloseableStream) assert isinstance(s2, HalfCloseableStream) - async def send_x_then_eof(s): + async def send_x_then_eof(s: HalfCloseableStream) -> None: await s.send_all(b"x") with assert_checkpoints(): await s.send_eof() - async def expect_x_then_eof(r): + async def expect_x_then_eof(r: HalfCloseableStream) -> None: await _core.wait_all_tasks_blocked() assert await r.receive_some(10) == b"x" assert await r.receive_some(10) == b"" diff --git a/trio/testing/_checkpoints.py b/trio/testing/_checkpoints.py index 5804295300..4a4047813b 100644 --- a/trio/testing/_checkpoints.py +++ b/trio/testing/_checkpoints.py @@ -1,10 +1,14 @@ -from contextlib import contextmanager +from __future__ import annotations + +from collections.abc import Generator +from contextlib import AbstractContextManager, contextmanager from .. 
import _core @contextmanager -def _assert_yields_or_not(expected): +def _assert_yields_or_not(expected: bool) -> Generator[None, None, None]: + """Check if checkpoints are executed in a block of code.""" __tracebackhide__ = True task = _core.current_task() orig_cancel = task._cancel_points @@ -22,7 +26,7 @@ def _assert_yields_or_not(expected): raise AssertionError("assert_no_checkpoints block yielded!") -def assert_checkpoints(): +def assert_checkpoints() -> AbstractContextManager[None]: """Use as a context manager to check that the code inside the ``with`` block either exits with an exception or executes at least one :ref:`checkpoint `. @@ -42,7 +46,7 @@ def assert_checkpoints(): return _assert_yields_or_not(True) -def assert_no_checkpoints(): +def assert_no_checkpoints() -> AbstractContextManager[None]: """Use as a context manager to check that the code inside the ``with`` block does not execute any :ref:`checkpoints `. diff --git a/trio/testing/_memory_streams.py b/trio/testing/_memory_streams.py index 38e8e54de8..fc23fae842 100644 --- a/trio/testing/_memory_streams.py +++ b/trio/testing/_memory_streams.py @@ -1,16 +1,30 @@ +from __future__ import annotations + import operator +from typing import TYPE_CHECKING, Awaitable, Callable, TypeVar from .. import _core, _util from .._highlevel_generic import StapledStream from ..abc import ReceiveStream, SendStream +if TYPE_CHECKING: + from typing_extensions import TypeAlias + + +AsyncHook: TypeAlias = Callable[[], Awaitable[object]] +# Would be nice to exclude awaitable here, but currently not possible. +SyncHook: TypeAlias = Callable[[], object] +SendStreamT = TypeVar("SendStreamT", bound=SendStream) +ReceiveStreamT = TypeVar("ReceiveStreamT", bound=ReceiveStream) + + ################################################################ # In-memory streams - Unbounded buffer version ################################################################ class _UnboundedByteQueue: - def __init__(self): + def __init__(self) -> None: self._data = bytearray() self._closed = False self._lot = _core.ParkingLot() @@ -22,28 +36,28 @@ def __init__(self): # channel: so after close(), calling put() raises ClosedResourceError, and # calling the get() variants drains the buffer and then returns an empty # bytearray. 
- def close(self): + def close(self) -> None: self._closed = True self._lot.unpark_all() - def close_and_wipe(self): + def close_and_wipe(self) -> None: self._data = bytearray() self.close() - def put(self, data): + def put(self, data: bytes | bytearray | memoryview) -> None: if self._closed: raise _core.ClosedResourceError("virtual connection closed") self._data += data self._lot.unpark_all() - def _check_max_bytes(self, max_bytes): + def _check_max_bytes(self, max_bytes: int | None) -> None: if max_bytes is None: return max_bytes = operator.index(max_bytes) if max_bytes < 1: raise ValueError("max_bytes must be >= 1") - def _get_impl(self, max_bytes): + def _get_impl(self, max_bytes: int | None) -> bytearray: assert self._closed or self._data if max_bytes is None: max_bytes = len(self._data) @@ -55,14 +69,14 @@ def _get_impl(self, max_bytes): else: return bytearray() - def get_nowait(self, max_bytes=None): + def get_nowait(self, max_bytes: int | None = None) -> bytearray: with self._fetch_lock: self._check_max_bytes(max_bytes) if not self._closed and not self._data: raise _core.WouldBlock return self._get_impl(max_bytes) - async def get(self, max_bytes=None): + async def get(self, max_bytes: int | None = None) -> bytearray: with self._fetch_lock: self._check_max_bytes(max_bytes) if not self._closed and not self._data: @@ -95,9 +109,9 @@ class MemorySendStream(SendStream, metaclass=_util.Final): def __init__( self, - send_all_hook=None, - wait_send_all_might_not_block_hook=None, - close_hook=None, + send_all_hook: AsyncHook | None = None, + wait_send_all_might_not_block_hook: AsyncHook | None = None, + close_hook: SyncHook | None = None, ): self._conflict_detector = _util.ConflictDetector( "another task is using this stream" @@ -107,7 +121,7 @@ def __init__( self.wait_send_all_might_not_block_hook = wait_send_all_might_not_block_hook self.close_hook = close_hook - async def send_all(self, data): + async def send_all(self, data: bytes | bytearray | memoryview) -> None: """Places the given data into the object's internal buffer, and then calls the :attr:`send_all_hook` (if any). @@ -121,12 +135,12 @@ async def send_all(self, data): if self.send_all_hook is not None: await self.send_all_hook() - async def wait_send_all_might_not_block(self): + async def wait_send_all_might_not_block(self) -> None: """Calls the :attr:`wait_send_all_might_not_block_hook` (if any), and then returns immediately. """ - # Execute two checkpoints so we have more of a chance to detect + # Execute two checkpoints so that we have more of a chance to detect # buggy user code that calls this twice at the same time. with self._conflict_detector: await _core.checkpoint() @@ -136,7 +150,7 @@ async def wait_send_all_might_not_block(self): if self.wait_send_all_might_not_block_hook is not None: await self.wait_send_all_might_not_block_hook() - def close(self): + def close(self) -> None: """Marks this stream as closed, and then calls the :attr:`close_hook` (if any). @@ -153,12 +167,12 @@ def close(self): if self.close_hook is not None: self.close_hook() - async def aclose(self): + async def aclose(self) -> None: """Same as :meth:`close`, but async.""" self.close() await _core.checkpoint() - async def get_data(self, max_bytes=None): + async def get_data(self, max_bytes: int | None = None) -> bytearray: """Retrieves data from the internal buffer, blocking if necessary. 
Args: @@ -174,7 +188,7 @@ async def get_data(self, max_bytes=None): """ return await self._outgoing.get(max_bytes) - def get_data_nowait(self, max_bytes=None): + def get_data_nowait(self, max_bytes: int | None = None) -> bytearray: """Retrieves data from the internal buffer, but doesn't block. See :meth:`get_data` for details. @@ -203,7 +217,11 @@ class MemoryReceiveStream(ReceiveStream, metaclass=_util.Final): """ - def __init__(self, receive_some_hook=None, close_hook=None): + def __init__( + self, + receive_some_hook: AsyncHook | None = None, + close_hook: SyncHook | None = None, + ): self._conflict_detector = _util.ConflictDetector( "another task is using this stream" ) @@ -212,7 +230,7 @@ def __init__(self, receive_some_hook=None, close_hook=None): self.receive_some_hook = receive_some_hook self.close_hook = close_hook - async def receive_some(self, max_bytes=None): + async def receive_some(self, max_bytes: int | None = None) -> bytearray: """Calls the :attr:`receive_some_hook` (if any), and then retrieves data from the internal buffer, blocking if necessary. @@ -235,7 +253,7 @@ async def receive_some(self, max_bytes=None): raise _core.ClosedResourceError return data - def close(self): + def close(self) -> None: """Discards any pending data from the internal buffer, and marks this stream as closed. @@ -245,21 +263,26 @@ def close(self): if self.close_hook is not None: self.close_hook() - async def aclose(self): + async def aclose(self) -> None: """Same as :meth:`close`, but async.""" self.close() await _core.checkpoint() - def put_data(self, data): + def put_data(self, data: bytes | bytearray | memoryview) -> None: """Appends the given data to the internal buffer.""" self._incoming.put(data) - def put_eof(self): + def put_eof(self) -> None: """Adds an end-of-file marker to the internal buffer.""" self._incoming.close() -def memory_stream_pump(memory_send_stream, memory_receive_stream, *, max_bytes=None): +def memory_stream_pump( + memory_send_stream: MemorySendStream, + memory_receive_stream: MemoryReceiveStream, + *, + max_bytes: int | None = None, +) -> bool: """Take data out of the given :class:`MemorySendStream`'s internal buffer, and put it into the given :class:`MemoryReceiveStream`'s internal buffer. @@ -292,7 +315,7 @@ def memory_stream_pump(memory_send_stream, memory_receive_stream, *, max_bytes=N return True -def memory_stream_one_way_pair(): +def memory_stream_one_way_pair() -> tuple[MemorySendStream, MemoryReceiveStream]: """Create a connected, pure-Python, unidirectional stream with infinite buffering and flexible configuration options. 
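For readers following the new annotations, here is a minimal sketch of how the typed MemorySendStream, MemoryReceiveStream and memory_stream_pump pieces fit together (illustrative only, not part of the patch; it uses only trio.testing's public API and a made-up payload):

    import trio
    from trio.testing import MemoryReceiveStream, MemorySendStream, memory_stream_pump

    async def main() -> None:
        send_stream = MemorySendStream()
        recv_stream = MemoryReceiveStream()

        # send_all only fills the send stream's internal buffer...
        await send_stream.send_all(b"hello")
        # ...and memory_stream_pump moves that buffer over to the receive
        # side, which is the same call that memory_stream_one_way_pair wires
        # up as the send_all_hook so the copy happens automatically.
        memory_stream_pump(send_stream, recv_stream)
        assert await recv_stream.receive_some(10) == b"hello"

    trio.run(main)
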
@@ -319,10 +342,10 @@ def memory_stream_one_way_pair(): send_stream = MemorySendStream() recv_stream = MemoryReceiveStream() - def pump_from_send_stream_to_recv_stream(): + def pump_from_send_stream_to_recv_stream() -> None: memory_stream_pump(send_stream, recv_stream) - async def async_pump_from_send_stream_to_recv_stream(): + async def async_pump_from_send_stream_to_recv_stream() -> None: pump_from_send_stream_to_recv_stream() send_stream.send_all_hook = async_pump_from_send_stream_to_recv_stream @@ -330,7 +353,12 @@ async def async_pump_from_send_stream_to_recv_stream(): return send_stream, recv_stream -def _make_stapled_pair(one_way_pair): +def _make_stapled_pair( + one_way_pair: Callable[[], tuple[SendStreamT, ReceiveStreamT]] +) -> tuple[ + StapledStream[SendStreamT, ReceiveStreamT], + StapledStream[SendStreamT, ReceiveStreamT], +]: pipe1_send, pipe1_recv = one_way_pair() pipe2_send, pipe2_recv = one_way_pair() stream1 = StapledStream(pipe1_send, pipe2_recv) @@ -338,7 +366,12 @@ def _make_stapled_pair(one_way_pair): return stream1, stream2 -def memory_stream_pair(): +def memory_stream_pair() -> ( + tuple[ + StapledStream[MemorySendStream, MemoryReceiveStream], + StapledStream[MemorySendStream, MemoryReceiveStream], + ] +): """Create a connected, pure-Python, bidirectional stream with infinite buffering and flexible configuration options. @@ -421,7 +454,7 @@ async def receiver(): class _LockstepByteQueue: - def __init__(self): + def __init__(self) -> None: self._data = bytearray() self._sender_closed = False self._receiver_closed = False @@ -434,12 +467,12 @@ def __init__(self): "another task is already receiving" ) - def _something_happened(self): + def _something_happened(self) -> None: self._waiters.unpark_all() # Always wakes up when one side is closed, because everyone always reacts # to that. 
- async def _wait_for(self, fn): + async def _wait_for(self, fn: Callable[[], bool]) -> None: while True: if fn(): break @@ -448,15 +481,15 @@ async def _wait_for(self, fn): await self._waiters.park() await _core.checkpoint() - def close_sender(self): + def close_sender(self) -> None: self._sender_closed = True self._something_happened() - def close_receiver(self): + def close_receiver(self) -> None: self._receiver_closed = True self._something_happened() - async def send_all(self, data): + async def send_all(self, data: bytes | bytearray | memoryview) -> None: with self._send_conflict_detector: if self._sender_closed: raise _core.ClosedResourceError @@ -465,13 +498,13 @@ async def send_all(self, data): assert not self._data self._data += data self._something_happened() - await self._wait_for(lambda: not self._data) + await self._wait_for(lambda: self._data == b"") if self._sender_closed: raise _core.ClosedResourceError if self._data and self._receiver_closed: raise _core.BrokenResourceError - async def wait_send_all_might_not_block(self): + async def wait_send_all_might_not_block(self) -> None: with self._send_conflict_detector: if self._sender_closed: raise _core.ClosedResourceError @@ -482,7 +515,7 @@ async def wait_send_all_might_not_block(self): if self._sender_closed: raise _core.ClosedResourceError - async def receive_some(self, max_bytes=None): + async def receive_some(self, max_bytes: int | None = None) -> bytes | bytearray: with self._receive_conflict_detector: # Argument validation if max_bytes is not None: @@ -496,7 +529,7 @@ async def receive_some(self, max_bytes=None): self._receiver_waiting = True self._something_happened() try: - await self._wait_for(lambda: self._data) + await self._wait_for(lambda: self._data != b"") finally: self._receiver_waiting = False if self._receiver_closed: @@ -515,39 +548,39 @@ async def receive_some(self, max_bytes=None): class _LockstepSendStream(SendStream): - def __init__(self, lbq): + def __init__(self, lbq: _LockstepByteQueue): self._lbq = lbq - def close(self): + def close(self) -> None: self._lbq.close_sender() - async def aclose(self): + async def aclose(self) -> None: self.close() await _core.checkpoint() - async def send_all(self, data): + async def send_all(self, data: bytes | bytearray | memoryview) -> None: await self._lbq.send_all(data) - async def wait_send_all_might_not_block(self): + async def wait_send_all_might_not_block(self) -> None: await self._lbq.wait_send_all_might_not_block() class _LockstepReceiveStream(ReceiveStream): - def __init__(self, lbq): + def __init__(self, lbq: _LockstepByteQueue): self._lbq = lbq - def close(self): + def close(self) -> None: self._lbq.close_receiver() - async def aclose(self): + async def aclose(self) -> None: self.close() await _core.checkpoint() - async def receive_some(self, max_bytes=None): + async def receive_some(self, max_bytes: int | None = None) -> bytes | bytearray: return await self._lbq.receive_some(max_bytes) -def lockstep_stream_one_way_pair(): +def lockstep_stream_one_way_pair() -> tuple[SendStream, ReceiveStream]: """Create a connected, pure Python, unidirectional stream where data flows in lockstep. @@ -574,7 +607,12 @@ def lockstep_stream_one_way_pair(): return _LockstepSendStream(lbq), _LockstepReceiveStream(lbq) -def lockstep_stream_pair(): +def lockstep_stream_pair() -> ( + tuple[ + StapledStream[SendStream, ReceiveStream], + StapledStream[SendStream, ReceiveStream], + ] +): """Create a connected, pure-Python, bidirectional stream where data flows in lockstep. 
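Making StapledStream generic (the Generic[SendStreamT, ReceiveStreamT] change earlier in this patch) combines with the memory-stream annotations above so that the stapled halves keep their concrete types. A small usage sketch, illustrative only and not part of the patch, assuming nothing beyond trio.testing's public API:

    import trio
    from trio.testing import memory_stream_pair

    async def main() -> None:
        left, right = memory_stream_pair()
        # The two halves are wired to pump data to each other automatically.
        await left.send_all(b"ping")
        assert await right.receive_some(10) == b"ping"
        # With the new annotations, left.send_stream is typed as
        # MemorySendStream rather than the bare SendStream ABC, so
        # memory-stream-only attributes such as send_all_hook are visible
        # to mypy without a cast.
        assert left.send_stream.send_all_hook is not None

    trio.run(main)
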
diff --git a/trio/testing/_network.py b/trio/testing/_network.py index 615ce2effb..fddbbf0fdc 100644 --- a/trio/testing/_network.py +++ b/trio/testing/_network.py @@ -1,8 +1,10 @@ from .. import socket as tsocket -from .._highlevel_socket import SocketStream +from .._highlevel_socket import SocketListener, SocketStream -async def open_stream_to_socket_listener(socket_listener): +async def open_stream_to_socket_listener( + socket_listener: SocketListener, +) -> SocketStream: """Connect to the given :class:`~trio.SocketListener`. This is particularly useful in tests when you want to let a server pick diff --git a/trio/testing/_trio_test.py b/trio/testing/_trio_test.py index b4ef69ef09..5619352846 100644 --- a/trio/testing/_trio_test.py +++ b/trio/testing/_trio_test.py @@ -1,20 +1,36 @@ +from __future__ import annotations + +from collections.abc import Awaitable, Callable from functools import partial, wraps +from typing import TYPE_CHECKING, TypeVar from .. import _core from ..abc import Clock, Instrument +if TYPE_CHECKING: + from typing_extensions import ParamSpec + + ArgsT = ParamSpec("ArgsT") + + +RetT = TypeVar("RetT") + + +def trio_test(fn: Callable[ArgsT, Awaitable[RetT]]) -> Callable[ArgsT, RetT]: + """Converts an async test function to be synchronous, running via Trio. + + Usage:: + + @trio_test + async def test_whatever(): + await ... + + If a pytest fixture is passed in that subclasses the :class:`~trio.abc.Clock` or + :class:`~trio.abc.Instrument` ABCs, then those are passed to :meth:`trio.run()`. + """ -# Use: -# -# @trio_test -# async def test_whatever(): -# await ... -# -# Also: if a pytest fixture is passed in that subclasses the Clock abc, then -# that clock is passed to trio.run(). -def trio_test(fn): @wraps(fn) - def wrapper(**kwargs): + def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT: __tracebackhide__ = True clocks = [c for c in kwargs.values() if isinstance(c, Clock)] if not clocks: @@ -24,6 +40,8 @@ def wrapper(**kwargs): else: raise ValueError("too many clocks spoil the broth!") instruments = [i for i in kwargs.values() if isinstance(i, Instrument)] - return _core.run(partial(fn, **kwargs), clock=clock, instruments=instruments) + return _core.run( + partial(fn, *args, **kwargs), clock=clock, instruments=instruments + ) return wrapper From bf847ecad026ba6cfa859c9462cc4050ca4fb073 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sat, 19 Aug 2023 21:06:14 -0500 Subject: [PATCH 139/162] Add pre-commit (#2744) * Add pre-commit * Remove python version specifier from black * Add codespell * No more skips * Run pre-commit on all files and fix issues * Revert docs changes and ignore docs formatting in the future * Disable black and isort autoformatting in CI --- .pre-commit-config.yaml | 39 +++++++++++++++++ docs/Makefile | 2 +- docs/source/local_customization.py | 12 +++--- notes-to-self/afd-lab.py | 18 +++++--- notes-to-self/aio-guest-test.py | 4 ++ notes-to-self/atomic-local.py | 2 + notes-to-self/blocking-read-hack.py | 8 +++- notes-to-self/estimate-task-size.py | 7 ++- notes-to-self/fbsd-pipe-close-notify.py | 3 +- notes-to-self/file-read-latency.py | 9 ++-- notes-to-self/graceful-shutdown-idea.py | 4 ++ .../how-does-windows-so-reuseaddr-work.py | 2 +- notes-to-self/loopy.py | 6 ++- notes-to-self/lots-of-tasks.py | 3 ++ notes-to-self/manual-signal-handler.py | 12 ++++-- notes-to-self/measure-listen-backlog.py | 2 + notes-to-self/ntp-example.py | 17 +++++--- notes-to-self/proxy-benchmarks.py | 43 
+++++++++++++++---- notes-to-self/reopen-pipe.py | 2 +- notes-to-self/schedule-timing.py | 3 +- notes-to-self/socket-scaling.py | 6 ++- .../ssl-close-notify/ssl-close-notify.py | 6 +-- notes-to-self/ssl-close-notify/ssl2.py | 8 ++-- notes-to-self/ssl-handshake/ssl-handshake.py | 2 +- notes-to-self/sslobject.py | 2 +- notes-to-self/thread-dispatch-bench.py | 2 +- notes-to-self/time-wait.py | 2 +- notes-to-self/trace.py | 13 ++++-- notes-to-self/trivial-err.py | 9 +++- notes-to-self/trivial.py | 2 + notes-to-self/wakeup-fd-racer.py | 17 +++++--- notes-to-self/win-waitable-timer.py | 26 +++++------ pyproject.toml | 10 +++++ 33 files changed, 220 insertions(+), 83 deletions(-) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..f57321189e --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,39 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-toml + - id: check-merge-conflict + - id: mixed-line-ending + - id: check-case-conflict + - repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black + - repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + - repo: https://github.com/pycqa/flake8 + rev: 6.1.0 + hooks: + - id: flake8 + additional_dependencies: + - "flake8-pyproject==1.2.3" + types: [file] + types_or: [python, pyi] + - repo: https://github.com/codespell-project/codespell + rev: v2.2.5 + hooks: + - id: codespell + +ci: + autofix_commit_msg: "[pre-commit.ci] auto fixes from pre-commit.com hooks" + autofix_prs: true + autoupdate_commit_msg: "[pre-commit.ci] pre-commit autoupdate" + autoupdate_schedule: weekly + skip: [black,isort] + submodules: false diff --git a/docs/Makefile b/docs/Makefile index 4fd0bb58f2..69095d6d90 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -17,4 +17,4 @@ help: # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/source/local_customization.py b/docs/source/local_customization.py index a970ad6e22..f071b6dfbb 100644 --- a/docs/source/local_customization.py +++ b/docs/source/local_customization.py @@ -1,11 +1,11 @@ -from docutils.parsers.rst import directives +from docutils.parsers.rst import directives as directives # noqa: F401 from sphinx import addnodes from sphinx.domains.python import PyClasslike -from sphinx.ext.autodoc import ( - FunctionDocumenter, - MethodDocumenter, - ClassLevelDocumenter, - Options, +from sphinx.ext.autodoc import ( # noqa: F401 + ClassLevelDocumenter as ClassLevelDocumenter, + FunctionDocumenter as FunctionDocumenter, + MethodDocumenter as MethodDocumenter, + Options as Options, ) """ diff --git a/notes-to-self/afd-lab.py b/notes-to-self/afd-lab.py index ed420dbdbd..600975482c 100644 --- a/notes-to-self/afd-lab.py +++ b/notes-to-self/afd-lab.py @@ -77,22 +77,27 @@ # matter, energy, and life which lie close at hand yet can never be detected # with the senses we have." 
-import sys import os.path +import sys + sys.path.insert(0, os.path.abspath(os.path.dirname(__file__) + r"\..")) import trio + print(trio.__file__) -import trio.testing import socket +import trio.testing +from trio._core._io_windows import _afd_helper_handle, _check, _get_base_socket from trio._core._windows_cffi import ( - ffi, kernel32, AFDPollFlags, IoControlCodes, ErrorCodes -) -from trio._core._io_windows import ( - _get_base_socket, _afd_helper_handle, _check + AFDPollFlags, + ErrorCodes, + IoControlCodes, + ffi, + kernel32, ) + class AFDLab: def __init__(self): self._afd = _afd_helper_handle() @@ -173,4 +178,5 @@ async def main(): await trio.sleep(2) nursery.cancel_scope.cancel() + trio.run(main) diff --git a/notes-to-self/aio-guest-test.py b/notes-to-self/aio-guest-test.py index b64a11bd04..17d4bfb9e0 100644 --- a/notes-to-self/aio-guest-test.py +++ b/notes-to-self/aio-guest-test.py @@ -1,10 +1,13 @@ import asyncio + import trio + async def aio_main(): loop = asyncio.get_running_loop() trio_done_fut = loop.create_future() + def trio_done_callback(main_outcome): print(f"trio_main finished: {main_outcome!r}") trio_done_fut.set_result(main_outcome) @@ -35,6 +38,7 @@ async def trio_main(): if n >= 10: return + async def aio_pingpong(from_trio, to_trio): print("aio_pingpong!") diff --git a/notes-to-self/atomic-local.py b/notes-to-self/atomic-local.py index 212c9eef00..429211eaf6 100644 --- a/notes-to-self/atomic-local.py +++ b/notes-to-self/atomic-local.py @@ -3,9 +3,11 @@ # Has to be a string :-( sentinel = "_unique_name" + def f(): print(locals()) + # code(argcount, kwonlyargcount, nlocals, stacksize, flags, codestring, # constants, names, varnames, filename, name, firstlineno, # lnotab[, freevars[, cellvars]]) diff --git a/notes-to-self/blocking-read-hack.py b/notes-to-self/blocking-read-hack.py index b301058e85..f4a73f876d 100644 --- a/notes-to-self/blocking-read-hack.py +++ b/notes-to-self/blocking-read-hack.py @@ -1,13 +1,16 @@ -import trio +import errno import os import socket -import errno + +import trio bad_socket = socket.socket() + class BlockingReadTimeoutError(Exception): pass + async def blocking_read_with_timeout(fd, count, timeout): print("reading from fd", fd) cancel_requested = False @@ -42,4 +45,5 @@ async def kill_it_after_timeout(new_fd): finally: os.close(new_fd) + trio.run(blocking_read_with_timeout, 0, 10, 2) diff --git a/notes-to-self/estimate-task-size.py b/notes-to-self/estimate-task-size.py index 1e8597ba42..0010c7a2b4 100644 --- a/notes-to-self/estimate-task-size.py +++ b/notes-to-self/estimate-task-size.py @@ -1,15 +1,18 @@ # Little script to get a rough estimate of how much memory each task takes import resource + import trio import trio.testing LOW = 1000 HIGH = 10000 + async def tinytask(): await trio.sleep_forever() + async def measure(count): async with trio.open_nursery() as nursery: for _ in range(count): @@ -23,8 +26,8 @@ async def main(): low_usage = await measure(LOW) high_usage = await measure(HIGH + LOW) - print("Memory usage per task:", - (high_usage.ru_maxrss - low_usage.ru_maxrss) / HIGH) + print("Memory usage per task:", (high_usage.ru_maxrss - low_usage.ru_maxrss) / HIGH) print("(kilobytes on Linux, bytes on macOS)") + trio.run(main) diff --git a/notes-to-self/fbsd-pipe-close-notify.py b/notes-to-self/fbsd-pipe-close-notify.py index 7b18f65d6f..ab17f94c3f 100644 --- a/notes-to-self/fbsd-pipe-close-notify.py +++ b/notes-to-self/fbsd-pipe-close-notify.py @@ -4,9 +4,8 @@ # # Upstream bug: 
https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=246350 -import select import os -import threading +import select r, w = os.pipe() diff --git a/notes-to-self/file-read-latency.py b/notes-to-self/file-read-latency.py index 9af1b7222d..132e29dc4f 100644 --- a/notes-to-self/file-read-latency.py +++ b/notes-to-self/file-read-latency.py @@ -8,7 +8,7 @@ # ns per call, instead of ~500 ns/call for the syscall and related overhead. # That's probably more fair -- the BufferedIOBase code can't service random # accesses, even if your working set fits entirely in RAM. -f = open("/etc/passwd", "rb")#, buffering=0) +f = open("/etc/passwd", "rb") # , buffering=0) while True: start = time.perf_counter() @@ -23,5 +23,8 @@ both = (between - start) / COUNT * 1e9 seek = (end - between) / COUNT * 1e9 read = both - seek - print("{:.2f} ns/(seek+read), {:.2f} ns/seek, estimate ~{:.2f} ns/read" - .format(both, seek, read)) + print( + "{:.2f} ns/(seek+read), {:.2f} ns/seek, estimate ~{:.2f} ns/read".format( + both, seek, read + ) + ) diff --git a/notes-to-self/graceful-shutdown-idea.py b/notes-to-self/graceful-shutdown-idea.py index 792344de02..b454d7610a 100644 --- a/notes-to-self/graceful-shutdown-idea.py +++ b/notes-to-self/graceful-shutdown-idea.py @@ -1,5 +1,6 @@ import trio + class GracefulShutdownManager: def __init__(self): self._shutting_down = False @@ -21,6 +22,7 @@ def cancel_on_graceful_shutdown(self): def shutting_down(self): return self._shutting_down + # Code can check gsm.shutting_down occasionally at appropriate points to see # if it should exit. # @@ -31,9 +33,11 @@ async def stream_handler(stream): while True: with gsm.cancel_on_graceful_shutdown(): data = await stream.receive_some() + print(f"{data = }") if gsm.shutting_down: break + # To trigger the shutdown: async def listen_for_shutdown_signals(): with trio.open_signal_receiver(signal.SIGINT, signal.SIGTERM) as signal_aiter: diff --git a/notes-to-self/how-does-windows-so-reuseaddr-work.py b/notes-to-self/how-does-windows-so-reuseaddr-work.py index d8d60d1d66..3189d4d594 100644 --- a/notes-to-self/how-does-windows-so-reuseaddr-work.py +++ b/notes-to-self/how-does-windows-so-reuseaddr-work.py @@ -4,8 +4,8 @@ # # See https://github.com/python-trio/trio/issues/928 for details and context -import socket import errno +import socket modes = ["default", "SO_REUSEADDR", "SO_EXCLUSIVEADDRUSE"] bind_types = ["wildcard", "specific"] diff --git a/notes-to-self/loopy.py b/notes-to-self/loopy.py index 9f893590bd..0297a32dd8 100644 --- a/notes-to-self/loopy.py +++ b/notes-to-self/loopy.py @@ -1,6 +1,8 @@ -import trio import time +import trio + + async def loopy(): try: while True: @@ -9,10 +11,12 @@ async def loopy(): except KeyboardInterrupt: print("KI!") + async def main(): async with trio.open_nursery() as nursery: nursery.start_soon(loopy) nursery.start_soon(loopy) nursery.start_soon(loopy) + trio.run(main) diff --git a/notes-to-self/lots-of-tasks.py b/notes-to-self/lots-of-tasks.py index fca2741de9..048c69a7ec 100644 --- a/notes-to-self/lots-of-tasks.py +++ b/notes-to-self/lots-of-tasks.py @@ -1,12 +1,15 @@ import sys + import trio (COUNT_STR,) = sys.argv[1:] COUNT = int(COUNT_STR) + async def main(): async with trio.open_nursery() as nursery: for _ in range(COUNT): nursery.start_soon(trio.sleep, 1) + trio.run(main) diff --git a/notes-to-self/manual-signal-handler.py b/notes-to-self/manual-signal-handler.py index 39ffeb5a4b..e1b5ee3036 100644 --- a/notes-to-self/manual-signal-handler.py +++ b/notes-to-self/manual-signal-handler.py @@ -3,16 +3,20 @@ if 
os.name == "nt": import cffi + ffi = cffi.FFI() - ffi.cdef(""" + ffi.cdef( + """ void* WINAPI GetProcAddress(void* hModule, char* lpProcName); typedef void (*PyOS_sighandler_t)(int); - """) + """ + ) kernel32 = ffi.dlopen("kernel32.dll") PyOS_getsig_ptr = kernel32.GetProcAddress( - ffi.cast("void*", sys.dllhandle), b"PyOS_getsig") + ffi.cast("void*", sys.dllhandle), b"PyOS_getsig" + ) PyOS_getsig = ffi.cast("PyOS_sighandler_t (*)(int)", PyOS_getsig_ptr) - import signal + PyOS_getsig(signal.SIGINT)(signal.SIGINT) diff --git a/notes-to-self/measure-listen-backlog.py b/notes-to-self/measure-listen-backlog.py index dc32732dfe..b7253b86cc 100644 --- a/notes-to-self/measure-listen-backlog.py +++ b/notes-to-self/measure-listen-backlog.py @@ -1,5 +1,6 @@ import trio + async def run_test(nominal_backlog): print("--\nnominal:", nominal_backlog) @@ -22,5 +23,6 @@ async def run_test(nominal_backlog): for client_sock in client_socks: client_sock.close() + for nominal_backlog in [10, trio.socket.SOMAXCONN, 65535]: trio.run(run_test, nominal_backlog) diff --git a/notes-to-self/ntp-example.py b/notes-to-self/ntp-example.py index 44db8cc873..2bb9f80fb3 100644 --- a/notes-to-self/ntp-example.py +++ b/notes-to-self/ntp-example.py @@ -3,9 +3,11 @@ # - use the hostname "2.pool.ntp.org" # (see: https://news.ntppool.org/2011/06/continuing-ipv6-deployment/) -import trio -import struct import datetime +import struct + +import trio + def make_query_packet(): """Construct a UDP packet suitable for querying an NTP server to ask for @@ -27,6 +29,7 @@ def make_query_packet(): return packet + def extract_transmit_timestamp(ntp_packet): """Given an NTP packet, extract the "transmit timestamp" field, as a Python datetime.""" @@ -49,15 +52,16 @@ def extract_transmit_timestamp(ntp_packet): offset = datetime.timedelta(seconds=seconds + fraction / 2**32) return base_time + offset + async def main(): print("Our clock currently reads (in UTC):", datetime.datetime.utcnow()) # Look up some random NTP servers. # (See www.pool.ntp.org for information about the NTP pool.) 
servers = await trio.socket.getaddrinfo( - "pool.ntp.org", # host - "ntp", # port - family=trio.socket.AF_INET, # IPv4 + "pool.ntp.org", # host + "ntp", # port + family=trio.socket.AF_INET, # IPv4 type=trio.socket.SOCK_DGRAM, # UDP ) @@ -66,7 +70,7 @@ async def main(): # Create a UDP socket udp_sock = trio.socket.socket( - family=trio.socket.AF_INET, # IPv4 + family=trio.socket.AF_INET, # IPv4 type=trio.socket.SOCK_DGRAM, # UDP ) @@ -88,4 +92,5 @@ async def main(): transmit_timestamp = extract_transmit_timestamp(data) print("Their clock read (in UTC):", transmit_timestamp) + trio.run(main) diff --git a/notes-to-self/proxy-benchmarks.py b/notes-to-self/proxy-benchmarks.py index a45d94d056..ea92e10c6f 100644 --- a/notes-to-self/proxy-benchmarks.py +++ b/notes-to-self/proxy-benchmarks.py @@ -3,6 +3,7 @@ methods = {"fileno"} + class Proxy1: strategy = "__getattr__" works_for = "any attr" @@ -15,8 +16,10 @@ def __getattr__(self, name): return getattr(self._wrapped, name) raise AttributeError(name) + ################################################################ + class Proxy2: strategy = "generated methods (getattr + closure)" works_for = "methods" @@ -24,16 +27,20 @@ class Proxy2: def __init__(self, wrapped): self._wrapped = wrapped + def add_wrapper(cls, method): def wrapper(self, *args, **kwargs): return getattr(self._wrapped, method)(*args, **kwargs) + setattr(cls, method, wrapper) + for method in methods: add_wrapper(Proxy2, method) ################################################################ + class Proxy3: strategy = "generated methods (exec)" works_for = "methods" @@ -41,20 +48,27 @@ class Proxy3: def __init__(self, wrapped): self._wrapped = wrapped + def add_wrapper(cls, method): - code = textwrap.dedent(""" + code = textwrap.dedent( + """ def wrapper(self, *args, **kwargs): return self._wrapped.{}(*args, **kwargs) - """.format(method)) + """.format( + method + ) + ) ns = {} exec(code, ns) setattr(cls, method, ns["wrapper"]) + for method in methods: add_wrapper(Proxy3, method) ################################################################ + class Proxy4: strategy = "generated properties (getattr + closure)" works_for = "any attr" @@ -62,6 +76,7 @@ class Proxy4: def __init__(self, wrapped): self._wrapped = wrapped + def add_wrapper(cls, attr): def getter(self): return getattr(self._wrapped, attr) @@ -74,11 +89,13 @@ def deleter(self): setattr(cls, attr, property(getter, setter, deleter)) + for method in methods: add_wrapper(Proxy4, method) ################################################################ + class Proxy5: strategy = "generated properties (exec)" works_for = "any attr" @@ -86,8 +103,10 @@ class Proxy5: def __init__(self, wrapped): self._wrapped = wrapped + def add_wrapper(cls, attr): - code = textwrap.dedent(""" + code = textwrap.dedent( + """ def getter(self): return self._wrapped.{attr} @@ -96,16 +115,21 @@ def setter(self, newval): def deleter(self): del self._wrapped.{attr} - """.format(attr=attr)) + """.format( + attr=attr + ) + ) ns = {} exec(code, ns) setattr(cls, attr, property(ns["getter"], ns["setter"], ns["deleter"])) + for method in methods: add_wrapper(Proxy5, method) ################################################################ + # methods only class Proxy6: strategy = "copy attrs from wrappee to wrapper" @@ -116,17 +140,19 @@ def __init__(self, wrapper): for method in methods: setattr(self, method, getattr(self._wrapper, method)) - + ################################################################ classes = [Proxy1, Proxy2, Proxy3, Proxy4, Proxy5, 
Proxy6] + def check(cls): with open("/etc/passwd") as f: p = cls(f) assert p.fileno() == f.fileno() + for cls in classes: check(cls) @@ -135,7 +161,7 @@ def check(cls): COUNT = 1000000 try: - import __pypy__ + import __pypy__ # noqa: F401 # __pypy__ imported but unused except ImportError: pass else: @@ -147,8 +173,7 @@ def check(cls): start = time.perf_counter() for _ in range(COUNT): obj.fileno() - #obj.fileno + # obj.fileno end = time.perf_counter() per_usec = COUNT / (end - start) / 1e6 - print("{:7.2f} / us: {} ({})" - .format(per_usec, obj.strategy, obj.works_for)) + print("{:7.2f} / us: {} ({})".format(per_usec, obj.strategy, obj.works_for)) diff --git a/notes-to-self/reopen-pipe.py b/notes-to-self/reopen-pipe.py index 5e5b31e41f..dbccd567d7 100644 --- a/notes-to-self/reopen-pipe.py +++ b/notes-to-self/reopen-pipe.py @@ -1,7 +1,7 @@ import os +import tempfile import threading import time -import tempfile def check_reopen(r1, w): diff --git a/notes-to-self/schedule-timing.py b/notes-to-self/schedule-timing.py index 176dcf9220..c84ec9a436 100644 --- a/notes-to-self/schedule-timing.py +++ b/notes-to-self/schedule-timing.py @@ -1,6 +1,7 @@ -import trio import time +import trio + LOOPS = 0 RUNNING = True diff --git a/notes-to-self/socket-scaling.py b/notes-to-self/socket-scaling.py index 1571be4d17..bd7e32ef7f 100644 --- a/notes-to-self/socket-scaling.py +++ b/notes-to-self/socket-scaling.py @@ -17,13 +17,16 @@ # # or similar. +import socket import time + import trio import trio.testing -import socket + async def main(): for total in [10, 100, 500, 1_000, 10_000, 20_000, 30_000]: + def pt(desc, *, count=total, item="socket"): nonlocal last_time now = time.perf_counter() @@ -53,4 +56,5 @@ def pt(desc, *, count=total, item="socket"): sock.close() pt("closing sockets") + trio.run(main) diff --git a/notes-to-self/ssl-close-notify/ssl-close-notify.py b/notes-to-self/ssl-close-notify/ssl-close-notify.py index cd4b450de8..32ecbea2f0 100644 --- a/notes-to-self/ssl-close-notify/ssl-close-notify.py +++ b/notes-to-self/ssl-close-notify/ssl-close-notify.py @@ -22,6 +22,7 @@ client_done = threading.Event() + def server_thread_fn(): server_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) server_ctx.load_cert_chain("trio-test-1.pem") @@ -42,13 +43,12 @@ def server_thread_fn(): break server.sendall(data) + server_thread = threading.Thread(target=server_thread_fn) server_thread.start() client_ctx = ssl.create_default_context(cafile="trio-test-CA.pem") -client = client_ctx.wrap_socket( - client_sock, - server_hostname="trio-test-1.example.org") +client = client_ctx.wrap_socket(client_sock, server_hostname="trio-test-1.example.org") # Now we have two SSLSockets that have established an encrypted connection diff --git a/notes-to-self/ssl-close-notify/ssl2.py b/notes-to-self/ssl-close-notify/ssl2.py index 32a68e1495..54ee1fb9b6 100644 --- a/notes-to-self/ssl-close-notify/ssl2.py +++ b/notes-to-self/ssl-close-notify/ssl2.py @@ -5,7 +5,7 @@ import ssl import threading -#client_sock, server_sock = socket.socketpair() +# client_sock, server_sock = socket.socketpair() listen_sock = socket.socket() listen_sock.bind(("127.0.0.1", 0)) listen_sock.listen(1) @@ -52,12 +52,12 @@ server.shutdown(socket.SHUT_WR) # Attempting to read/write to the fd after it's closed should raise EBADF -#os.close(server.fileno()) +# os.close(server.fileno()) # Attempting to read/write to an fd opened with O_DIRECT raises EINVAL in most # cases (unless you're very careful with alignment etc. 
which openssl isn't) -#os.dup2(os.open("/tmp/blah-example-file", os.O_RDWR | os.O_CREAT | os.O_DIRECT), server.fileno()) +# os.dup2(os.open("/tmp/blah-example-file", os.O_RDWR | os.O_CREAT | os.O_DIRECT), server.fileno()) # Sending or receiving server.sendall(b"hello") -#server.recv(10) +# server.recv(10) diff --git a/notes-to-self/ssl-handshake/ssl-handshake.py b/notes-to-self/ssl-handshake/ssl-handshake.py index 18a0e1a675..e906bc2a87 100644 --- a/notes-to-self/ssl-handshake/ssl-handshake.py +++ b/notes-to-self/ssl-handshake/ssl-handshake.py @@ -1,5 +1,5 @@ -import ssl import socket +import ssl import threading from contextlib import contextmanager diff --git a/notes-to-self/sslobject.py b/notes-to-self/sslobject.py index 0692af319c..a6e7b07a08 100644 --- a/notes-to-self/sslobject.py +++ b/notes-to-self/sslobject.py @@ -1,5 +1,5 @@ -from contextlib import contextmanager import ssl +from contextlib import contextmanager client_ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH) client_ctx.check_hostname = False diff --git a/notes-to-self/thread-dispatch-bench.py b/notes-to-self/thread-dispatch-bench.py index 9afb4bbec8..70547a6000 100644 --- a/notes-to-self/thread-dispatch-bench.py +++ b/notes-to-self/thread-dispatch-bench.py @@ -5,8 +5,8 @@ # trio.to_thread.run_sync import threading -from queue import Queue import time +from queue import Queue COUNT = 10000 diff --git a/notes-to-self/time-wait.py b/notes-to-self/time-wait.py index 08c71b0048..772f6c2727 100644 --- a/notes-to-self/time-wait.py +++ b/notes-to-self/time-wait.py @@ -26,8 +26,8 @@ # Also, it must be set on listen2 before calling bind(), or it will conflict # with the lingering server1 socket. -import socket import errno +import socket import attr diff --git a/notes-to-self/trace.py b/notes-to-self/trace.py index c024a36ba5..aa68fac125 100644 --- a/notes-to-self/trace.py +++ b/notes-to-self/trace.py @@ -1,8 +1,9 @@ -import trio -import os import json +import os from itertools import count +import trio + # Experiment with generating Chrome Event Trace format, which can be browsed # through chrome://tracing or other mechanisms. # @@ -29,6 +30,7 @@ # let us also show "task is running", because neither kind of event is # strictly nested inside the other + class Trace(trio.abc.Instrument): def __init__(self, out): self.out = out @@ -108,14 +110,14 @@ def task_scheduled(self, task): def before_io_wait(self, timeout): self._write( - name=f"I/O wait", + name="I/O wait", ph="B", tid=-1, ) def after_io_wait(self, timeout): self._write( - name=f"I/O wait", + name="I/O wait", ph="E", tid=-1, ) @@ -126,11 +128,13 @@ async def child1(): await trio.sleep(1) print(" child1: exiting!") + async def child2(): print(" child2: started! 
sleeping now...") await trio.sleep(1) print(" child2: exiting!") + async def parent(): print("parent: started!") async with trio.open_nursery() as nursery: @@ -144,5 +148,6 @@ async def parent(): # -- we exit the nursery block here -- print("parent: all done!") + t = Trace(open("/tmp/t.json", "w")) trio.run(parent, instruments=[t]) diff --git a/notes-to-self/trivial-err.py b/notes-to-self/trivial-err.py index ed11ec33e6..6c32617c74 100644 --- a/notes-to-self/trivial-err.py +++ b/notes-to-self/trivial-err.py @@ -1,26 +1,33 @@ import sys + import trio sys.stderr = sys.stdout + async def child1(): raise ValueError + async def child2(): async with trio.open_nursery() as nursery: nursery.start_soon(grandchild1) nursery.start_soon(grandchild2) + async def grandchild1(): raise KeyError + async def grandchild2(): raise NameError("Bob") + async def main(): async with trio.open_nursery() as nursery: nursery.start_soon(child1) nursery.start_soon(child2) - #nursery.start_soon(grandchild1) + # nursery.start_soon(grandchild1) + trio.run(main) diff --git a/notes-to-self/trivial.py b/notes-to-self/trivial.py index 6852d63200..405d92daf5 100644 --- a/notes-to-self/trivial.py +++ b/notes-to-self/trivial.py @@ -1,8 +1,10 @@ import trio + async def foo(): print("in foo!") return 3 + print("running!") print(trio.run(foo)) diff --git a/notes-to-self/wakeup-fd-racer.py b/notes-to-self/wakeup-fd-racer.py index c6ef6caec1..b56cbdc91c 100644 --- a/notes-to-self/wakeup-fd-racer.py +++ b/notes-to-self/wakeup-fd-racer.py @@ -1,19 +1,21 @@ +import itertools import os +import select import signal +import socket import threading import time -import socket -import select -import itertools # Equivalent to the C function raise(), which Python doesn't wrap if os.name == "nt": import cffi + _ffi = cffi.FFI() _ffi.cdef("int raise(int);") _lib = _ffi.dlopen("api-ms-win-crt-runtime-l1-1-0.dll") signal_raise = getattr(_lib, "raise") else: + def signal_raise(signum): # Use pthread_kill to make sure we're actually using the wakeup fd on # Unix @@ -26,7 +28,7 @@ def raise_SIGINT_soon(): # Sending 2 signals becomes reliable, as we'd expect (because we need # set-flags -> write-to-fd, and doing it twice does # write-to-fd -> set-flags -> write-to-fd -> set-flags) - #signal_raise(signal.SIGINT) + # signal_raise(signal.SIGINT) def drain(sock): @@ -87,8 +89,10 @@ def main(): # them. 
duration = time.perf_counter() - start if duration < 2: - print(f"Attempt {attempt}: OK, trying again " - f"(select_calls = {select_calls}, drained = {drained})") + print( + f"Attempt {attempt}: OK, trying again " + f"(select_calls = {select_calls}, drained = {drained})" + ) else: print(f"Attempt {attempt}: FAILED, took {duration} seconds") print(f"select_calls = {select_calls}, drained = {drained}") @@ -96,5 +100,6 @@ def main(): thread.join() + if __name__ == "__main__": main() diff --git a/notes-to-self/win-waitable-timer.py b/notes-to-self/win-waitable-timer.py index 92bfd7a39a..5309f43867 100644 --- a/notes-to-self/win-waitable-timer.py +++ b/notes-to-self/win-waitable-timer.py @@ -24,12 +24,12 @@ # make this fairly straightforward, but you obviously need to use a separate # time source -import cffi from datetime import datetime, timedelta, timezone -import time + +import cffi import trio -from trio._core._windows_cffi import (ffi, kernel32, raise_winerror) +from trio._core._windows_cffi import ffi, kernel32, raise_winerror try: ffi.cdef( @@ -91,7 +91,7 @@ LPFILETIME lpFileTime ); """, - override=True + override=True, ) ProcessLeapSecondInfo = 8 @@ -106,10 +106,10 @@ def set_leap_seconds_enabled(enabled): plsi.Flags = 0 plsi.Reserved = 0 if not kernel32.SetProcessInformation( - ffi.cast("HANDLE", -1), # current process - ProcessLeapSecondInfo, - plsi, - ffi.sizeof("PROCESS_LEAP_SECOND_INFO"), + ffi.cast("HANDLE", -1), # current process + ProcessLeapSecondInfo, + plsi, + ffi.sizeof("PROCESS_LEAP_SECOND_INFO"), ): raise_winerror() @@ -135,9 +135,7 @@ def now_as_filetime(): # https://www.epochconverter.com/ldap # FILETIME_TICKS_PER_SECOND = 10**7 -FILETIME_EPOCH = datetime.strptime( - '1601-01-01 00:00:00 Z', '%Y-%m-%d %H:%M:%S %z' -) +FILETIME_EPOCH = datetime.strptime("1601-01-01 00:00:00 Z", "%Y-%m-%d %H:%M:%S %z") # XXX THE ABOVE IS WRONG: # # https://techcommunity.microsoft.com/t5/networking-blog/leap-seconds-for-the-appdev-what-you-should-know/ba-p/339813# @@ -159,11 +157,9 @@ def now_as_filetime(): def py_datetime_to_win_filetime(dt): # We'll want to call this on every datetime as it comes in - #dt = dt.astimezone(timezone.utc) + # dt = dt.astimezone(timezone.utc) assert dt.tzinfo is timezone.utc - return round( - (dt - FILETIME_EPOCH).total_seconds() * FILETIME_TICKS_PER_SECOND - ) + return round((dt - FILETIME_EPOCH).total_seconds() * FILETIME_TICKS_PER_SECOND) async def main(): diff --git a/pyproject.toml b/pyproject.toml index 6893927337..9157dd8b38 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,11 @@ [tool.black] target-version = ['py38'] +force-exclude = ''' +( + ^/docs/source/reference-.* + | ^/docs/source/tutorial +) +''' [tool.codespell] ignore-words-list = 'astroid,crasher,asend' @@ -20,6 +26,10 @@ per-file-ignores = [ combine_as_imports = true profile = "black" skip_gitignore = true +skip_glob = [ + "docs/source/reference-*", + "docs/source/tutorial/*" +] [tool.mypy] python_version = "3.8" From 9dbf21c0b3f988a08c8708f68389d6f7c353711b Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Mon, 21 Aug 2023 20:38:25 +1000 Subject: [PATCH 140/162] Type remaining `_highlevel` modules (#2756) * Type _highlevel_open_unix_stream * Add types to _highlevel_socket * Add types to _highlevel_serve_listeners / _ssl_helpers * Update verify_types.json * Add the same TypeErrors to SocketStream.setsockopt() * Enable strict options for highlevel modules * Remove generics here * Remove unused type: ignores --- docs/source/conf.py | 1 + pyproject.toml | 5 --- 
trio/_highlevel_open_unix_stream.py | 19 +++++++++-- trio/_highlevel_serve_listeners.py | 34 ++++++++++++++++--- trio/_highlevel_socket.py | 51 +++++++++++++++++++++++----- trio/_highlevel_ssl_helpers.py | 43 +++++++++++++---------- trio/_socket.py | 2 +- trio/_tests/test_highlevel_socket.py | 3 ++ trio/_tests/test_socket.py | 29 +++++++++------- trio/_tests/verify_types.json | 18 +++------- 10 files changed, 142 insertions(+), 63 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index b6d5e63043..66aa8dea05 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -59,6 +59,7 @@ # these are not defined in https://docs.python.org/3/objects.inv ("py:class", "socket.AddressFamily"), ("py:class", "socket.SocketKind"), + ("py:class", "Buffer"), # collections.abc.Buffer, in 3.12 ] autodoc_inherit_docstrings = False default_role = "obj" diff --git a/pyproject.toml b/pyproject.toml index 9157dd8b38..603d5dbe44 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,11 +62,6 @@ disallow_untyped_calls = false module = [ # 2745 "trio/_ssl", -# 2756 -"trio/_highlevel_open_unix_stream", -"trio/_highlevel_serve_listeners", -"trio/_highlevel_ssl_helpers", -"trio/_highlevel_socket", # 2755 "trio/_core/_windows_cffi", "trio/_wait_for_object", diff --git a/trio/_highlevel_open_unix_stream.py b/trio/_highlevel_open_unix_stream.py index c2c3a3ca7c..c05b8f3fc8 100644 --- a/trio/_highlevel_open_unix_stream.py +++ b/trio/_highlevel_open_unix_stream.py @@ -1,9 +1,22 @@ +from __future__ import annotations + import os +from collections.abc import Generator from contextlib import contextmanager +from typing import Protocol, TypeVar import trio from trio.socket import SOCK_STREAM, socket + +class Closable(Protocol): + def close(self) -> None: + ... + + +CloseT = TypeVar("CloseT", bound=Closable) + + try: from trio.socket import AF_UNIX @@ -13,7 +26,7 @@ @contextmanager -def close_on_error(obj): +def close_on_error(obj: CloseT) -> Generator[CloseT, None, None]: try: yield obj except: @@ -21,7 +34,9 @@ def close_on_error(obj): raise -async def open_unix_socket(filename): +async def open_unix_socket( + filename: str | bytes | os.PathLike[str] | os.PathLike[bytes], +) -> trio.SocketStream: """Opens a connection to the specified `Unix domain socket `__. 
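Before the next file's hunks, a brief sketch of calling the newly annotated open_unix_socket helper; it is not part of the patch. trio.open_unix_socket, SocketStream.send_all, and SocketStream.receive_some are real (Unix-only) API; the socket path and the function name talk_to_daemon are made-up placeholders.

    import trio

    async def talk_to_daemon() -> bytes:
        # open_unix_socket() accepts a str/bytes/PathLike filename and
        # returns a trio.SocketStream connected to that Unix domain socket.
        stream = await trio.open_unix_socket("/tmp/example.sock")  # placeholder path
        async with stream:
            await stream.send_all(b"hello")
            return await stream.receive_some(1024)
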
diff --git a/trio/_highlevel_serve_listeners.py b/trio/_highlevel_serve_listeners.py index 0585fa516f..d5c7a3bdad 100644 --- a/trio/_highlevel_serve_listeners.py +++ b/trio/_highlevel_serve_listeners.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import errno import logging import os +from typing import Any, Awaitable, Callable, NoReturn, TypeVar import trio @@ -20,14 +23,23 @@ LOGGER = logging.getLogger("trio.serve_listeners") -async def _run_handler(stream, handler): +StreamT = TypeVar("StreamT", bound=trio.abc.AsyncResource) +ListenerT = TypeVar("ListenerT", bound=trio.abc.Listener[Any]) +Handler = Callable[[StreamT], Awaitable[object]] + + +async def _run_handler(stream: StreamT, handler: Handler[StreamT]) -> None: try: await handler(stream) finally: await trio.aclose_forcefully(stream) -async def _serve_one_listener(listener, handler_nursery, handler): +async def _serve_one_listener( + listener: trio.abc.Listener[StreamT], + handler_nursery: trio.Nursery, + handler: Handler[StreamT], +) -> NoReturn: async with listener: while True: try: @@ -48,9 +60,21 @@ async def _serve_one_listener(listener, handler_nursery, handler): handler_nursery.start_soon(_run_handler, stream, handler) -async def serve_listeners( - handler, listeners, *, handler_nursery=None, task_status=trio.TASK_STATUS_IGNORED -): +# This cannot be typed correctly, we need generic typevar bounds / HKT to indicate the +# relationship between StreamT & ListenerT. +# https://github.com/python/typing/issues/1226 +# https://github.com/python/typing/issues/548 + + +# It does never return (since _serve_one_listener never completes), but type checkers can't +# understand nurseries. +async def serve_listeners( # type: ignore[misc] + handler: Handler[StreamT], + listeners: list[ListenerT], + *, + handler_nursery: trio.Nursery | None = None, + task_status: trio.TaskStatus[list[ListenerT]] = trio.TASK_STATUS_IGNORED, +) -> NoReturn: r"""Listen for incoming connections on ``listeners``, and for each one start a task running ``handler(stream)``. diff --git a/trio/_highlevel_socket.py b/trio/_highlevel_socket.py index ce96153805..f8d01cd755 100644 --- a/trio/_highlevel_socket.py +++ b/trio/_highlevel_socket.py @@ -2,8 +2,9 @@ from __future__ import annotations import errno +from collections.abc import Generator from contextlib import contextmanager -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, overload import trio @@ -12,6 +13,8 @@ from .abc import HalfCloseableStream, Listener if TYPE_CHECKING: + from typing_extensions import Buffer + from ._socket import _SocketType as SocketType # XX TODO: this number was picked arbitrarily. We should do experiments to @@ -29,7 +32,7 @@ @contextmanager -def _translate_socket_errors_to_stream_errors(): +def _translate_socket_errors_to_stream_errors() -> Generator[None, None, None]: try: yield except OSError as exc: @@ -97,7 +100,7 @@ def __init__(self, socket: SocketType): except OSError: pass - async def send_all(self, data): + async def send_all(self, data: bytes | bytearray | memoryview) -> None: if self.socket.did_shutdown_SHUT_WR: raise trio.ClosedResourceError("can't send data after sending EOF") with self._send_conflict_detector: @@ -145,15 +148,47 @@ async def aclose(self) -> None: # __aenter__, __aexit__ inherited from HalfCloseableStream are OK - def setsockopt(self, level, option, value): + @overload + def setsockopt(self, level: int, option: int, value: int | Buffer) -> None: + ... 
+ + @overload + def setsockopt(self, level: int, option: int, value: None, length: int) -> None: + ... + + def setsockopt( + self, + level: int, + option: int, + value: int | Buffer | None, + length: int | None = None, + ) -> None: """Set an option on the underlying socket. See :meth:`socket.socket.setsockopt` for details. """ - return self.socket.setsockopt(level, option, value) - - def getsockopt(self, level, option, buffersize=0): + if length is None: + if value is None: + raise TypeError( + "invalid value for argument 'value', must not be None when specifying length" + ) + return self.socket.setsockopt(level, option, value) + if value is not None: + raise TypeError( + f"invalid value for argument 'value': {value!r}, must be None when specifying optlen" + ) + return self.socket.setsockopt(level, option, value, length) + + @overload + def getsockopt(self, level: int, option: int) -> int: + ... + + @overload + def getsockopt(self, level: int, option: int, buffersize: int) -> bytes: + ... + + def getsockopt(self, level: int, option: int, buffersize: int = 0) -> int | bytes: """Check the current value of an option on the underlying socket. See :meth:`socket.socket.getsockopt` for details. @@ -311,7 +346,7 @@ def getsockopt(self, level, option, buffersize=0): ] # Not all errnos are defined on all platforms -_ignorable_accept_errnos = set() +_ignorable_accept_errnos: set[int] = set() for name in _ignorable_accept_errno_names: try: _ignorable_accept_errnos.add(getattr(errno, name)) diff --git a/trio/_highlevel_ssl_helpers.py b/trio/_highlevel_ssl_helpers.py index ad77a302f0..1647f373c2 100644 --- a/trio/_highlevel_ssl_helpers.py +++ b/trio/_highlevel_ssl_helpers.py @@ -1,4 +1,8 @@ +from __future__ import annotations + import ssl +from collections.abc import Awaitable, Callable +from typing import NoReturn import trio @@ -15,13 +19,13 @@ # So... let's punt on that for now. Hopefully we'll be getting a new Python # TLS API soon and can revisit this then. async def open_ssl_over_tcp_stream( - host, - port, + host: str | bytes, + port: int, *, - https_compatible=False, - ssl_context=None, - happy_eyeballs_delay=DEFAULT_DELAY, -): + https_compatible: bool = False, + ssl_context: ssl.SSLContext | None = None, + happy_eyeballs_delay: float | None = DEFAULT_DELAY, +) -> trio.SSLStream: """Make a TLS-encrypted Connection to the given host and port over TCP. This is a convenience wrapper that calls :func:`open_tcp_stream` and @@ -63,8 +67,13 @@ async def open_ssl_over_tcp_stream( async def open_ssl_over_tcp_listeners( - port, ssl_context, *, host=None, https_compatible=False, backlog=None -): + port: int, + ssl_context: ssl.SSLContext, + *, + host: str | bytes | None = None, + https_compatible: bool = False, + backlog: int | float | None = None, +) -> list[trio.SSLListener]: """Start listening for SSL/TLS-encrypted TCP connections to the given port. 
Args: @@ -86,16 +95,16 @@ async def open_ssl_over_tcp_listeners( async def serve_ssl_over_tcp( - handler, - port, - ssl_context, + handler: Callable[[trio.SSLStream], Awaitable[object]], + port: int, + ssl_context: ssl.SSLContext, *, - host=None, - https_compatible=False, - backlog=None, - handler_nursery=None, - task_status=trio.TASK_STATUS_IGNORED, -): + host: str | bytes | None = None, + https_compatible: bool = False, + backlog: int | float | None = None, + handler_nursery: trio.Nursery | None = None, + task_status: trio.TaskStatus[list[trio.SSLListener]] = trio.TASK_STATUS_IGNORED, +) -> NoReturn: """Listen for incoming TCP connections, and for each one start a task running ``handler(stream)``. diff --git a/trio/_socket.py b/trio/_socket.py index b0ec1d480d..b6d5966397 100644 --- a/trio/_socket.py +++ b/trio/_socket.py @@ -599,7 +599,7 @@ def setsockopt( return self._sock.setsockopt(level, optname, value) if value is not None: raise TypeError( - "invalid value for argument 'value': {value!r}, must be None when specifying optlen" + f"invalid value for argument 'value': {value!r}, must be None when specifying optlen" ) # Note: PyPy may crash here due to setsockopt only supporting diff --git a/trio/_tests/test_highlevel_socket.py b/trio/_tests/test_highlevel_socket.py index 14143affe2..1a987df3f3 100644 --- a/trio/_tests/test_highlevel_socket.py +++ b/trio/_tests/test_highlevel_socket.py @@ -11,6 +11,7 @@ check_half_closeable_stream, wait_all_tasks_blocked, ) +from .test_socket import setsockopt_tests async def test_SocketStream_basics(): @@ -50,6 +51,8 @@ async def test_SocketStream_basics(): b = s.getsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, 1) assert isinstance(b, bytes) + setsockopt_tests(s) + async def test_SocketStream_send_all(): BIG = 10000000 diff --git a/trio/_tests/test_socket.py b/trio/_tests/test_socket.py index f01b4fde14..036098b8e5 100644 --- a/trio/_tests/test_socket.py +++ b/trio/_tests/test_socket.py @@ -363,21 +363,26 @@ async def test_SocketType_basics(): async def test_SocketType_setsockopt() -> None: sock = tsocket.socket() with sock as _: - # specifying optlen. Not supported on pypy, and I couldn't find - # valid calls on darwin or win32. - if hasattr(tsocket, "SO_BINDTODEVICE"): - sock.setsockopt(tsocket.SOL_SOCKET, tsocket.SO_BINDTODEVICE, None, 0) + setsockopt_tests(sock) - # specifying value - sock.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, False) - # specifying both - with pytest.raises(TypeError, match="invalid value for argument 'value'"): - sock.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, False, 5) # type: ignore[call-overload] +def setsockopt_tests(sock): + """Extract these out, to be reused for SocketStream also.""" + # specifying optlen. Not supported on pypy, and I couldn't find + # valid calls on darwin or win32. 
+ if hasattr(tsocket, "SO_BINDTODEVICE"): + sock.setsockopt(tsocket.SOL_SOCKET, tsocket.SO_BINDTODEVICE, None, 0) + + # specifying value + sock.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, False) + + # specifying both + with pytest.raises(TypeError, match="invalid value for argument 'value'"): + sock.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, False, 5) - # specifying neither - with pytest.raises(TypeError, match="invalid value for argument 'value'"): - sock.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, None) # type: ignore[call-overload] + # specifying neither + with pytest.raises(TypeError, match="invalid value for argument 'value'"): + sock.setsockopt(tsocket.IPPROTO_TCP, tsocket.TCP_NODELAY, None) async def test_SocketType_dup(): diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index b61b28a428..e8c405d2eb 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9872611464968153, + "completenessScore": 0.9968152866242038, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 620, - "withUnknownType": 8 + "withKnownType": 626, + "withUnknownType": 2 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -46,14 +46,11 @@ ], "otherSymbolCounts": { "withAmbiguousType": 1, - "withKnownType": 662, - "withUnknownType": 19 + "withKnownType": 666, + "withUnknownType": 15 }, "packageName": "trio", "symbols": [ - "trio._highlevel_socket.SocketStream.getsockopt", - "trio._highlevel_socket.SocketStream.send_all", - "trio._highlevel_socket.SocketStream.setsockopt", "trio._ssl.SSLListener.__init__", "trio._ssl.SSLListener.accept", "trio._ssl.SSLListener.aclose", @@ -71,11 +68,6 @@ "trio.lowlevel.notify_closing", "trio.lowlevel.wait_readable", "trio.lowlevel.wait_writable", - "trio.open_ssl_over_tcp_listeners", - "trio.open_ssl_over_tcp_stream", - "trio.open_unix_socket", - "trio.serve_listeners", - "trio.serve_ssl_over_tcp", "trio.tests.TestsDeprecationWrapper" ] } From 9028678f7c94b53599460a60de3510c91a83f537 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Mon, 21 Aug 2023 05:42:20 -0500 Subject: [PATCH 141/162] Enable local partial types (#2766) * Enable local partial types * Fix missing annotations --- pyproject.toml | 1 + trio/_core/_tests/test_multierror.py | 8 +++++--- trio/_socket.py | 3 ++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 603d5dbe44..12b0a09894 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,6 +38,7 @@ python_version = "3.8" ignore_missing_imports = true # Be strict about use of Mypy +local_partial_types = true warn_unused_ignores = true warn_unused_configs = true warn_redundant_casts = true diff --git a/trio/_core/_tests/test_multierror.py b/trio/_core/_tests/test_multierror.py index 52e5e39d1b..6990a7b756 100644 --- a/trio/_core/_tests/test_multierror.py +++ b/trio/_core/_tests/test_multierror.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import gc import os import pickle @@ -20,13 +22,13 @@ class NotHashableException(Exception): - code = None + code: int | None = None - def __init__(self, code): + def __init__(self, code: int) -> None: super().__init__() self.code = code - def __eq__(self, other): + def __eq__(self, other: object) -> bool: if not isinstance(other, NotHashableException): return False return self.code == other.code diff --git a/trio/_socket.py 
b/trio/_socket.py index b6d5966397..2834a5b055 100644 --- a/trio/_socket.py +++ b/trio/_socket.py @@ -12,6 +12,7 @@ Any, Awaitable, Callable, + Literal, NoReturn, SupportsIndex, Tuple, @@ -315,7 +316,7 @@ def fromshare(info: bytes) -> _SocketType: TypeT: TypeAlias = int FamilyDefault = _stdlib_socket.AF_INET else: - FamilyDefault = None + FamilyDefault: Literal[None] = None FamilyT: TypeAlias = Union[int, AddressFamily, None] TypeT: TypeAlias = Union[_stdlib_socket.socket, int] From 3bdab9380a9619655c07494d4fe12ab167849e1e Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Mon, 21 Aug 2023 06:12:36 -0500 Subject: [PATCH 142/162] Add typing for `_wait_for_object.py` (#2755) * Add typing for `_wait_for_object.py` * Fix CData type * Future annotations * Use explicit type alias * Maybe saying it's `cffi.api.FFI` will fix it? * Fix this type alias * Handle None in raise_winerror() * Fix copy-paste error Interestingly not caught by tests, maybe we need specific tests for this. * Update `pyproject.toml` * Add a test for raise_winerror() * Change names * Fix mypy CI failure on linux and macos * Remove `_wait_for_object` and `_core._windows_cffi` fully typed ignores * Import sys for new mypy CI fix * Entirely skip type checking test_windows on other platforms --------- Co-authored-by: Spencer Brown Co-authored-by: John Litborn <11260241+jakkdl@users.noreply.github.com> --- pyproject.toml | 4 --- trio/_core/_tests/test_windows.py | 42 +++++++++++++++++++++++++++++++ trio/_core/_windows_cffi.py | 33 ++++++++++++++++++------ trio/_wait_for_object.py | 15 ++++++++--- 4 files changed, 79 insertions(+), 15 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 12b0a09894..34f2f069b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,15 +57,11 @@ disallow_untyped_defs = true check_untyped_defs = false disallow_untyped_calls = false - # files not yet fully typed [[tool.mypy.overrides]] module = [ # 2745 "trio/_ssl", -# 2755 -"trio/_core/_windows_cffi", -"trio/_wait_for_object", # 2761 "trio/_core/_generated_io_windows", "trio/_core/_io_windows", diff --git a/trio/_core/_tests/test_windows.py b/trio/_core/_tests/test_windows.py index 0dac94543c..99bb97284b 100644 --- a/trio/_core/_tests/test_windows.py +++ b/trio/_core/_tests/test_windows.py @@ -1,6 +1,9 @@ import os +import sys import tempfile from contextlib import contextmanager +from typing import TYPE_CHECKING +from unittest.mock import create_autospec import pytest @@ -8,6 +11,8 @@ # Mark all the tests in this file as being windows-only pytestmark = pytest.mark.skipif(not on_windows, reason="windows only") +assert sys.platform == "win32" or not TYPE_CHECKING # Skip type checking on Windows + from ... import _core, sleep from ...testing import wait_all_tasks_blocked from .tutil import gc_collect_harder, restore_unraisablehook, slow @@ -22,6 +27,43 @@ ) +def test_winerror(monkeypatch) -> None: + mock = create_autospec(ffi.getwinerror) + monkeypatch.setattr(ffi, "getwinerror", mock) + + # Returning none = no error, should not happen. 
+ mock.return_value = None + with pytest.raises(RuntimeError, match="No error set"): + raise_winerror() + mock.assert_called_once_with() + mock.reset_mock() + + with pytest.raises(RuntimeError, match="No error set"): + raise_winerror(38) + mock.assert_called_once_with(38) + mock.reset_mock() + + mock.return_value = (12, "test error") + with pytest.raises(OSError) as exc: + raise_winerror(filename="file_1", filename2="file_2") + mock.assert_called_once_with() + mock.reset_mock() + assert exc.value.winerror == 12 + assert exc.value.strerror == "test error" + assert exc.value.filename == "file_1" + assert exc.value.filename2 == "file_2" + + # With an explicit number passed in, it overrides what getwinerror() returns. + with pytest.raises(OSError) as exc: + raise_winerror(18, filename="a/file", filename2="b/file") + mock.assert_called_once_with(18) + mock.reset_mock() + assert exc.value.winerror == 18 + assert exc.value.strerror == "test error" + assert exc.value.filename == "a/file" + assert exc.value.filename2 == "b/file" + + # The undocumented API that this is testing should be changed to stop using # UnboundedQueue (or just removed until we have time to redo it), but until # then we filter out the warning. diff --git a/trio/_core/_windows_cffi.py b/trio/_core/_windows_cffi.py index 639e75b50e..a65a332c2f 100644 --- a/trio/_core/_windows_cffi.py +++ b/trio/_core/_windows_cffi.py @@ -1,5 +1,11 @@ +from __future__ import annotations + import enum import re +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing_extensions import NoReturn, TypeAlias import cffi @@ -215,7 +221,8 @@ # being _MSC_VER >= 800) LIB = re.sub(r"\bPASCAL\b", "__stdcall", LIB) -ffi = cffi.FFI() +ffi = cffi.api.FFI() +CData: TypeAlias = cffi.api.FFI.CData ffi.cdef(LIB) kernel32 = ffi.dlopen("kernel32.dll") @@ -302,23 +309,33 @@ class IoControlCodes(enum.IntEnum): ################################################################ -def _handle(obj): +def _handle(obj: int | CData) -> CData: # For now, represent handles as either cffi HANDLEs or as ints. If you # try to pass in a file descriptor instead, it's not going to work # out. (For that msvcrt.get_osfhandle does the trick, but I don't know if # we'll actually need that for anything...) For sockets this doesn't # matter, Python never allocates an fd. So let's wait until we actually # encounter the problem before worrying about it. 
- if type(obj) is int: + if isinstance(obj, int): return ffi.cast("HANDLE", obj) - else: - return obj + return obj -def raise_winerror(winerror=None, *, filename=None, filename2=None): +def raise_winerror( + winerror: int | None = None, + *, + filename: str | None = None, + filename2: str | None = None, +) -> NoReturn: if winerror is None: - winerror, msg = ffi.getwinerror() + err = ffi.getwinerror() + if err is None: + raise RuntimeError("No error set?") + winerror, msg = err else: - _, msg = ffi.getwinerror(winerror) + err = ffi.getwinerror(winerror) + if err is None: + raise RuntimeError("No error set?") + _, msg = err # https://docs.python.org/3/library/exceptions.html#OSError raise OSError(0, msg, filename, winerror, filename2) diff --git a/trio/_wait_for_object.py b/trio/_wait_for_object.py index 32a88e5398..50a9d13ff2 100644 --- a/trio/_wait_for_object.py +++ b/trio/_wait_for_object.py @@ -1,11 +1,20 @@ +from __future__ import annotations + import math import trio -from ._core._windows_cffi import ErrorCodes, _handle, ffi, kernel32, raise_winerror +from ._core._windows_cffi import ( + CData, + ErrorCodes, + _handle, + ffi, + kernel32, + raise_winerror, +) -async def WaitForSingleObject(obj): +async def WaitForSingleObject(obj: int | CData) -> None: """Async and cancellable variant of WaitForSingleObject. Windows only. Args: @@ -45,7 +54,7 @@ async def WaitForSingleObject(obj): kernel32.CloseHandle(cancel_handle) -def WaitForMultipleObjects_sync(*handles): +def WaitForMultipleObjects_sync(*handles: int | CData) -> None: """Wait for any of the given Windows handles to be signaled.""" n = len(handles) handle_arr = ffi.new(f"HANDLE[{n}]") From f9fdeb72118fb9773f887cac928ac7aecabc3246 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Mon, 21 Aug 2023 07:04:01 -0500 Subject: [PATCH 143/162] Add type annotations for `_ssl.py` (#2745) * Somehow this did not get in to edbf981 * Add type annotations to `_ssl.py` * Fix for pre-3.10 * Fix CI issues * Update `verify_types.json` * Remove unused imports * Change lots of `Any` to `object` * Update `verify_types.json` * Add `_ssl` to stricker check block and sort modules * Add `trio._abc.T_resource` to doc ignore list * Attempt to fix pyright issues * Update `verify_types.json` * Remove `__slots__` * Update `verify_types.json` * Change to `object` (@A5rocks suggestion) * Fix `server_hostname` * Update `verify_types.json` * Add `_highlevel_ssl_helpers` to stricter checks block * Revert "Add `_highlevel_ssl_helpers` to stricter checks block" This reverts commit 3406eaf9c77c461ae999bf11a9787df82cc7fc11. * Revert changes to `_highlevel_ssl_helpers` Apparently already being handled in #2756 * Revert "Update `verify_types.json`" This reverts commit 40e9bf293fb6ccefc0a7ada1ef1d486b3682a164. 
* Update `verify_types.json` and fix line endings * Remove slots from exception * Remove `_ssl` from not fully typed block --------- Co-authored-by: CoolCat467 Co-authored-by: jakkdl --- pyproject.toml | 2 - trio/_abc.py | 2 +- trio/_ssl.py | 111 ++++++++++++++----------- trio/_tests/check_type_completeness.py | 7 +- trio/_tests/verify_types.json | 26 ++---- 5 files changed, 74 insertions(+), 74 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 34f2f069b3..184b46056d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,8 +60,6 @@ disallow_untyped_calls = false # files not yet fully typed [[tool.mypy.overrides]] module = [ -# 2745 -"trio/_ssl", # 2761 "trio/_core/_generated_io_windows", "trio/_core/_io_windows", diff --git a/trio/_abc.py b/trio/_abc.py index 59454b794c..746360c8f8 100644 --- a/trio/_abc.py +++ b/trio/_abc.py @@ -565,7 +565,7 @@ class Listener(AsyncResource, Generic[T_resource]): __slots__ = () @abstractmethod - async def accept(self) -> AsyncResource: + async def accept(self) -> T_resource: """Wait until an incoming connection arrives, and then return it. Returns: diff --git a/trio/_ssl.py b/trio/_ssl.py index bd8b3b06b6..f0f01f7583 100644 --- a/trio/_ssl.py +++ b/trio/_ssl.py @@ -1,3 +1,18 @@ +from __future__ import annotations + +import operator as _operator +import ssl as _stdlib_ssl +from collections.abc import Awaitable, Callable +from enum import Enum as _Enum +from typing import Any, Final as TFinal, TypeVar + +import trio + +from . import _sync +from ._highlevel_generic import aclose_forcefully +from ._util import ConflictDetector, Final +from .abc import Listener, Stream + # General theory of operation: # # We implement an API that closely mirrors the stdlib ssl module's blocking @@ -149,16 +164,8 @@ # docs will need to make very clear that this is different from all the other # cancellations in core Trio -import operator as _operator -import ssl as _stdlib_ssl -from enum import Enum as _Enum -import trio - -from . import _sync -from ._highlevel_generic import aclose_forcefully -from ._util import ConflictDetector, Final -from .abc import Listener, Stream +T = TypeVar("T") ################################################################ # SSLStream @@ -187,16 +194,16 @@ # MTU and an initial window of 10 (see RFC 6928), then the initial burst of # data will be limited to ~15000 bytes (or a bit less due to IP-level framing # overhead), so this is chosen to be larger than that. -STARTING_RECEIVE_SIZE = 16384 +STARTING_RECEIVE_SIZE: TFinal = 16384 -def _is_eof(exc): +def _is_eof(exc: BaseException | None) -> bool: # There appears to be a bug on Python 3.10, where SSLErrors # aren't properly translated into SSLEOFErrors. # This stringly-typed error check is borrowed from the AnyIO # project. 
return isinstance(exc, _stdlib_ssl.SSLEOFError) or ( - hasattr(exc, "strerror") and "UNEXPECTED_EOF_WHILE_READING" in exc.strerror + "UNEXPECTED_EOF_WHILE_READING" in getattr(exc, "strerror", ()) ) @@ -209,13 +216,13 @@ class NeedHandshakeError(Exception): class _Once: - def __init__(self, afn, *args): + def __init__(self, afn: Callable[..., Awaitable[object]], *args: object) -> None: self._afn = afn self._args = args self.started = False self._done = _sync.Event() - async def ensure(self, *, checkpoint): + async def ensure(self, *, checkpoint: bool) -> None: if not self.started: self.started = True await self._afn(*self._args) @@ -226,8 +233,8 @@ async def ensure(self, *, checkpoint): await self._done.wait() @property - def done(self): - return self._done.is_set() + def done(self) -> bool: + return bool(self._done.is_set()) _State = _Enum("_State", ["OK", "BROKEN", "CLOSED"]) @@ -257,8 +264,8 @@ class SSLStream(Stream, metaclass=Final): this connection. Required. Usually created by calling :func:`ssl.create_default_context`. - server_hostname (str or None): The name of the server being connected - to. Used for `SNI + server_hostname (str, bytes, or None): The name of the server being + connected to. Used for `SNI `__ and for validating the server's certificate (if hostname checking is enabled). This is effectively mandatory for clients, and actually @@ -331,24 +338,24 @@ class SSLStream(Stream, metaclass=Final): # SSLListener.__init__, and maybe the open_ssl_over_tcp_* helpers. def __init__( self, - transport_stream, - ssl_context, + transport_stream: Stream, + ssl_context: _stdlib_ssl.SSLContext, *, - server_hostname=None, - server_side=False, - https_compatible=False, - ): - self.transport_stream = transport_stream + server_hostname: str | bytes | None = None, + server_side: bool = False, + https_compatible: bool = False, + ) -> None: + self.transport_stream: Stream = transport_stream self._state = _State.OK self._https_compatible = https_compatible self._outgoing = _stdlib_ssl.MemoryBIO() - self._delayed_outgoing = None + self._delayed_outgoing: bytes | None = None self._incoming = _stdlib_ssl.MemoryBIO() self._ssl_object = ssl_context.wrap_bio( self._incoming, self._outgoing, server_side=server_side, - server_hostname=server_hostname, + server_hostname=server_hostname, # type: ignore[arg-type] # Typeshed bug, does accept bytes as well (typeshed#10590) ) # Tracks whether we've already done the initial handshake self._handshook = _Once(self._do_handshake) @@ -399,7 +406,7 @@ def __init__( "version", } - def __getattr__(self, name): + def __getattr__(self, name: str) -> Any: if name in self._forwarded: if name in self._after_handshake and not self._handshook.done: raise NeedHandshakeError(f"call do_handshake() before calling {name!r}") @@ -408,16 +415,16 @@ def __getattr__(self, name): else: raise AttributeError(name) - def __setattr__(self, name, value): + def __setattr__(self, name: str, value: object) -> None: if name in self._forwarded: setattr(self._ssl_object, name, value) else: super().__setattr__(name, value) - def __dir__(self): - return super().__dir__() + list(self._forwarded) + def __dir__(self) -> list[str]: + return list(super().__dir__()) + list(self._forwarded) - def _check_status(self): + def _check_status(self) -> None: if self._state is _State.OK: return elif self._state is _State.BROKEN: @@ -431,7 +438,13 @@ def _check_status(self): # comments, though, just make sure to think carefully if you ever have to # touch it. 
The big comment at the top of this file will help explain # too. - async def _retry(self, fn, *args, ignore_want_read=False, is_handshake=False): + async def _retry( + self, + fn: Callable[..., T], + *args: object, + ignore_want_read: bool = False, + is_handshake: bool = False, + ) -> T | None: await trio.lowlevel.checkpoint_if_cancelled() yielded = False finished = False @@ -603,14 +616,14 @@ async def _retry(self, fn, *args, ignore_want_read=False, is_handshake=False): await trio.lowlevel.cancel_shielded_checkpoint() return ret - async def _do_handshake(self): + async def _do_handshake(self) -> None: try: await self._retry(self._ssl_object.do_handshake, is_handshake=True) except: self._state = _State.BROKEN raise - async def do_handshake(self): + async def do_handshake(self) -> None: """Ensure that the initial handshake has completed. The SSL protocol requires an initial handshake to exchange @@ -645,7 +658,7 @@ async def do_handshake(self): # https://bugs.python.org/issue30141 # So we *definitely* have to make sure that do_handshake is called # before doing anything else. - async def receive_some(self, max_bytes=None): + async def receive_some(self, max_bytes: int | None = None) -> bytes | bytearray: """Read some data from the underlying transport, decrypt it, and return it. @@ -684,7 +697,9 @@ async def receive_some(self, max_bytes=None): if max_bytes < 1: raise ValueError("max_bytes must be >= 1") try: - return await self._retry(self._ssl_object.read, max_bytes) + received = await self._retry(self._ssl_object.read, max_bytes) + assert received is not None + return received except trio.BrokenResourceError as exc: # This isn't quite equivalent to just returning b"" in the # first place, because we still end up with self._state set to @@ -698,7 +713,7 @@ async def receive_some(self, max_bytes=None): else: raise - async def send_all(self, data): + async def send_all(self, data: bytes | bytearray | memoryview) -> None: """Encrypt some data and then send it on the underlying transport. See :meth:`trio.abc.SendStream.send_all` for details. @@ -719,7 +734,7 @@ async def send_all(self, data): return await self._retry(self._ssl_object.write, data) - async def unwrap(self): + async def unwrap(self) -> tuple[Stream, bytes | bytearray]: """Cleanly close down the SSL/TLS encryption layer, allowing the underlying stream to be used for unencrypted communication. @@ -741,11 +756,11 @@ async def unwrap(self): await self._handshook.ensure(checkpoint=False) await self._retry(self._ssl_object.unwrap) transport_stream = self.transport_stream - self.transport_stream = None self._state = _State.CLOSED + self.transport_stream = None # type: ignore[assignment] # State is CLOSED now, nothing should use return (transport_stream, self._incoming.read()) - async def aclose(self): + async def aclose(self) -> None: """Gracefully shut down this connection, and close the underlying transport. @@ -832,7 +847,7 @@ async def aclose(self): finally: self._state = _State.CLOSED - async def wait_send_all_might_not_block(self): + async def wait_send_all_might_not_block(self) -> None: """See :meth:`trio.abc.SendStream.wait_send_all_might_not_block`.""" # This method's implementation is deceptively simple. 
# @@ -897,16 +912,16 @@ class SSLListener(Listener[SSLStream], metaclass=Final): def __init__( self, - transport_listener, - ssl_context, + transport_listener: Listener[Stream], + ssl_context: _stdlib_ssl.SSLContext, *, - https_compatible=False, - ): + https_compatible: bool = False, + ) -> None: self.transport_listener = transport_listener self._ssl_context = ssl_context self._https_compatible = https_compatible - async def accept(self): + async def accept(self) -> SSLStream: """Accept the next connection and wrap it in an :class:`SSLStream`. See :meth:`trio.abc.Listener.accept` for details. @@ -920,6 +935,6 @@ async def accept(self): https_compatible=self._https_compatible, ) - async def aclose(self): + async def aclose(self) -> None: """Close the transport listener.""" await self.transport_listener.aclose() diff --git a/trio/_tests/check_type_completeness.py b/trio/_tests/check_type_completeness.py index abaabcf785..6a8761b88c 100755 --- a/trio/_tests/check_type_completeness.py +++ b/trio/_tests/check_type_completeness.py @@ -128,9 +128,10 @@ def main(args: argparse.Namespace) -> int: invert=invert, ) - assert ( - res.returncode != 0 - ), "Fully type complete! Delete this script and instead directly run `pyright --verifytypes=trio` (consider `--ignoreexternal`) in CI and checking exit code." + # handle in separate PR + # assert ( + # res.returncode != 0 + # ), "Fully type complete! Delete this script and instead directly run `pyright --verifytypes=trio` (consider `--ignoreexternal`) in CI and checking exit code." if args.overwrite_file: print("Overwriting file") diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json index e8c405d2eb..2d93f0fb3f 100644 --- a/trio/_tests/verify_types.json +++ b/trio/_tests/verify_types.json @@ -7,11 +7,11 @@ "warningCount": 0 }, "typeCompleteness": { - "completenessScore": 0.9968152866242038, + "completenessScore": 1, "exportedSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 626, - "withUnknownType": 2 + "withKnownType": 628, + "withUnknownType": 0 }, "ignoreUnknownTypesFromImports": true, "missingClassDocStringCount": 1, @@ -45,26 +45,12 @@ } ], "otherSymbolCounts": { - "withAmbiguousType": 1, - "withKnownType": 666, - "withUnknownType": 15 + "withAmbiguousType": 0, + "withKnownType": 682, + "withUnknownType": 0 }, "packageName": "trio", "symbols": [ - "trio._ssl.SSLListener.__init__", - "trio._ssl.SSLListener.accept", - "trio._ssl.SSLListener.aclose", - "trio._ssl.SSLStream.__dir__", - "trio._ssl.SSLStream.__getattr__", - "trio._ssl.SSLStream.__init__", - "trio._ssl.SSLStream.__setattr__", - "trio._ssl.SSLStream.aclose", - "trio._ssl.SSLStream.do_handshake", - "trio._ssl.SSLStream.receive_some", - "trio._ssl.SSLStream.send_all", - "trio._ssl.SSLStream.transport_stream", - "trio._ssl.SSLStream.unwrap", - "trio._ssl.SSLStream.wait_send_all_might_not_block", "trio.lowlevel.notify_closing", "trio.lowlevel.wait_readable", "trio.lowlevel.wait_writable", From 87c6ef49ddacfb66661ce235ff30a35b8c650c32 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Mon, 21 Aug 2023 14:37:04 +0200 Subject: [PATCH 144/162] check type completeness on all platforms (#2767) * check type completeness on all platforms * update .gitattributes and check.sh --- .gitattributes | 2 - check.sh | 4 +- trio/_tests/check_type_completeness.py | 52 +- trio/_tests/verify_types.json | 60 --- trio/_tests/verify_types_darwin.json | 221 +++++++++ trio/_tests/verify_types_linux.json | 146 ++++++ 
trio/_tests/verify_types_windows.json | 661 +++++++++++++++++++++++++ 7 files changed, 1053 insertions(+), 93 deletions(-) delete mode 100644 trio/_tests/verify_types.json create mode 100644 trio/_tests/verify_types_darwin.json create mode 100644 trio/_tests/verify_types_linux.json create mode 100644 trio/_tests/verify_types_windows.json diff --git a/.gitattributes b/.gitattributes index 7fbcb4fe2d..991065e069 100644 --- a/.gitattributes +++ b/.gitattributes @@ -2,5 +2,3 @@ trio/_core/_generated* linguist-generated=true # Treat generated files as binary in git diff trio/_core/_generated* -diff -# don't merge the generated json file, let the user (script) handle it -trio/_tests/verify_types.json merge=binary diff --git a/check.sh b/check.sh index ace193a62a..f9458d95c0 100755 --- a/check.sh +++ b/check.sh @@ -44,9 +44,9 @@ fi codespell || EXIT_STATUS=$? python trio/_tests/check_type_completeness.py --overwrite-file || EXIT_STATUS=$? -if git status --porcelain trio/_tests/verify_types.json | grep -q "M"; then +if git status --porcelain trio/_tests/verify_types*.json | grep -q "M"; then echo "Type completeness changed, please update!" - git --no-pager diff --color trio/_tests/verify_types.json + git --no-pager diff --color trio/_tests/verify_types*.json EXIT_STATUS=1 fi diff --git a/trio/_tests/check_type_completeness.py b/trio/_tests/check_type_completeness.py index 6a8761b88c..449f18f9ab 100755 --- a/trio/_tests/check_type_completeness.py +++ b/trio/_tests/check_type_completeness.py @@ -9,18 +9,21 @@ from pathlib import Path # the result file is not marked in MANIFEST.in so it's not included in the package -RESULT_FILE = Path(__file__).parent / "verify_types.json" failed = False +def get_result_file_name(platform: str): + return Path(__file__).parent / f"verify_types_{platform.lower()}.json" + + # TODO: consider checking manually without `--ignoreexternal`, and/or # removing it from the below call later on. -def run_pyright(): +def run_pyright(platform: str): return subprocess.run( [ "pyright", # Specify a platform and version to keep imported modules consistent. 
- "--pythonplatform=Linux", + f"--pythonplatform={platform}", "--pythonversion=3.8", "--verifytypes=trio", "--outputjson", @@ -43,9 +46,11 @@ def check_less_than(key, current_dict, last_dict, /, invert=False): print("ERROR: ", end="") if isinstance(current, float): strcurrent = f"{current:.4}" - strlast = f"{last:.4}" else: strcurrent = str(current) + if isinstance(last, float): + strlast = f"{last:.4}" + else: strlast = str(last) print( f"{key} has gone {'down' if current int: +def check_type(args: argparse.Namespace, platform: str) -> int: print("*" * 20, "\nChecking type completeness hasn't gone down...") - res = run_pyright() + res = run_pyright(platform) current_result = json.loads(res.stdout) py_typed_file: Path | None = None @@ -79,26 +84,13 @@ def main(args: argparse.Namespace) -> int: ) py_typed_file.write_text("") - res = run_pyright() + res = run_pyright(platform) current_result = json.loads(res.stdout) if res.stderr: print(res.stderr) - if args.full_diagnostics_file is not None: - with open(args.full_diagnostics_file, "w") as file: - json.dump( - [ - sym - for sym in current_result["typeCompleteness"]["symbols"] - if sym["diagnostics"] - ], - file, - sort_keys=True, - indent=2, - ) - - last_result = json.loads(RESULT_FILE.read_text()) + last_result = json.loads(get_result_file_name(platform).read_text()) for key in "errorCount", "warningCount", "informationCount": check_zero(key, current_result["summary"]) @@ -128,11 +120,6 @@ def main(args: argparse.Namespace) -> int: invert=invert, ) - # handle in separate PR - # assert ( - # res.returncode != 0 - # ), "Fully type complete! Delete this script and instead directly run `pyright --verifytypes=trio` (consider `--ignoreexternal`) in CI and checking exit code." - if args.overwrite_file: print("Overwriting file") @@ -156,11 +143,11 @@ def main(args: argparse.Namespace) -> int: new_symbols = [] for symbol in current_result["typeCompleteness"]["symbols"]: if symbol["diagnostics"]: - new_symbols.append(symbol["name"]) + new_symbols.append(symbol) continue # Ensure order of arrays does not affect result. 
- new_symbols.sort() + new_symbols.sort(key=lambda module: module.get("name", "")) current_result["generalDiagnostics"].sort() current_result["typeCompleteness"]["modules"].sort( key=lambda module: module.get("name", "") @@ -168,7 +155,7 @@ def main(args: argparse.Namespace) -> int: current_result["typeCompleteness"]["symbols"] = new_symbols - with open(RESULT_FILE, "w") as file: + with open(get_result_file_name(platform), "w") as file: json.dump(current_result, file, sort_keys=True, indent=2) # add newline at end of file so it's easier to manually modify file.write("\n") @@ -182,6 +169,13 @@ def main(args: argparse.Namespace) -> int: return int(failed) +def main(args: argparse.Namespace) -> int: + res = 0 + for platform in "Linux", "Windows", "Darwin": + res += check_type(args, platform) + return res + + parser = argparse.ArgumentParser() parser.add_argument("--overwrite-file", action="store_true", default=False) parser.add_argument("--full-diagnostics-file", type=Path, default=None) diff --git a/trio/_tests/verify_types.json b/trio/_tests/verify_types.json deleted file mode 100644 index 2d93f0fb3f..0000000000 --- a/trio/_tests/verify_types.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "generalDiagnostics": [], - "summary": { - "errorCount": 0, - "filesAnalyzed": 8, - "informationCount": 0, - "warningCount": 0 - }, - "typeCompleteness": { - "completenessScore": 1, - "exportedSymbolCounts": { - "withAmbiguousType": 0, - "withKnownType": 628, - "withUnknownType": 0 - }, - "ignoreUnknownTypesFromImports": true, - "missingClassDocStringCount": 1, - "missingDefaultParamCount": 0, - "missingFunctionDocStringCount": 3, - "moduleName": "trio", - "modules": [ - { - "name": "trio" - }, - { - "name": "trio.abc" - }, - { - "name": "trio.from_thread" - }, - { - "name": "trio.lowlevel" - }, - { - "name": "trio.socket" - }, - { - "name": "trio.testing" - }, - { - "name": "trio.tests" - }, - { - "name": "trio.to_thread" - } - ], - "otherSymbolCounts": { - "withAmbiguousType": 0, - "withKnownType": 682, - "withUnknownType": 0 - }, - "packageName": "trio", - "symbols": [ - "trio.lowlevel.notify_closing", - "trio.lowlevel.wait_readable", - "trio.lowlevel.wait_writable", - "trio.tests.TestsDeprecationWrapper" - ] - } -} diff --git a/trio/_tests/verify_types_darwin.json b/trio/_tests/verify_types_darwin.json new file mode 100644 index 0000000000..7329946baa --- /dev/null +++ b/trio/_tests/verify_types_darwin.json @@ -0,0 +1,221 @@ +{ + "generalDiagnostics": [], + "summary": { + "errorCount": 0, + "filesAnalyzed": 8, + "informationCount": 0, + "warningCount": 0 + }, + "typeCompleteness": { + "completenessScore": 1, + "exportedSymbolCounts": { + "withAmbiguousType": 0, + "withKnownType": 631, + "withUnknownType": 0 + }, + "ignoreUnknownTypesFromImports": true, + "missingClassDocStringCount": 1, + "missingDefaultParamCount": 0, + "missingFunctionDocStringCount": 6, + "moduleName": "trio", + "modules": [ + { + "name": "trio" + }, + { + "name": "trio.abc" + }, + { + "name": "trio.from_thread" + }, + { + "name": "trio.lowlevel" + }, + { + "name": "trio.socket" + }, + { + "name": "trio.testing" + }, + { + "name": "trio.tests" + }, + { + "name": "trio.to_thread" + } + ], + "otherSymbolCounts": { + "withAmbiguousType": 0, + "withKnownType": 682, + "withUnknownType": 0 + }, + "packageName": "trio", + "symbols": [ + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for 
function \"trio.lowlevel.current_kqueue\"", + "range": { + "end": { + "character": 44, + "line": 72 + }, + "start": { + "character": 30, + "line": 72 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.current_kqueue", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.monitor_kevent\"", + "range": { + "end": { + "character": 44, + "line": 73 + }, + "start": { + "character": 30, + "line": 73 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.monitor_kevent", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.notify_closing\"", + "range": { + "end": { + "character": 36, + "line": 33 + }, + "start": { + "character": 22, + "line": 33 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.notify_closing", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.wait_kevent\"", + "range": { + "end": { + "character": 38, + "line": 74 + }, + "start": { + "character": 27, + "line": 74 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.wait_kevent", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.wait_readable\"", + "range": { + "end": { + "character": 34, + "line": 42 + }, + "start": { + "character": 21, + "line": 42 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.wait_readable", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.wait_writable\"", + "range": { + "end": { + "character": 34, + "line": 44 + }, + "start": { + "character": 21, + "line": 44 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.wait_writable", + "referenceCount": 1 + }, + { + "category": "class", + "diagnostics": [ + { + "file": "", + "message": "No docstring found for class \"trio.tests.TestsDeprecationWrapper\"", + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.tests.TestsDeprecationWrapper", + "referenceCount": 1 + } + ] + } +} diff --git a/trio/_tests/verify_types_linux.json b/trio/_tests/verify_types_linux.json new file mode 100644 index 0000000000..76d66813e5 --- /dev/null +++ 
b/trio/_tests/verify_types_linux.json @@ -0,0 +1,146 @@ +{ + "generalDiagnostics": [], + "summary": { + "errorCount": 0, + "filesAnalyzed": 8, + "informationCount": 0, + "warningCount": 0 + }, + "typeCompleteness": { + "completenessScore": 1, + "exportedSymbolCounts": { + "withAmbiguousType": 0, + "withKnownType": 628, + "withUnknownType": 0 + }, + "ignoreUnknownTypesFromImports": true, + "missingClassDocStringCount": 1, + "missingDefaultParamCount": 0, + "missingFunctionDocStringCount": 3, + "moduleName": "trio", + "modules": [ + { + "name": "trio" + }, + { + "name": "trio.abc" + }, + { + "name": "trio.from_thread" + }, + { + "name": "trio.lowlevel" + }, + { + "name": "trio.socket" + }, + { + "name": "trio.testing" + }, + { + "name": "trio.tests" + }, + { + "name": "trio.to_thread" + } + ], + "otherSymbolCounts": { + "withAmbiguousType": 0, + "withKnownType": 682, + "withUnknownType": 0 + }, + "packageName": "trio", + "symbols": [ + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.notify_closing\"", + "range": { + "end": { + "character": 36, + "line": 33 + }, + "start": { + "character": 22, + "line": 33 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.notify_closing", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.wait_readable\"", + "range": { + "end": { + "character": 34, + "line": 42 + }, + "start": { + "character": 21, + "line": 42 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.wait_readable", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.wait_writable\"", + "range": { + "end": { + "character": 34, + "line": 44 + }, + "start": { + "character": 21, + "line": 44 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.wait_writable", + "referenceCount": 1 + }, + { + "category": "class", + "diagnostics": [ + { + "file": "", + "message": "No docstring found for class \"trio.tests.TestsDeprecationWrapper\"", + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.tests.TestsDeprecationWrapper", + "referenceCount": 1 + } + ] + } +} diff --git a/trio/_tests/verify_types_windows.json b/trio/_tests/verify_types_windows.json new file mode 100644 index 0000000000..24ef5f355b --- /dev/null +++ b/trio/_tests/verify_types_windows.json @@ -0,0 +1,661 @@ +{ + "generalDiagnostics": [], + "summary": { + "errorCount": 0, + "filesAnalyzed": 8, + "informationCount": 0, + "warningCount": 0 + }, + "typeCompleteness": { + "completenessScore": 0.9857369255150554, + "exportedSymbolCounts": { + "withAmbiguousType": 0, + "withKnownType": 622, + "withUnknownType": 9 + }, + "ignoreUnknownTypesFromImports": true, + "missingClassDocStringCount": 1, + "missingDefaultParamCount": 0, + 
"missingFunctionDocStringCount": 11, + "moduleName": "trio", + "modules": [ + { + "name": "trio" + }, + { + "name": "trio.abc" + }, + { + "name": "trio.from_thread" + }, + { + "name": "trio.lowlevel" + }, + { + "name": "trio.socket" + }, + { + "name": "trio.testing" + }, + { + "name": "trio.tests" + }, + { + "name": "trio.to_thread" + } + ], + "otherSymbolCounts": { + "withAmbiguousType": 0, + "withKnownType": 673, + "withUnknownType": 0 + }, + "packageName": "trio", + "symbols": [ + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Return type annotation is missing", + "range": { + "end": { + "character": 36, + "line": 57 + }, + "start": { + "character": 24, + "line": 57 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.current_iocp\"", + "range": { + "end": { + "character": 36, + "line": 57 + }, + "start": { + "character": 24, + "line": 57 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": false, + "name": "trio.lowlevel.current_iocp", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Return type annotation is missing", + "range": { + "end": { + "character": 56, + "line": 58 + }, + "start": { + "character": 34, + "line": 58 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.monitor_completion_key\"", + "range": { + "end": { + "character": 56, + "line": 58 + }, + "start": { + "character": 34, + "line": 58 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": false, + "name": "trio.lowlevel.monitor_completion_key", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"handle\" is missing", + "range": { + "end": { + "character": 36, + "line": 33 + }, + "start": { + "character": 22, + "line": 33 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Return type annotation is missing", + "range": { + "end": { + "character": 36, + "line": 33 + }, + "start": { + "character": 22, + "line": 33 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.notify_closing\"", + "range": { + "end": { + "character": 36, + "line": 33 + }, + "start": { + "character": 22, + "line": 33 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": false, + "name": "trio.lowlevel.notify_closing", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": 
"/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_subprocess.py", + "message": "No docstring found for function \"trio.lowlevel.open_process\"", + "range": { + "end": { + "character": 53, + "line": 46 + }, + "start": { + "character": 41, + "line": 46 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.lowlevel.open_process", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"handle\" is missing", + "range": { + "end": { + "character": 50, + "line": 59 + }, + "start": { + "character": 31, + "line": 59 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"buffer\" is missing", + "range": { + "end": { + "character": 50, + "line": 59 + }, + "start": { + "character": 31, + "line": 59 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"file_offset\" is missing", + "range": { + "end": { + "character": 50, + "line": 59 + }, + "start": { + "character": 31, + "line": 59 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Return type annotation is missing", + "range": { + "end": { + "character": 50, + "line": 59 + }, + "start": { + "character": 31, + "line": 59 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.readinto_overlapped\"", + "range": { + "end": { + "character": 50, + "line": 59 + }, + "start": { + "character": 31, + "line": 59 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": false, + "name": "trio.lowlevel.readinto_overlapped", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"handle\" is missing", + "range": { + "end": { + "character": 48, + "line": 60 + }, + "start": { + "character": 30, + "line": 60 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Return type annotation is missing", + "range": { + "end": { + "character": 48, + "line": 60 + }, + "start": { + "character": 30, + "line": 60 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.register_with_iocp\"", + "range": { + "end": { + "character": 48, + "line": 60 + }, + "start": { + "character": 30, + "line": 60 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": false, + "name": "trio.lowlevel.register_with_iocp", + "referenceCount": 1 + }, + { + "category": 
"function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"handle\" is missing", + "range": { + "end": { + "character": 42, + "line": 61 + }, + "start": { + "character": 27, + "line": 61 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"lpOverlapped\" is missing", + "range": { + "end": { + "character": 42, + "line": 61 + }, + "start": { + "character": 27, + "line": 61 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Return type annotation is missing", + "range": { + "end": { + "character": 42, + "line": 61 + }, + "start": { + "character": 27, + "line": 61 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.wait_overlapped\"", + "range": { + "end": { + "character": 42, + "line": 61 + }, + "start": { + "character": 27, + "line": 61 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": false, + "name": "trio.lowlevel.wait_overlapped", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"sock\" is missing", + "range": { + "end": { + "character": 34, + "line": 42 + }, + "start": { + "character": 21, + "line": 42 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Return type annotation is missing", + "range": { + "end": { + "character": 34, + "line": 42 + }, + "start": { + "character": 21, + "line": 42 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.wait_readable\"", + "range": { + "end": { + "character": 34, + "line": 42 + }, + "start": { + "character": 21, + "line": 42 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": false, + "name": "trio.lowlevel.wait_readable", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"sock\" is missing", + "range": { + "end": { + "character": 34, + "line": 44 + }, + "start": { + "character": 21, + "line": 44 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Return type annotation is missing", + "range": { + "end": { + "character": 34, + "line": 44 + }, + "start": { + "character": 21, + "line": 44 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for 
function \"trio.lowlevel.wait_writable\"", + "range": { + "end": { + "character": 34, + "line": 44 + }, + "start": { + "character": 21, + "line": 44 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": false, + "name": "trio.lowlevel.wait_writable", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"handle\" is missing", + "range": { + "end": { + "character": 44, + "line": 62 + }, + "start": { + "character": 28, + "line": 62 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"data\" is missing", + "range": { + "end": { + "character": 44, + "line": 62 + }, + "start": { + "character": 28, + "line": 62 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Type annotation for parameter \"file_offset\" is missing", + "range": { + "end": { + "character": 44, + "line": 62 + }, + "start": { + "character": 28, + "line": 62 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "Return type annotation is missing", + "range": { + "end": { + "character": 44, + "line": 62 + }, + "start": { + "character": 28, + "line": 62 + } + }, + "severity": "error" + }, + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", + "message": "No docstring found for function \"trio.lowlevel.write_overlapped\"", + "range": { + "end": { + "character": 44, + "line": 62 + }, + "start": { + "character": 28, + "line": 62 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": false, + "name": "trio.lowlevel.write_overlapped", + "referenceCount": 1 + }, + { + "category": "function", + "diagnostics": [ + { + "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_subprocess.py", + "message": "No docstring found for function \"trio.run_process\"", + "range": { + "end": { + "character": 71, + "line": 83 + }, + "start": { + "character": 60, + "line": 83 + } + }, + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.run_process", + "referenceCount": 1 + }, + { + "category": "class", + "diagnostics": [ + { + "file": "", + "message": "No docstring found for class \"trio.tests.TestsDeprecationWrapper\"", + "severity": "warning" + } + ], + "isExported": true, + "isTypeAmbiguous": false, + "isTypeKnown": true, + "name": "trio.tests.TestsDeprecationWrapper", + "referenceCount": 1 + } + ] + } +} From 20f5435e513bf690fd71d79adb3f96030b312fa8 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Mon, 21 Aug 2023 07:40:30 -0500 Subject: [PATCH 145/162] Update `contributing.rst` to talk about pre-commit (#2765) * Update `contributing.rst` to talk about pre-commit * Add details about isort action comments * Prefer `split` instead of `skip` --- docs/source/contributing.rst | 28 ++++++++++++++++++++++------ 1 file changed, 22 insertions(+), 6 deletions(-) diff 
--git a/docs/source/contributing.rst b/docs/source/contributing.rst index 7d66ae711d..6189814b3f 100644 --- a/docs/source/contributing.rst +++ b/docs/source/contributing.rst @@ -286,13 +286,13 @@ Code formatting ~~~~~~~~~~~~~~~ Instead of wasting time arguing about code formatting, we use `black -`__ to automatically format all our -code to a standard style. While you're editing code you can be as -sloppy as you like about whitespace; and then before you commit, just -run:: +`__ as well as other tools to automatically +format all our code to a standard style. While you're editing code you +can be as sloppy as you like about whitespace; and then before you commit, +just run:: - pip install -U black - black setup.py trio + pip install -U pre-commit + pre-commit to fix it up. (And don't worry if you forget – when you submit a pull request then we'll automatically check and remind you.) Hopefully this @@ -300,6 +300,17 @@ will let you focus on more important style issues like choosing good names, writing useful comments, and making sure your docstrings are nicely formatted. (black doesn't reformat comments or docstrings.) +If you would like, you can even have pre-commit run before you commit by +running:: + + pre-commit install + +and now pre-commit will run before git commits. You can uninstall the +pre-commit hook at any time by running:: + + pre-commit uninstall + + Very occasionally, you'll want to override black formatting. To do so, you can can add ``# fmt: off`` and ``# fmt: on`` comments. @@ -311,6 +322,11 @@ If you want to see what changes black will make, you can use:: in-place.) +Additionally, in some cases it is necessary to disable isort changing the +order of imports. To do so you can add ``# isort: split`` comments. +For more information, please see `isort's docs `__. + + .. _pull-request-release-notes: Release notes From ef6d8ee5cbaf63f9d71746a98638249e171b81ec Mon Sep 17 00:00:00 2001 From: EXPLOSION Date: Tue, 22 Aug 2023 09:39:39 +0900 Subject: [PATCH 146/162] Fix pyright output (#2769) --- trio/_tests/check_type_completeness.py | 11 +- trio/_tests/verify_types_darwin.json | 199 ++----- trio/_tests/verify_types_linux.json | 112 +--- trio/_tests/verify_types_windows.json | 743 +++++-------------------- 4 files changed, 193 insertions(+), 872 deletions(-) diff --git a/trio/_tests/check_type_completeness.py b/trio/_tests/check_type_completeness.py index 449f18f9ab..00d519c8a8 100755 --- a/trio/_tests/check_type_completeness.py +++ b/trio/_tests/check_type_completeness.py @@ -140,10 +140,14 @@ def check_type(args: argparse.Namespace, platform: str) -> int: # prune the symbols to only be the name of the symbols with # errors, instead of saving a huge file. - new_symbols = [] + new_symbols: list[dict[str, str]] = [] for symbol in current_result["typeCompleteness"]["symbols"]: if symbol["diagnostics"]: - new_symbols.append(symbol) + # function name + message should be enough context for people! + new_symbols.extend( + {"name": symbol["name"], "message": diagnostic["message"]} + for diagnostic in symbol["diagnostics"] + ) continue # Ensure order of arrays does not affect result. 
@@ -153,7 +157,8 @@ def check_type(args: argparse.Namespace, platform: str) -> int: key=lambda module: module.get("name", "") ) - current_result["typeCompleteness"]["symbols"] = new_symbols + del current_result["typeCompleteness"]["symbols"] + current_result["typeCompleteness"]["diagnostics"] = new_symbols with open(get_result_file_name(platform), "w") as file: json.dump(current_result, file, sort_keys=True, indent=2) diff --git a/trio/_tests/verify_types_darwin.json b/trio/_tests/verify_types_darwin.json index 7329946baa..2b491521f5 100644 --- a/trio/_tests/verify_types_darwin.json +++ b/trio/_tests/verify_types_darwin.json @@ -8,6 +8,36 @@ }, "typeCompleteness": { "completenessScore": 1, + "diagnostics": [ + { + "message": "No docstring found for function \"trio.lowlevel.current_kqueue\"", + "name": "trio.lowlevel.current_kqueue" + }, + { + "message": "No docstring found for function \"trio.lowlevel.monitor_kevent\"", + "name": "trio.lowlevel.monitor_kevent" + }, + { + "message": "No docstring found for function \"trio.lowlevel.notify_closing\"", + "name": "trio.lowlevel.notify_closing" + }, + { + "message": "No docstring found for function \"trio.lowlevel.wait_kevent\"", + "name": "trio.lowlevel.wait_kevent" + }, + { + "message": "No docstring found for function \"trio.lowlevel.wait_readable\"", + "name": "trio.lowlevel.wait_readable" + }, + { + "message": "No docstring found for function \"trio.lowlevel.wait_writable\"", + "name": "trio.lowlevel.wait_writable" + }, + { + "message": "No docstring found for class \"trio.tests.TestsDeprecationWrapper\"", + "name": "trio.tests.TestsDeprecationWrapper" + } + ], "exportedSymbolCounts": { "withAmbiguousType": 0, "withKnownType": 631, @@ -49,173 +79,6 @@ "withKnownType": 682, "withUnknownType": 0 }, - "packageName": "trio", - "symbols": [ - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.current_kqueue\"", - "range": { - "end": { - "character": 44, - "line": 72 - }, - "start": { - "character": 30, - "line": 72 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.current_kqueue", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.monitor_kevent\"", - "range": { - "end": { - "character": 44, - "line": 73 - }, - "start": { - "character": 30, - "line": 73 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.monitor_kevent", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.notify_closing\"", - "range": { - "end": { - "character": 36, - "line": 33 - }, - "start": { - "character": 22, - "line": 33 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.notify_closing", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": 
"/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.wait_kevent\"", - "range": { - "end": { - "character": 38, - "line": 74 - }, - "start": { - "character": 27, - "line": 74 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.wait_kevent", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.wait_readable\"", - "range": { - "end": { - "character": 34, - "line": 42 - }, - "start": { - "character": 21, - "line": 42 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.wait_readable", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.wait_writable\"", - "range": { - "end": { - "character": 34, - "line": 44 - }, - "start": { - "character": 21, - "line": 44 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.wait_writable", - "referenceCount": 1 - }, - { - "category": "class", - "diagnostics": [ - { - "file": "", - "message": "No docstring found for class \"trio.tests.TestsDeprecationWrapper\"", - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.tests.TestsDeprecationWrapper", - "referenceCount": 1 - } - ] + "packageName": "trio" } } diff --git a/trio/_tests/verify_types_linux.json b/trio/_tests/verify_types_linux.json index 76d66813e5..a112e7edc9 100644 --- a/trio/_tests/verify_types_linux.json +++ b/trio/_tests/verify_types_linux.json @@ -8,6 +8,24 @@ }, "typeCompleteness": { "completenessScore": 1, + "diagnostics": [ + { + "message": "No docstring found for function \"trio.lowlevel.notify_closing\"", + "name": "trio.lowlevel.notify_closing" + }, + { + "message": "No docstring found for function \"trio.lowlevel.wait_readable\"", + "name": "trio.lowlevel.wait_readable" + }, + { + "message": "No docstring found for function \"trio.lowlevel.wait_writable\"", + "name": "trio.lowlevel.wait_writable" + }, + { + "message": "No docstring found for class \"trio.tests.TestsDeprecationWrapper\"", + "name": "trio.tests.TestsDeprecationWrapper" + } + ], "exportedSymbolCounts": { "withAmbiguousType": 0, "withKnownType": 628, @@ -49,98 +67,6 @@ "withKnownType": 682, "withUnknownType": 0 }, - "packageName": "trio", - "symbols": [ - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.notify_closing\"", - "range": { - "end": { - "character": 36, - "line": 33 - }, - "start": { - "character": 22, - "line": 33 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.notify_closing", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": 
"/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.wait_readable\"", - "range": { - "end": { - "character": 34, - "line": 42 - }, - "start": { - "character": 21, - "line": 42 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.wait_readable", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.wait_writable\"", - "range": { - "end": { - "character": 34, - "line": 44 - }, - "start": { - "character": 21, - "line": 44 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.wait_writable", - "referenceCount": 1 - }, - { - "category": "class", - "diagnostics": [ - { - "file": "", - "message": "No docstring found for class \"trio.tests.TestsDeprecationWrapper\"", - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.tests.TestsDeprecationWrapper", - "referenceCount": 1 - } - ] + "packageName": "trio" } } diff --git a/trio/_tests/verify_types_windows.json b/trio/_tests/verify_types_windows.json index 24ef5f355b..13c4756bd4 100644 --- a/trio/_tests/verify_types_windows.json +++ b/trio/_tests/verify_types_windows.json @@ -8,6 +8,140 @@ }, "typeCompleteness": { "completenessScore": 0.9857369255150554, + "diagnostics": [ + { + "message": "Return type annotation is missing", + "name": "trio.lowlevel.current_iocp" + }, + { + "message": "No docstring found for function \"trio.lowlevel.current_iocp\"", + "name": "trio.lowlevel.current_iocp" + }, + { + "message": "Return type annotation is missing", + "name": "trio.lowlevel.monitor_completion_key" + }, + { + "message": "No docstring found for function \"trio.lowlevel.monitor_completion_key\"", + "name": "trio.lowlevel.monitor_completion_key" + }, + { + "message": "Type annotation for parameter \"handle\" is missing", + "name": "trio.lowlevel.notify_closing" + }, + { + "message": "Return type annotation is missing", + "name": "trio.lowlevel.notify_closing" + }, + { + "message": "No docstring found for function \"trio.lowlevel.notify_closing\"", + "name": "trio.lowlevel.notify_closing" + }, + { + "message": "No docstring found for function \"trio.lowlevel.open_process\"", + "name": "trio.lowlevel.open_process" + }, + { + "message": "Type annotation for parameter \"handle\" is missing", + "name": "trio.lowlevel.readinto_overlapped" + }, + { + "message": "Type annotation for parameter \"buffer\" is missing", + "name": "trio.lowlevel.readinto_overlapped" + }, + { + "message": "Type annotation for parameter \"file_offset\" is missing", + "name": "trio.lowlevel.readinto_overlapped" + }, + { + "message": "Return type annotation is missing", + "name": "trio.lowlevel.readinto_overlapped" + }, + { + "message": "No docstring found for function \"trio.lowlevel.readinto_overlapped\"", + "name": "trio.lowlevel.readinto_overlapped" + }, + { + "message": "Type annotation for parameter \"handle\" is missing", + "name": "trio.lowlevel.register_with_iocp" + }, + { + "message": "Return type annotation is missing", + "name": "trio.lowlevel.register_with_iocp" + }, + { + "message": "No docstring found for function 
\"trio.lowlevel.register_with_iocp\"", + "name": "trio.lowlevel.register_with_iocp" + }, + { + "message": "Type annotation for parameter \"handle\" is missing", + "name": "trio.lowlevel.wait_overlapped" + }, + { + "message": "Type annotation for parameter \"lpOverlapped\" is missing", + "name": "trio.lowlevel.wait_overlapped" + }, + { + "message": "Return type annotation is missing", + "name": "trio.lowlevel.wait_overlapped" + }, + { + "message": "No docstring found for function \"trio.lowlevel.wait_overlapped\"", + "name": "trio.lowlevel.wait_overlapped" + }, + { + "message": "Type annotation for parameter \"sock\" is missing", + "name": "trio.lowlevel.wait_readable" + }, + { + "message": "Return type annotation is missing", + "name": "trio.lowlevel.wait_readable" + }, + { + "message": "No docstring found for function \"trio.lowlevel.wait_readable\"", + "name": "trio.lowlevel.wait_readable" + }, + { + "message": "Type annotation for parameter \"sock\" is missing", + "name": "trio.lowlevel.wait_writable" + }, + { + "message": "Return type annotation is missing", + "name": "trio.lowlevel.wait_writable" + }, + { + "message": "No docstring found for function \"trio.lowlevel.wait_writable\"", + "name": "trio.lowlevel.wait_writable" + }, + { + "message": "Type annotation for parameter \"handle\" is missing", + "name": "trio.lowlevel.write_overlapped" + }, + { + "message": "Type annotation for parameter \"data\" is missing", + "name": "trio.lowlevel.write_overlapped" + }, + { + "message": "Type annotation for parameter \"file_offset\" is missing", + "name": "trio.lowlevel.write_overlapped" + }, + { + "message": "Return type annotation is missing", + "name": "trio.lowlevel.write_overlapped" + }, + { + "message": "No docstring found for function \"trio.lowlevel.write_overlapped\"", + "name": "trio.lowlevel.write_overlapped" + }, + { + "message": "No docstring found for function \"trio.run_process\"", + "name": "trio.run_process" + }, + { + "message": "No docstring found for class \"trio.tests.TestsDeprecationWrapper\"", + "name": "trio.tests.TestsDeprecationWrapper" + } + ], "exportedSymbolCounts": { "withAmbiguousType": 0, "withKnownType": 622, @@ -49,613 +183,6 @@ "withKnownType": 673, "withUnknownType": 0 }, - "packageName": "trio", - "symbols": [ - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Return type annotation is missing", - "range": { - "end": { - "character": 36, - "line": 57 - }, - "start": { - "character": 24, - "line": 57 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.current_iocp\"", - "range": { - "end": { - "character": 36, - "line": 57 - }, - "start": { - "character": 24, - "line": 57 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": false, - "name": "trio.lowlevel.current_iocp", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Return type annotation is missing", - "range": { - "end": { - "character": 56, - "line": 58 - }, - "start": { - "character": 34, - "line": 58 - } - }, - "severity": "error" - }, - { - "file": 
"/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.monitor_completion_key\"", - "range": { - "end": { - "character": 56, - "line": 58 - }, - "start": { - "character": 34, - "line": 58 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": false, - "name": "trio.lowlevel.monitor_completion_key", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"handle\" is missing", - "range": { - "end": { - "character": 36, - "line": 33 - }, - "start": { - "character": 22, - "line": 33 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Return type annotation is missing", - "range": { - "end": { - "character": 36, - "line": 33 - }, - "start": { - "character": 22, - "line": 33 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.notify_closing\"", - "range": { - "end": { - "character": 36, - "line": 33 - }, - "start": { - "character": 22, - "line": 33 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": false, - "name": "trio.lowlevel.notify_closing", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_subprocess.py", - "message": "No docstring found for function \"trio.lowlevel.open_process\"", - "range": { - "end": { - "character": 53, - "line": 46 - }, - "start": { - "character": 41, - "line": 46 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.lowlevel.open_process", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"handle\" is missing", - "range": { - "end": { - "character": 50, - "line": 59 - }, - "start": { - "character": 31, - "line": 59 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"buffer\" is missing", - "range": { - "end": { - "character": 50, - "line": 59 - }, - "start": { - "character": 31, - "line": 59 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"file_offset\" is missing", - "range": { - "end": { - "character": 50, - "line": 59 - }, - "start": { - "character": 31, - "line": 59 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Return type annotation is missing", - "range": { - "end": { - "character": 50, - "line": 59 - }, - "start": { - "character": 31, - "line": 59 - } - 
}, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.readinto_overlapped\"", - "range": { - "end": { - "character": 50, - "line": 59 - }, - "start": { - "character": 31, - "line": 59 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": false, - "name": "trio.lowlevel.readinto_overlapped", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"handle\" is missing", - "range": { - "end": { - "character": 48, - "line": 60 - }, - "start": { - "character": 30, - "line": 60 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Return type annotation is missing", - "range": { - "end": { - "character": 48, - "line": 60 - }, - "start": { - "character": 30, - "line": 60 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.register_with_iocp\"", - "range": { - "end": { - "character": 48, - "line": 60 - }, - "start": { - "character": 30, - "line": 60 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": false, - "name": "trio.lowlevel.register_with_iocp", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"handle\" is missing", - "range": { - "end": { - "character": 42, - "line": 61 - }, - "start": { - "character": 27, - "line": 61 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"lpOverlapped\" is missing", - "range": { - "end": { - "character": 42, - "line": 61 - }, - "start": { - "character": 27, - "line": 61 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Return type annotation is missing", - "range": { - "end": { - "character": 42, - "line": 61 - }, - "start": { - "character": 27, - "line": 61 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.wait_overlapped\"", - "range": { - "end": { - "character": 42, - "line": 61 - }, - "start": { - "character": 27, - "line": 61 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": false, - "name": "trio.lowlevel.wait_overlapped", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"sock\" is missing", - "range": { - "end": { - "character": 34, - "line": 42 - 
}, - "start": { - "character": 21, - "line": 42 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Return type annotation is missing", - "range": { - "end": { - "character": 34, - "line": 42 - }, - "start": { - "character": 21, - "line": 42 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.wait_readable\"", - "range": { - "end": { - "character": 34, - "line": 42 - }, - "start": { - "character": 21, - "line": 42 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": false, - "name": "trio.lowlevel.wait_readable", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"sock\" is missing", - "range": { - "end": { - "character": 34, - "line": 44 - }, - "start": { - "character": 21, - "line": 44 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Return type annotation is missing", - "range": { - "end": { - "character": 34, - "line": 44 - }, - "start": { - "character": 21, - "line": 44 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.wait_writable\"", - "range": { - "end": { - "character": 34, - "line": 44 - }, - "start": { - "character": 21, - "line": 44 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": false, - "name": "trio.lowlevel.wait_writable", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"handle\" is missing", - "range": { - "end": { - "character": 44, - "line": 62 - }, - "start": { - "character": 28, - "line": 62 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"data\" is missing", - "range": { - "end": { - "character": 44, - "line": 62 - }, - "start": { - "character": 28, - "line": 62 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Type annotation for parameter \"file_offset\" is missing", - "range": { - "end": { - "character": 44, - "line": 62 - }, - "start": { - "character": 28, - "line": 62 - } - }, - "severity": "error" - }, - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "Return type annotation is missing", - "range": { - "end": { - "character": 44, - "line": 62 - }, - "start": { - "character": 28, - "line": 62 - } - }, - "severity": "error" - }, - { - "file": 
"/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_core/__init__.py", - "message": "No docstring found for function \"trio.lowlevel.write_overlapped\"", - "range": { - "end": { - "character": 44, - "line": 62 - }, - "start": { - "character": 28, - "line": 62 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": false, - "name": "trio.lowlevel.write_overlapped", - "referenceCount": 1 - }, - { - "category": "function", - "diagnostics": [ - { - "file": "/home/h/Git/trio/typing_improvements/.tox/verifytypes/lib/python3.8/site-packages/trio/_subprocess.py", - "message": "No docstring found for function \"trio.run_process\"", - "range": { - "end": { - "character": 71, - "line": 83 - }, - "start": { - "character": 60, - "line": 83 - } - }, - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.run_process", - "referenceCount": 1 - }, - { - "category": "class", - "diagnostics": [ - { - "file": "", - "message": "No docstring found for class \"trio.tests.TestsDeprecationWrapper\"", - "severity": "warning" - } - ], - "isExported": true, - "isTypeAmbiguous": false, - "isTypeKnown": true, - "name": "trio.tests.TestsDeprecationWrapper", - "referenceCount": 1 - } - ] + "packageName": "trio" } } From 0c108f94da7fc82008f5e5a034f48f4a7854269e Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Wed, 23 Aug 2023 16:28:31 +0200 Subject: [PATCH 147/162] expand wildcard for test files in mypy ignorelist (#2768) --- pyproject.toml | 52 ++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 48 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 184b46056d..24be2d07bf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,7 @@ disallow_untyped_decorators = true disallow_untyped_defs = true # Enable once other problems are dealt with -check_untyped_defs = false +check_untyped_defs = true disallow_untyped_calls = false # files not yet fully typed @@ -70,10 +70,54 @@ module = [ "trio/_windows_pipes", # tests -"trio/_core/_tests/*", -"trio/_tests/*", -"trio/testing/_fake_net", # 30 +"trio/testing/_fake_net", +"trio/_core/_tests/test_asyncgen", +"trio/_core/_tests/test_guest_mode", +"trio/_core/_tests/test_instrumentation", +"trio/_core/_tests/test_io", +"trio/_core/_tests/test_ki", +"trio/_core/_tests/test_local", +"trio/_core/_tests/test_mock_clock", +"trio/_core/_tests/test_multierror", +"trio/_core/_tests/test_multierror_scripts/ipython_custom_exc", +"trio/_core/_tests/test_multierror_scripts/simple_excepthook", +"trio/_core/_tests/test_parking_lot", +"trio/_core/_tests/test_run", +"trio/_core/_tests/test_thread_cache", +"trio/_core/_tests/test_tutil", +"trio/_core/_tests/test_unbounded_queue", +"trio/_core/_tests/tutil", +"trio/_tests/pytest_plugin", +"trio/_tests/test_abc", +"trio/_tests/test_channel", +"trio/_tests/test_deprecate", +"trio/_tests/test_dtls", +"trio/_tests/test_exports", +"trio/_tests/test_file_io", +"trio/_tests/test_highlevel_generic", +"trio/_tests/test_highlevel_open_tcp_listeners", +"trio/_tests/test_highlevel_open_tcp_stream", +"trio/_tests/test_highlevel_open_unix_stream", +"trio/_tests/test_highlevel_serve_listeners", +"trio/_tests/test_highlevel_socket", +"trio/_tests/test_highlevel_ssl_helpers", +"trio/_tests/test_path", +"trio/_tests/test_scheduler_determinism", +"trio/_tests/test_signals", +"trio/_tests/test_socket", +"trio/_tests/test_ssl", 
+"trio/_tests/test_subprocess", +"trio/_tests/test_sync", +"trio/_tests/test_testing", +"trio/_tests/test_threads", +"trio/_tests/test_timeouts", +"trio/_tests/test_tracing", +"trio/_tests/test_util", +"trio/_tests/test_wait_for_object", +"trio/_tests/test_windows_pipes", +"trio/_tests/tools/test_gen_exports", ] +check_untyped_defs = false disallow_any_decorated = false disallow_any_generics = false disallow_any_unimported = false From 4c539419f37b17b22b71e719f8391ebc95597497 Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Sun, 27 Aug 2023 00:27:35 +1000 Subject: [PATCH 148/162] Implement #2750: Run `black`+`isort` on `gen_exports` results (#2759) * Run isort and black on gen_exports results * Auto-format generated modules * Exit with a status code if black/isort failed. * Skip testing gen_exports on non-CPython We don't have black/isort installed, not very useful. * Use the correct attribute for the implementation name * Test that gen_export fails properly with invalid code. --------- Co-authored-by: John Litborn <11260241+jakkdl@users.noreply.github.com> --- trio/_core/_generated_instrumentation.py | 36 ++- trio/_core/_generated_io_epoll.py | 21 +- trio/_core/_generated_io_kqueue.py | 33 ++- trio/_core/_generated_io_windows.py | 25 +- trio/_core/_generated_run.py | 330 +++++++++++------------ trio/_tests/tools/test_gen_exports.py | 21 ++ trio/_tools/gen_exports.py | 46 +++- 7 files changed, 276 insertions(+), 236 deletions(-) diff --git a/trio/_core/_generated_instrumentation.py b/trio/_core/_generated_instrumentation.py index 605a6372f2..652fed1288 100644 --- a/trio/_core/_generated_instrumentation.py +++ b/trio/_core/_generated_instrumentation.py @@ -1,25 +1,22 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# Don't lint this file, generation will not format this too nicely. -# isort: skip_file -# fmt: off from __future__ import annotations +from ._instrumentation import Instrument from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED from ._run import GLOBAL_RUN_CONTEXT -from ._instrumentation import Instrument -def add_instrument(instrument: Instrument) ->None: +def add_instrument(instrument: Instrument) -> None: """Start instrumenting the current run loop with the given instrument. - Args: - instrument (trio.abc.Instrument): The instrument to activate. + Args: + instrument (trio.abc.Instrument): The instrument to activate. - If ``instrument`` is already active, does nothing. + If ``instrument`` is already active, does nothing. - """ + """ locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.instruments.add_instrument(instrument) @@ -27,24 +24,21 @@ def add_instrument(instrument: Instrument) ->None: raise RuntimeError("must be called from async context") -def remove_instrument(instrument: Instrument) ->None: +def remove_instrument(instrument: Instrument) -> None: """Stop instrumenting the current run loop with the given instrument. - Args: - instrument (trio.abc.Instrument): The instrument to de-activate. + Args: + instrument (trio.abc.Instrument): The instrument to de-activate. - Raises: - KeyError: if the instrument is not currently active. This could - occur either because you never added it, or because you added it - and then it raised an unhandled exception and was automatically - deactivated. + Raises: + KeyError: if the instrument is not currently active. 
This could + occur either because you never added it, or because you added it + and then it raised an unhandled exception and was automatically + deactivated. - """ + """ locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.instruments.remove_instrument(instrument) except AttributeError: raise RuntimeError("must be called from async context") - - -# fmt: on diff --git a/trio/_core/_generated_io_epoll.py b/trio/_core/_generated_io_epoll.py index abe49ed3ff..4dc2b59c98 100644 --- a/trio/_core/_generated_io_epoll.py +++ b/trio/_core/_generated_io_epoll.py @@ -1,21 +1,19 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# Don't lint this file, generation will not format this too nicely. -# isort: skip_file -# fmt: off from __future__ import annotations -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED -from ._run import GLOBAL_RUN_CONTEXT +import sys from socket import socket from typing import TYPE_CHECKING -import sys -assert not TYPE_CHECKING or sys.platform=="linux" +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import GLOBAL_RUN_CONTEXT + +assert not TYPE_CHECKING or sys.platform == "linux" -async def wait_readable(fd: (int | socket)) ->None: +async def wait_readable(fd: (int | socket)) -> None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_readable(fd) @@ -23,7 +21,7 @@ async def wait_readable(fd: (int | socket)) ->None: raise RuntimeError("must be called from async context") -async def wait_writable(fd: (int | socket)) ->None: +async def wait_writable(fd: (int | socket)) -> None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_writable(fd) @@ -31,12 +29,9 @@ async def wait_writable(fd: (int | socket)) ->None: raise RuntimeError("must be called from async context") -def notify_closing(fd: (int | socket)) ->None: +def notify_closing(fd: (int | socket)) -> None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.io_manager.notify_closing(fd) except AttributeError: raise RuntimeError("must be called from async context") - - -# fmt: on diff --git a/trio/_core/_generated_io_kqueue.py b/trio/_core/_generated_io_kqueue.py index cfcf6354c7..9c8ca26ef3 100644 --- a/trio/_core/_generated_io_kqueue.py +++ b/trio/_core/_generated_io_kqueue.py @@ -1,14 +1,12 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# Don't lint this file, generation will not format this too nicely. 
-# isort: skip_file -# fmt: off from __future__ import annotations +from typing import TYPE_CHECKING, Callable, ContextManager + from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED from ._run import GLOBAL_RUN_CONTEXT -from typing import Callable, ContextManager, TYPE_CHECKING if TYPE_CHECKING: import select @@ -20,10 +18,10 @@ import sys -assert not TYPE_CHECKING or sys.platform=="darwin" +assert not TYPE_CHECKING or sys.platform == "darwin" -def current_kqueue() ->select.kqueue: +def current_kqueue() -> select.kqueue: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.io_manager.current_kqueue() @@ -31,8 +29,9 @@ def current_kqueue() ->select.kqueue: raise RuntimeError("must be called from async context") -def monitor_kevent(ident: int, filter: int) ->ContextManager[_core.UnboundedQueue - [select.kevent]]: +def monitor_kevent( + ident: int, filter: int +) -> ContextManager[_core.UnboundedQueue[select.kevent]]: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.io_manager.monitor_kevent(ident, filter) @@ -40,16 +39,19 @@ def monitor_kevent(ident: int, filter: int) ->ContextManager[_core.UnboundedQueu raise RuntimeError("must be called from async context") -async def wait_kevent(ident: int, filter: int, abort_func: Callable[[ - RaiseCancelT], Abort]) ->Abort: +async def wait_kevent( + ident: int, filter: int, abort_func: Callable[[RaiseCancelT], Abort] +) -> Abort: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: - return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_kevent(ident, filter, abort_func) + return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_kevent( + ident, filter, abort_func + ) except AttributeError: raise RuntimeError("must be called from async context") -async def wait_readable(fd: (int | socket)) ->None: +async def wait_readable(fd: (int | socket)) -> None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_readable(fd) @@ -57,7 +59,7 @@ async def wait_readable(fd: (int | socket)) ->None: raise RuntimeError("must be called from async context") -async def wait_writable(fd: (int | socket)) ->None: +async def wait_writable(fd: (int | socket)) -> None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_writable(fd) @@ -65,12 +67,9 @@ async def wait_writable(fd: (int | socket)) ->None: raise RuntimeError("must be called from async context") -def notify_closing(fd: (int | socket)) ->None: +def notify_closing(fd: (int | socket)) -> None: locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.io_manager.notify_closing(fd) except AttributeError: raise RuntimeError("must be called from async context") - - -# fmt: on diff --git a/trio/_core/_generated_io_windows.py b/trio/_core/_generated_io_windows.py index 7fa6fd5126..b81255d8a9 100644 --- a/trio/_core/_generated_io_windows.py +++ b/trio/_core/_generated_io_windows.py @@ -1,17 +1,15 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# Don't lint this file, generation will not format this too nicely. 
-# isort: skip_file -# fmt: off from __future__ import annotations +import sys +from typing import TYPE_CHECKING + from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED from ._run import GLOBAL_RUN_CONTEXT -from typing import TYPE_CHECKING -import sys -assert not TYPE_CHECKING or sys.platform=="win32" +assert not TYPE_CHECKING or sys.platform == "win32" async def wait_readable(sock): @@ -49,7 +47,9 @@ def register_with_iocp(handle): async def wait_overlapped(handle, lpOverlapped): locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: - return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_overlapped(handle, lpOverlapped) + return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_overlapped( + handle, lpOverlapped + ) except AttributeError: raise RuntimeError("must be called from async context") @@ -57,7 +57,9 @@ async def wait_overlapped(handle, lpOverlapped): async def write_overlapped(handle, data, file_offset=0): locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: - return await GLOBAL_RUN_CONTEXT.runner.io_manager.write_overlapped(handle, data, file_offset) + return await GLOBAL_RUN_CONTEXT.runner.io_manager.write_overlapped( + handle, data, file_offset + ) except AttributeError: raise RuntimeError("must be called from async context") @@ -65,7 +67,9 @@ async def write_overlapped(handle, data, file_offset=0): async def readinto_overlapped(handle, buffer, file_offset=0): locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: - return await GLOBAL_RUN_CONTEXT.runner.io_manager.readinto_overlapped(handle, buffer, file_offset) + return await GLOBAL_RUN_CONTEXT.runner.io_manager.readinto_overlapped( + handle, buffer, file_offset + ) except AttributeError: raise RuntimeError("must be called from async context") @@ -84,6 +88,3 @@ def monitor_completion_key(): return GLOBAL_RUN_CONTEXT.runner.io_manager.monitor_completion_key() except AttributeError: raise RuntimeError("must be called from async context") - - -# fmt: on diff --git a/trio/_core/_generated_run.py b/trio/_core/_generated_run.py index bd5abbd639..3e1b7b78f1 100644 --- a/trio/_core/_generated_run.py +++ b/trio/_core/_generated_run.py @@ -1,47 +1,43 @@ # *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# Don't lint this file, generation will not format this too nicely. -# isort: skip_file -# fmt: off from __future__ import annotations -from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED -from ._run import GLOBAL_RUN_CONTEXT +import contextvars from collections.abc import Awaitable, Callable from typing import Any from outcome import Outcome -import contextvars -from ._run import _NO_SEND, RunStatistics, Task -from ._entry_queue import TrioToken from .._abc import Clock +from ._entry_queue import TrioToken +from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED +from ._run import _NO_SEND, GLOBAL_RUN_CONTEXT, RunStatistics, Task -def current_statistics() ->RunStatistics: +def current_statistics() -> RunStatistics: """Returns ``RunStatistics``, which contains run-loop-level debugging information. - Currently, the following fields are defined: - - * ``tasks_living`` (int): The number of tasks that have been spawned - and not yet exited. - * ``tasks_runnable`` (int): The number of tasks that are currently - queued on the run queue (as opposed to blocked waiting for something - to happen). - * ``seconds_to_next_deadline`` (float): The time until the next - pending cancel scope deadline. 
May be negative if the deadline has - expired but we haven't yet processed cancellations. May be - :data:`~math.inf` if there are no pending deadlines. - * ``run_sync_soon_queue_size`` (int): The number of - unprocessed callbacks queued via - :meth:`trio.lowlevel.TrioToken.run_sync_soon`. - * ``io_statistics`` (object): Some statistics from Trio's I/O - backend. This always has an attribute ``backend`` which is a string - naming which operating-system-specific I/O backend is in use; the - other attributes vary between backends. - - """ + Currently, the following fields are defined: + + * ``tasks_living`` (int): The number of tasks that have been spawned + and not yet exited. + * ``tasks_runnable`` (int): The number of tasks that are currently + queued on the run queue (as opposed to blocked waiting for something + to happen). + * ``seconds_to_next_deadline`` (float): The time until the next + pending cancel scope deadline. May be negative if the deadline has + expired but we haven't yet processed cancellations. May be + :data:`~math.inf` if there are no pending deadlines. + * ``run_sync_soon_queue_size`` (int): The number of + unprocessed callbacks queued via + :meth:`trio.lowlevel.TrioToken.run_sync_soon`. + * ``io_statistics`` (object): Some statistics from Trio's I/O + backend. This always has an attribute ``backend`` which is a string + naming which operating-system-specific I/O backend is in use; the + other attributes vary between backends. + + """ locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.current_statistics() @@ -49,16 +45,16 @@ def current_statistics() ->RunStatistics: raise RuntimeError("must be called from async context") -def current_time() ->float: +def current_time() -> float: """Returns the current time according to Trio's internal clock. - Returns: - float: The current time. + Returns: + float: The current time. - Raises: - RuntimeError: if not inside a call to :func:`trio.run`. + Raises: + RuntimeError: if not inside a call to :func:`trio.run`. - """ + """ locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.current_time() @@ -66,7 +62,7 @@ def current_time() ->float: raise RuntimeError("must be called from async context") -def current_clock() ->Clock: +def current_clock() -> Clock: """Returns the current :class:`~trio.abc.Clock`.""" locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: @@ -75,12 +71,12 @@ def current_clock() ->Clock: raise RuntimeError("must be called from async context") -def current_root_task() ->(Task | None): +def current_root_task() -> Task | None: """Returns the current root :class:`Task`. - This is the task that is the ultimate parent of all other tasks. + This is the task that is the ultimate parent of all other tasks. - """ + """ locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.current_root_task() @@ -88,24 +84,24 @@ def current_root_task() ->(Task | None): raise RuntimeError("must be called from async context") -def reschedule(task: Task, next_send: Outcome[Any]=_NO_SEND) ->None: +def reschedule(task: Task, next_send: Outcome[Any] = _NO_SEND) -> None: """Reschedule the given task with the given - :class:`outcome.Outcome`. + :class:`outcome.Outcome`. - See :func:`wait_task_rescheduled` for the gory details. + See :func:`wait_task_rescheduled` for the gory details. - There must be exactly one call to :func:`reschedule` for every call to - :func:`wait_task_rescheduled`. 
(And when counting, keep in mind that - returning :data:`Abort.SUCCEEDED` from an abort callback is equivalent - to calling :func:`reschedule` once.) + There must be exactly one call to :func:`reschedule` for every call to + :func:`wait_task_rescheduled`. (And when counting, keep in mind that + returning :data:`Abort.SUCCEEDED` from an abort callback is equivalent + to calling :func:`reschedule` once.) - Args: - task (trio.lowlevel.Task): the task to be rescheduled. Must be blocked - in a call to :func:`wait_task_rescheduled`. - next_send (outcome.Outcome): the value (or error) to return (or - raise) from :func:`wait_task_rescheduled`. + Args: + task (trio.lowlevel.Task): the task to be rescheduled. Must be blocked + in a call to :func:`wait_task_rescheduled`. + next_send (outcome.Outcome): the value (or error) to return (or + raise) from :func:`wait_task_rescheduled`. - """ + """ locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.reschedule(task, next_send) @@ -113,72 +109,77 @@ def reschedule(task: Task, next_send: Outcome[Any]=_NO_SEND) ->None: raise RuntimeError("must be called from async context") -def spawn_system_task(async_fn: Callable[..., Awaitable[object]], *args: - object, name: object=None, context: (contextvars.Context | None)=None - ) ->Task: +def spawn_system_task( + async_fn: Callable[..., Awaitable[object]], + *args: object, + name: object = None, + context: (contextvars.Context | None) = None, +) -> Task: """Spawn a "system" task. - System tasks have a few differences from regular tasks: - - * They don't need an explicit nursery; instead they go into the - internal "system nursery". - - * If a system task raises an exception, then it's converted into a - :exc:`~trio.TrioInternalError` and *all* tasks are cancelled. If you - write a system task, you should be careful to make sure it doesn't - crash. - - * System tasks are automatically cancelled when the main task exits. - - * By default, system tasks have :exc:`KeyboardInterrupt` protection - *enabled*. If you want your task to be interruptible by control-C, - then you need to use :func:`disable_ki_protection` explicitly (and - come up with some plan for what to do with a - :exc:`KeyboardInterrupt`, given that system tasks aren't allowed to - raise exceptions). - - * System tasks do not inherit context variables from their creator. - - Towards the end of a call to :meth:`trio.run`, after the main - task and all system tasks have exited, the system nursery - becomes closed. At this point, new calls to - :func:`spawn_system_task` will raise ``RuntimeError("Nursery - is closed to new arrivals")`` instead of creating a system - task. It's possible to encounter this state either in - a ``finally`` block in an async generator, or in a callback - passed to :meth:`TrioToken.run_sync_soon` at the right moment. - - Args: - async_fn: An async callable. - args: Positional arguments for ``async_fn``. If you want to pass - keyword arguments, use :func:`functools.partial`. - name: The name for this task. Only used for debugging/introspection - (e.g. ``repr(task_obj)``). If this isn't a string, - :func:`spawn_system_task` will try to make it one. A common use - case is if you're wrapping a function before spawning a new - task, you might pass the original function as the ``name=`` to - make debugging easier. - context: An optional ``contextvars.Context`` object with context variables - to use for this task. 
You would normally get a copy of the current - context with ``context = contextvars.copy_context()`` and then you would - pass that ``context`` object here. - - Returns: - Task: the newly spawned task - - """ + System tasks have a few differences from regular tasks: + + * They don't need an explicit nursery; instead they go into the + internal "system nursery". + + * If a system task raises an exception, then it's converted into a + :exc:`~trio.TrioInternalError` and *all* tasks are cancelled. If you + write a system task, you should be careful to make sure it doesn't + crash. + + * System tasks are automatically cancelled when the main task exits. + + * By default, system tasks have :exc:`KeyboardInterrupt` protection + *enabled*. If you want your task to be interruptible by control-C, + then you need to use :func:`disable_ki_protection` explicitly (and + come up with some plan for what to do with a + :exc:`KeyboardInterrupt`, given that system tasks aren't allowed to + raise exceptions). + + * System tasks do not inherit context variables from their creator. + + Towards the end of a call to :meth:`trio.run`, after the main + task and all system tasks have exited, the system nursery + becomes closed. At this point, new calls to + :func:`spawn_system_task` will raise ``RuntimeError("Nursery + is closed to new arrivals")`` instead of creating a system + task. It's possible to encounter this state either in + a ``finally`` block in an async generator, or in a callback + passed to :meth:`TrioToken.run_sync_soon` at the right moment. + + Args: + async_fn: An async callable. + args: Positional arguments for ``async_fn``. If you want to pass + keyword arguments, use :func:`functools.partial`. + name: The name for this task. Only used for debugging/introspection + (e.g. ``repr(task_obj)``). If this isn't a string, + :func:`spawn_system_task` will try to make it one. A common use + case is if you're wrapping a function before spawning a new + task, you might pass the original function as the ``name=`` to + make debugging easier. + context: An optional ``contextvars.Context`` object with context variables + to use for this task. You would normally get a copy of the current + context with ``context = contextvars.copy_context()`` and then you would + pass that ``context`` object here. + + Returns: + Task: the newly spawned task + + """ locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: - return GLOBAL_RUN_CONTEXT.runner.spawn_system_task(async_fn, *args, name=name, context=context) + return GLOBAL_RUN_CONTEXT.runner.spawn_system_task( + async_fn, *args, name=name, context=context + ) except AttributeError: raise RuntimeError("must be called from async context") -def current_trio_token() ->TrioToken: +def current_trio_token() -> TrioToken: """Retrieve the :class:`TrioToken` for the current call to - :func:`trio.run`. + :func:`trio.run`. - """ + """ locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return GLOBAL_RUN_CONTEXT.runner.current_trio_token() @@ -186,69 +187,66 @@ def current_trio_token() ->TrioToken: raise RuntimeError("must be called from async context") -async def wait_all_tasks_blocked(cushion: float=0.0) ->None: +async def wait_all_tasks_blocked(cushion: float = 0.0) -> None: """Block until there are no runnable tasks. - This is useful in testing code when you want to give other tasks a - chance to "settle down". The calling task is blocked, and doesn't wake - up until all other tasks are also blocked for at least ``cushion`` - seconds. 
(Setting a non-zero ``cushion`` is intended to handle cases - like two tasks talking to each other over a local socket, where we - want to ignore the potential brief moment between a send and receive - when all tasks are blocked.) - - Note that ``cushion`` is measured in *real* time, not the Trio clock - time. - - If there are multiple tasks blocked in :func:`wait_all_tasks_blocked`, - then the one with the shortest ``cushion`` is the one woken (and - this task becoming unblocked resets the timers for the remaining - tasks). If there are multiple tasks that have exactly the same - ``cushion``, then all are woken. - - You should also consider :class:`trio.testing.Sequencer`, which - provides a more explicit way to control execution ordering within a - test, and will often produce more readable tests. - - Example: - Here's an example of one way to test that Trio's locks are fair: we - take the lock in the parent, start a child, wait for the child to be - blocked waiting for the lock (!), and then check that we can't - release and immediately re-acquire the lock:: - - async def lock_taker(lock): - await lock.acquire() + This is useful in testing code when you want to give other tasks a + chance to "settle down". The calling task is blocked, and doesn't wake + up until all other tasks are also blocked for at least ``cushion`` + seconds. (Setting a non-zero ``cushion`` is intended to handle cases + like two tasks talking to each other over a local socket, where we + want to ignore the potential brief moment between a send and receive + when all tasks are blocked.) + + Note that ``cushion`` is measured in *real* time, not the Trio clock + time. + + If there are multiple tasks blocked in :func:`wait_all_tasks_blocked`, + then the one with the shortest ``cushion`` is the one woken (and + this task becoming unblocked resets the timers for the remaining + tasks). If there are multiple tasks that have exactly the same + ``cushion``, then all are woken. + + You should also consider :class:`trio.testing.Sequencer`, which + provides a more explicit way to control execution ordering within a + test, and will often produce more readable tests. 
+ + Example: + Here's an example of one way to test that Trio's locks are fair: we + take the lock in the parent, start a child, wait for the child to be + blocked waiting for the lock (!), and then check that we can't + release and immediately re-acquire the lock:: + + async def lock_taker(lock): + await lock.acquire() + lock.release() + + async def test_lock_fairness(): + lock = trio.Lock() + await lock.acquire() + async with trio.open_nursery() as nursery: + nursery.start_soon(lock_taker, lock) + # child hasn't run yet, we have the lock + assert lock.locked() + assert lock._owner is trio.lowlevel.current_task() + await trio.testing.wait_all_tasks_blocked() + # now the child has run and is blocked on lock.acquire(), we + # still have the lock + assert lock.locked() + assert lock._owner is trio.lowlevel.current_task() lock.release() - - async def test_lock_fairness(): - lock = trio.Lock() - await lock.acquire() - async with trio.open_nursery() as nursery: - nursery.start_soon(lock_taker, lock) - # child hasn't run yet, we have the lock - assert lock.locked() - assert lock._owner is trio.lowlevel.current_task() - await trio.testing.wait_all_tasks_blocked() - # now the child has run and is blocked on lock.acquire(), we - # still have the lock - assert lock.locked() - assert lock._owner is trio.lowlevel.current_task() - lock.release() - try: - # The child has a prior claim, so we can't have it - lock.acquire_nowait() - except trio.WouldBlock: - assert lock._owner is not trio.lowlevel.current_task() - print("PASS") - else: - print("FAIL") - - """ + try: + # The child has a prior claim, so we can't have it + lock.acquire_nowait() + except trio.WouldBlock: + assert lock._owner is not trio.lowlevel.current_task() + print("PASS") + else: + print("FAIL") + + """ locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return await GLOBAL_RUN_CONTEXT.runner.wait_all_tasks_blocked(cushion) except AttributeError: raise RuntimeError("must be called from async context") - - -# fmt: on diff --git a/trio/_tests/tools/test_gen_exports.py b/trio/_tests/tools/test_gen_exports.py index 7d2d6e99a1..e7d8ab94f2 100644 --- a/trio/_tests/tools/test_gen_exports.py +++ b/trio/_tests/tools/test_gen_exports.py @@ -1,4 +1,5 @@ import ast +import sys import pytest @@ -7,6 +8,7 @@ create_passthrough_args, get_public_methods, process, + run_linters, ) SOURCE = '''from _run import _public @@ -69,6 +71,13 @@ def test_create_pass_through_args(): assert create_passthrough_args(func_node) == expected +skip_lints = pytest.mark.skipif( + sys.implementation.name != "cpython", + reason="gen_exports is internal, black/isort only runs on CPython", +) + + +@skip_lints @pytest.mark.parametrize("imports", ["", IMPORT_1, IMPORT_2, IMPORT_3]) def test_process(tmp_path, imports): modpath = tmp_path / "_module.py" @@ -93,3 +102,15 @@ def test_process(tmp_path, imports): with pytest.raises(SystemExit) as excinfo: process([File(modpath, "runner", imports=imports)], do_test=True) assert excinfo.value.code == 1 + + +@skip_lints +def test_lint_failure(tmp_path) -> None: + """Test that processing properly fails if black or isort does.""" + file = File(tmp_path / "module.py", "module") + + with pytest.raises(SystemExit): + run_linters(file, "class not valid code ><") + + with pytest.raises(SystemExit): + run_linters(file, "# isort: skip_file") diff --git a/trio/_tools/gen_exports.py b/trio/_tools/gen_exports.py index 3c598e8eae..6549d473ab 100755 --- a/trio/_tools/gen_exports.py +++ b/trio/_tools/gen_exports.py @@ -8,7 +8,9 @@ import argparse 
import ast import os +import subprocess import sys +import traceback from collections.abc import Iterable, Iterator from pathlib import Path from textwrap import indent @@ -19,24 +21,20 @@ import astor import attr +import isort.api +import isort.exceptions PREFIX = "_generated" HEADER = """# *********************************************************** # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ****** # ************************************************************* -# Don't lint this file, generation will not format this too nicely. -# isort: skip_file -# fmt: off from __future__ import annotations from ._ki import LOCALS_KEY_KI_PROTECTION_ENABLED from ._run import GLOBAL_RUN_CONTEXT """ -FOOTER = """# fmt: on -""" - TEMPLATE = """locals()[LOCALS_KEY_KI_PROTECTION_ENABLED] = True try: return{}GLOBAL_RUN_CONTEXT.{}.{} @@ -104,6 +102,40 @@ def create_passthrough_args(funcdef: ast.FunctionDef | ast.AsyncFunctionDef) -> return "({})".format(", ".join(call_args)) +def run_linters(file: File, source: str) -> str: + """Run isort and black on the specified file, returning the new source. + + :raises ValueError: If either failed. + """ + # Black has an undocumented API, but it doesn't easily allow reading configuration from + # pyproject.toml, and simultaneously pass in / receive the code as a string. + # https://github.com/psf/black/issues/779 + try: + result = subprocess.run( + # "-" as a filename = use stdin, return on stdout. + [sys.executable, "-m", "black", "--stdin-filename", file.path, "-"], + input=source, + capture_output=True, + encoding="utf8", + check=True, + ) + except subprocess.CalledProcessError as exc: + print("Failed to run black!") + traceback.print_exception(type(exc), exc, exc.__traceback__) + sys.exit(1) + # isort does have a public API, makes things easy. + try: + isort_res = isort.api.sort_code_string( + result.stdout, + file_path=file.path, + ) + except isort.exceptions.ISortError as exc: + print("Failed to run isort!") + traceback.print_exception(type(exc), exc, exc.__traceback__) + sys.exit(1) + return isort_res + + def gen_public_wrappers_source(file: File) -> str: """Scan the given .py file for @_public decorators, and generate wrapper functions. 
@@ -170,7 +202,6 @@ def gen_public_wrappers_source(file: File) -> str: # Append the snippet to the corresponding module generated.append(snippet) - generated.append(FOOTER) return "\n\n".join(generated) @@ -190,6 +221,7 @@ def process(files: Iterable[File], *, do_test: bool) -> None: for file in files: print("Scanning:", file.path) new_source = gen_public_wrappers_source(file) + new_source = run_linters(file, new_source) dirname, basename = os.path.split(file.path) new_path = os.path.join(dirname, PREFIX + basename) new_files[new_path] = new_source From 7f78303bf14fbd2b2bbc7b2fd217e2eaff03525e Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Mon, 28 Aug 2023 20:21:29 +1000 Subject: [PATCH 149/162] Fix typecheck failures in `master` (#2777) * Add missing type hints to check_type_completeness * Test_windows is not currently typed --- pyproject.toml | 1 + trio/_tests/check_type_completeness.py | 15 +++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 24be2d07bf..08ac6fa1f5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -86,6 +86,7 @@ module = [ "trio/_core/_tests/test_thread_cache", "trio/_core/_tests/test_tutil", "trio/_core/_tests/test_unbounded_queue", +"trio/_core/_tests/test_windows", "trio/_core/_tests/tutil", "trio/_tests/pytest_plugin", "trio/_tests/test_abc", diff --git a/trio/_tests/check_type_completeness.py b/trio/_tests/check_type_completeness.py index 00d519c8a8..1352926be3 100755 --- a/trio/_tests/check_type_completeness.py +++ b/trio/_tests/check_type_completeness.py @@ -6,19 +6,20 @@ import json import subprocess import sys +from collections.abc import Mapping from pathlib import Path # the result file is not marked in MANIFEST.in so it's not included in the package failed = False -def get_result_file_name(platform: str): +def get_result_file_name(platform: str) -> Path: return Path(__file__).parent / f"verify_types_{platform.lower()}.json" # TODO: consider checking manually without `--ignoreexternal`, and/or # removing it from the below call later on. 
-def run_pyright(platform: str): +def run_pyright(platform: str) -> subprocess.CompletedProcess[bytes]: return subprocess.run( [ "pyright", @@ -33,7 +34,13 @@ def run_pyright(platform: str): ) -def check_less_than(key, current_dict, last_dict, /, invert=False): +def check_less_than( + key: str, + current_dict: Mapping[str, float], + last_dict: Mapping[str, float], + /, + invert: bool = False, +) -> None: global failed current = current_dict[key] last = last_dict[key] @@ -57,7 +64,7 @@ def check_less_than(key, current_dict, last_dict, /, invert=False): ) -def check_zero(key, current_dict): +def check_zero(key: str, current_dict: Mapping[str, float]) -> None: global failed if current_dict[key] != 0: failed = True From e9975aa237a58af62374e140ce7314cfc43106e2 Mon Sep 17 00:00:00 2001 From: John Litborn <11260241+jakkdl@users.noreply.github.com> Date: Wed, 30 Aug 2023 05:43:48 +0200 Subject: [PATCH 150/162] print out the slowest 10 pytest tests (#2760) --- ci.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci.sh b/ci.sh index ed97ff738b..4fb68d617f 100755 --- a/ci.sh +++ b/ci.sh @@ -112,7 +112,7 @@ else # support subprocess spawning with coverage.py echo "import coverage; coverage.process_startup()" | tee -a "$INSTALLDIR/../sitecustomize.py" - if COVERAGE_PROCESS_START=$(pwd)/../.coveragerc coverage run --rcfile=../.coveragerc -m pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --verbose; then + if COVERAGE_PROCESS_START=$(pwd)/../.coveragerc coverage run --rcfile=../.coveragerc -m pytest -r a -p trio._tests.pytest_plugin --junitxml=../test-results.xml --run-slow ${INSTALLDIR} --verbose --durations=10; then PASSED=true else PASSED=false From e97bcb61b8b02523c82435b5408ff46efca5dfc3 Mon Sep 17 00:00:00 2001 From: Spencer Brown Date: Wed, 30 Aug 2023 16:11:28 +1000 Subject: [PATCH 151/162] Add types to `test_io`, `test_run`, various other fixes (#2773) * Use a type guard to eliminate a type-ignore in `_highlevel_generic.py` * Use a protocol to fully typecheck `AsyncContextManagerMixin` * Fully type `test_run` and `test_io` * Remove `_NurseryStartFunc` protocol, this requires `TypeVarTuple` to work --- pyproject.toml | 2 - trio/_core/_run.py | 14 +- trio/_core/_tests/test_io.py | 115 ++-- trio/_core/_tests/test_run.py | 750 ++++++++++++++------------ trio/_highlevel_generic.py | 26 +- trio/_sync.py | 22 +- trio/_tests/verify_types_darwin.json | 2 +- trio/_tests/verify_types_linux.json | 2 +- trio/_tests/verify_types_windows.json | 2 +- 9 files changed, 515 insertions(+), 420 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 08ac6fa1f5..17dd2aa1b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,7 +74,6 @@ module = [ "trio/_core/_tests/test_asyncgen", "trio/_core/_tests/test_guest_mode", "trio/_core/_tests/test_instrumentation", -"trio/_core/_tests/test_io", "trio/_core/_tests/test_ki", "trio/_core/_tests/test_local", "trio/_core/_tests/test_mock_clock", @@ -82,7 +81,6 @@ module = [ "trio/_core/_tests/test_multierror_scripts/ipython_custom_exc", "trio/_core/_tests/test_multierror_scripts/simple_excepthook", "trio/_core/_tests/test_parking_lot", -"trio/_core/_tests/test_run", "trio/_core/_tests/test_thread_cache", "trio/_core/_tests/test_tutil", "trio/_core/_tests/test_unbounded_queue", diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 3a4751254f..4ad1119d1a 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -907,15 +907,6 @@ def started(self, value: StatusT | None = None) -> None: 
self._old_nursery._check_nursery_closed() -class _NurseryStartFunc(Protocol[StatusT_co]): - """Type of functions passed to `nursery.start() `.""" - - def __call__( - self, *args: Any, task_status: TaskStatus[StatusT_co] - ) -> Awaitable[object]: - ... - - @attr.s class NurseryManager: """Nursery context manager. @@ -1173,7 +1164,10 @@ def start_soon( GLOBAL_RUN_CONTEXT.runner.spawn_impl(async_fn, args, self, name) async def start( - self, async_fn: _NurseryStartFunc[StatusT], *args: object, name: object = None + self, + async_fn: Callable[..., Awaitable[object]], + *args: object, + name: object = None, ) -> StatusT: r"""Creates and initializes a child task. diff --git a/trio/_core/_tests/test_io.py b/trio/_core/_tests/test_io.py index 65b9b82bcf..7a4689d3c1 100644 --- a/trio/_core/_tests/test_io.py +++ b/trio/_core/_tests/test_io.py @@ -2,8 +2,9 @@ import random import socket as stdlib_socket -from collections.abc import Callable +from collections.abc import Generator from contextlib import suppress +from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, TypeVar import pytest @@ -14,8 +15,13 @@ # Cross-platform tests for IO handling +if TYPE_CHECKING: + from typing_extensions import ParamSpec -def fill_socket(sock): + ArgsT = ParamSpec("ArgsT") + + +def fill_socket(sock: stdlib_socket.socket) -> None: try: while True: sock.send(b"x" * 65536) @@ -23,7 +29,7 @@ def fill_socket(sock): pass -def drain_socket(sock): +def drain_socket(sock: stdlib_socket.socket) -> None: try: while True: sock.recv(65536) @@ -31,8 +37,13 @@ def drain_socket(sock): pass +WaitSocket = Callable[[stdlib_socket.socket], Awaitable[object]] +SocketPair = Tuple[stdlib_socket.socket, stdlib_socket.socket] +RetT = TypeVar("RetT") + + @pytest.fixture -def socketpair(): +def socketpair() -> Generator[SocketPair, None, None]: pair = stdlib_socket.socketpair() for sock in pair: sock.setblocking(False) @@ -41,38 +52,35 @@ def socketpair(): sock.close() -def using_fileno(fn): - def fileno_wrapper(fileobj): +def also_using_fileno( + fn: Callable[[stdlib_socket.socket | int], RetT], +) -> list[Callable[[stdlib_socket.socket], RetT]]: + def fileno_wrapper(fileobj: stdlib_socket.socket) -> RetT: return fn(fileobj.fileno()) name = f"<{fn.__name__} on fileno>" fileno_wrapper.__name__ = fileno_wrapper.__qualname__ = name - return fileno_wrapper + return [fn, fileno_wrapper] -wait_readable_options: list[Callable] = [trio.lowlevel.wait_readable] -wait_writable_options: list[Callable] = [trio.lowlevel.wait_writable] -notify_closing_options: list[Callable] = [trio.lowlevel.notify_closing] - -for options_list in ( - wait_readable_options, - wait_writable_options, - notify_closing_options, -): - options_list += [using_fileno(f) for f in options_list] - # Decorators that feed in different settings for wait_readable / wait_writable # / notify_closing. 
# Note that if you use all three decorators on the same test, it will run all # N**3 *combinations* read_socket_test = pytest.mark.parametrize( - "wait_readable", wait_readable_options, ids=lambda fn: fn.__name__ + "wait_readable", + also_using_fileno(trio.lowlevel.wait_readable), + ids=lambda fn: fn.__name__, ) write_socket_test = pytest.mark.parametrize( - "wait_writable", wait_writable_options, ids=lambda fn: fn.__name__ + "wait_writable", + also_using_fileno(trio.lowlevel.wait_writable), + ids=lambda fn: fn.__name__, ) notify_closing_test = pytest.mark.parametrize( - "notify_closing", notify_closing_options, ids=lambda fn: fn.__name__ + "notify_closing", + also_using_fileno(trio.lowlevel.notify_closing), + ids=lambda fn: fn.__name__, ) @@ -81,7 +89,9 @@ def fileno_wrapper(fileobj): # momentarily and then immediately resuming. @read_socket_test @write_socket_test -async def test_wait_basic(socketpair, wait_readable, wait_writable): +async def test_wait_basic( + socketpair: SocketPair, wait_readable: WaitSocket, wait_writable: WaitSocket +) -> None: a, b = socketpair # They start out writable() @@ -91,7 +101,7 @@ async def test_wait_basic(socketpair, wait_readable, wait_writable): # But readable() blocks until data arrives record = [] - async def block_on_read(): + async def block_on_read() -> None: try: with assert_checkpoints(): await wait_readable(a) @@ -114,7 +124,7 @@ async def block_on_read(): await wait_readable(b) record = [] - async def block_on_write(): + async def block_on_write() -> None: try: with assert_checkpoints(): await wait_writable(a) @@ -147,7 +157,7 @@ async def block_on_write(): @read_socket_test -async def test_double_read(socketpair, wait_readable): +async def test_double_read(socketpair: SocketPair, wait_readable: WaitSocket) -> None: a, b = socketpair # You can't have two tasks trying to read from a socket at the same time @@ -160,7 +170,7 @@ async def test_double_read(socketpair, wait_readable): @write_socket_test -async def test_double_write(socketpair, wait_writable): +async def test_double_write(socketpair: SocketPair, wait_writable: WaitSocket) -> None: a, b = socketpair # You can't have two tasks trying to write to a socket at the same time @@ -177,15 +187,18 @@ async def test_double_write(socketpair, wait_writable): @write_socket_test @notify_closing_test async def test_interrupted_by_close( - socketpair, wait_readable, wait_writable, notify_closing -): + socketpair: SocketPair, + wait_readable: WaitSocket, + wait_writable: WaitSocket, + notify_closing: Callable[[stdlib_socket.socket], object], +) -> None: a, b = socketpair - async def reader(): + async def reader() -> None: with pytest.raises(_core.ClosedResourceError): await wait_readable(a) - async def writer(): + async def writer() -> None: with pytest.raises(_core.ClosedResourceError): await wait_writable(a) @@ -200,14 +213,16 @@ async def writer(): @read_socket_test @write_socket_test -async def test_socket_simultaneous_read_write(socketpair, wait_readable, wait_writable): - record = [] +async def test_socket_simultaneous_read_write( + socketpair: SocketPair, wait_readable: WaitSocket, wait_writable: WaitSocket +) -> None: + record: list[str] = [] - async def r_task(sock): + async def r_task(sock: stdlib_socket.socket) -> None: await wait_readable(sock) record.append("r_task") - async def w_task(sock): + async def w_task(sock: stdlib_socket.socket) -> None: await wait_writable(sock) record.append("w_task") @@ -228,7 +243,9 @@ async def w_task(sock): @read_socket_test @write_socket_test -async def 
test_socket_actual_streaming(socketpair, wait_readable, wait_writable): +async def test_socket_actual_streaming( + socketpair: SocketPair, wait_readable: WaitSocket, wait_writable: WaitSocket +) -> None: a, b = socketpair # Use a small send buffer on one of the sockets to increase the chance of @@ -238,9 +255,9 @@ async def test_socket_actual_streaming(socketpair, wait_readable, wait_writable) N = 1000000 # 1 megabyte MAX_CHUNK = 65536 - results = {} + results: dict[str, int] = {} - async def sender(sock, seed, key): + async def sender(sock: stdlib_socket.socket, seed: int, key: str) -> None: r = random.Random(seed) sent = 0 while sent < N: @@ -255,7 +272,7 @@ async def sender(sock, seed, key): sock.shutdown(stdlib_socket.SHUT_WR) results[key] = sent - async def receiver(sock, key): + async def receiver(sock: stdlib_socket.socket, key: str) -> None: received = 0 while True: print("received", received) @@ -277,7 +294,7 @@ async def receiver(sock, key): assert results["send_b"] == results["recv_a"] -async def test_notify_closing_on_invalid_object(): +async def test_notify_closing_on_invalid_object() -> None: # It should either be a no-op (generally on Unix, where we don't know # which fds are valid), or an OSError (on Windows, where we currently only # support sockets, so we have to do some validation to figure out whether @@ -293,7 +310,7 @@ async def test_notify_closing_on_invalid_object(): assert got_oserror or got_no_error -async def test_wait_on_invalid_object(): +async def test_wait_on_invalid_object() -> None: # We definitely want to raise an error everywhere if you pass in an # invalid fd to wait_* for wait in [trio.lowlevel.wait_readable, trio.lowlevel.wait_writable]: @@ -305,12 +322,12 @@ async def test_wait_on_invalid_object(): await wait(fileno) -async def test_io_manager_statistics(): - def check(*, expected_readers, expected_writers): +async def test_io_manager_statistics() -> None: + def check(*, expected_readers: int, expected_writers: int) -> None: statistics = _core.current_statistics() print(statistics) iostats = statistics.io_statistics - if iostats.backend in ["epoll", "windows"]: + if iostats.backend == "epoll" or iostats.backend == "windows": assert iostats.tasks_waiting_read == expected_readers assert iostats.tasks_waiting_write == expected_writers else: @@ -353,7 +370,7 @@ def check(*, expected_readers, expected_writers): check(expected_readers=1, expected_writers=0) -async def test_can_survive_unnotified_close(): +async def test_can_survive_unnotified_close() -> None: # An "unnotified" close is when the user closes an fd/socket/handle # directly, without calling notify_closing first. This should never happen # -- users should call notify_closing before closing things. But, just in @@ -371,9 +388,13 @@ async def test_can_survive_unnotified_close(): # This test exercises some tricky "unnotified close" scenarios, to make # sure we get the "acceptable" behaviors. - async def allow_OSError(async_func, *args): + async def allow_OSError( + async_func: Callable[ArgsT, Awaitable[object]], + *args: ArgsT.args, + **kwargs: ArgsT.kwargs, + ) -> None: with suppress(OSError): - await async_func(*args) + await async_func(*args, **kwargs) with stdlib_socket.socket() as s: async with trio.open_nursery() as nursery: @@ -431,7 +452,7 @@ async def allow_OSError(async_func, *args): # sleep waiting on 'a2', with the idea that the 'a2' notification will # definitely arrive, and when it does then we can assume that whatever # notification was going to arrive for 'a' has also arrived. 
- async def wait_readable_a2_then_set(): + async def wait_readable_a2_then_set() -> None: await trio.lowlevel.wait_readable(a2) e.set() diff --git a/trio/_core/_tests/test_run.py b/trio/_core/_tests/test_run.py index 6d34d8f223..f26438b554 100644 --- a/trio/_core/_tests/test_run.py +++ b/trio/_core/_tests/test_run.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import contextvars import functools import gc @@ -6,8 +8,16 @@ import time import types import weakref +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Generator, +) from contextlib import ExitStack from math import inf +from typing import NoReturn, TypeVar import outcome import pytest @@ -33,15 +43,27 @@ from exceptiongroup import ExceptionGroup +T = TypeVar("T") + + # slightly different from _timeouts.sleep_forever because it returns the value # its rescheduled with, which is really only useful for tests of # rescheduling... -async def sleep_forever(): +async def sleep_forever() -> object: return await _core.wait_task_rescheduled(lambda _: _core.Abort.SUCCEEDED) -def test_basic(): - async def trivial(x): +def not_none(x: T | None) -> T: + """Assert that this object is not None. + + This is just to satisfy type checkers, if this ever fails the test is broken. + """ + assert x is not None + return x + + +def test_basic() -> None: + async def trivial(x: T) -> T: return x assert _core.run(trivial, 8) == 8 @@ -52,17 +74,17 @@ async def trivial(x): with pytest.raises(TypeError): # Not an async function - _core.run(lambda: None) + _core.run(lambda: None) # type: ignore - async def trivial2(x): + async def trivial2(x: T) -> T: await _core.checkpoint() return x assert _core.run(trivial2, 1) == 1 -def test_initial_task_error(): - async def main(x): +def test_initial_task_error() -> None: + async def main(x: object) -> NoReturn: raise ValueError(x) with pytest.raises(ValueError) as excinfo: @@ -70,9 +92,9 @@ async def main(x): assert excinfo.value.args == (17,) -def test_run_nesting(): - async def inception(): - async def main(): # pragma: no cover +def test_run_nesting() -> None: + async def inception() -> None: + async def main() -> None: # pragma: no cover pass return _core.run(main) @@ -82,10 +104,10 @@ async def main(): # pragma: no cover assert "from inside" in str(excinfo.value) -async def test_nursery_warn_use_async_with(): +async def test_nursery_warn_use_async_with() -> None: with pytest.raises(RuntimeError) as excinfo: on = _core.open_nursery() - with on: + with on: # type: ignore pass # pragma: no cover excinfo.match( r"use 'async with open_nursery\(...\)', not 'with open_nursery\(...\)'" @@ -96,7 +118,7 @@ async def test_nursery_warn_use_async_with(): pass -async def test_nursery_main_block_error_basic(): +async def test_nursery_main_block_error_basic() -> None: exc = ValueError("whoops") with pytest.raises(ValueError) as excinfo: @@ -105,10 +127,10 @@ async def test_nursery_main_block_error_basic(): assert excinfo.value is exc -async def test_child_crash_basic(): +async def test_child_crash_basic() -> None: exc = ValueError("uh oh") - async def erroring(): + async def erroring() -> NoReturn: raise exc try: @@ -119,13 +141,13 @@ async def erroring(): assert e is exc -async def test_basic_interleave(): - async def looper(whoami, record): +async def test_basic_interleave() -> None: + async def looper(whoami: str, record: list[tuple[str, int]]) -> None: for i in range(3): record.append((whoami, i)) await _core.checkpoint() - record = [] + record: list[tuple[str, int]] = [] async 
with _core.open_nursery() as nursery: nursery.start_soon(looper, "a", record) nursery.start_soon(looper, "b", record) @@ -135,10 +157,10 @@ async def looper(whoami, record): ) -def test_task_crash_propagation(): - looper_record = [] +def test_task_crash_propagation() -> None: + looper_record: list[str] = [] - async def looper(): + async def looper() -> None: try: while True: await _core.checkpoint() @@ -146,10 +168,10 @@ async def looper(): print("looper cancelled") looper_record.append("cancelled") - async def crasher(): + async def crasher() -> NoReturn: raise ValueError("argh") - async def main(): + async def main() -> None: async with _core.open_nursery() as nursery: nursery.start_soon(looper) nursery.start_soon(crasher) @@ -161,13 +183,13 @@ async def main(): assert excinfo.value.args == ("argh",) -def test_main_and_task_both_crash(): +def test_main_and_task_both_crash() -> None: # If main crashes and there's also a task crash, then we get both in a # MultiError - async def crasher(): + async def crasher() -> NoReturn: raise ValueError - async def main(): + async def main() -> NoReturn: async with _core.open_nursery() as nursery: nursery.start_soon(crasher) raise KeyError @@ -181,11 +203,11 @@ async def main(): } -def test_two_child_crashes(): - async def crasher(etype): +def test_two_child_crashes() -> None: + async def crasher(etype: type[Exception]) -> NoReturn: raise etype - async def main(): + async def main() -> None: async with _core.open_nursery() as nursery: nursery.start_soon(crasher, KeyError) nursery.start_soon(crasher, ValueError) @@ -198,8 +220,8 @@ async def main(): } -async def test_child_crash_wakes_parent(): - async def crasher(): +async def test_child_crash_wakes_parent() -> None: + async def crasher() -> NoReturn: raise ValueError with pytest.raises(ValueError): @@ -208,11 +230,11 @@ async def crasher(): await sleep_forever() -async def test_reschedule(): - t1 = None - t2 = None +async def test_reschedule() -> None: + t1: _core.Task | None = None + t2: _core.Task | None = None - async def child1(): + async def child1() -> None: nonlocal t1, t2 t1 = _core.current_task() print("child1 start") @@ -220,14 +242,14 @@ async def child1(): print("child1 woke") assert x == 0 print("child1 rescheduling t2") - _core.reschedule(t2, outcome.Error(ValueError())) + _core.reschedule(not_none(t2), outcome.Error(ValueError())) print("child1 exit") - async def child2(): + async def child2() -> None: nonlocal t1, t2 print("child2 start") t2 = _core.current_task() - _core.reschedule(t1, outcome.Value(0)) + _core.reschedule(not_none(t1), outcome.Value(0)) print("child2 sleep") with pytest.raises(ValueError): await sleep_forever() @@ -240,7 +262,7 @@ async def child2(): nursery.start_soon(child2) -async def test_current_time(): +async def test_current_time() -> None: t1 = _core.current_time() # Windows clock is pretty low-resolution -- appveyor tests fail unless we # sleep for a bit here. 
@@ -249,7 +271,7 @@ async def test_current_time(): assert t1 < t2 -async def test_current_time_with_mock_clock(mock_clock): +async def test_current_time_with_mock_clock(mock_clock: _core.MockClock) -> None: start = mock_clock.current_time() assert mock_clock.current_time() == _core.current_time() assert mock_clock.current_time() == _core.current_time() @@ -257,38 +279,38 @@ async def test_current_time_with_mock_clock(mock_clock): assert start + 3.14 == mock_clock.current_time() == _core.current_time() -async def test_current_clock(mock_clock): +async def test_current_clock(mock_clock: _core.MockClock) -> None: assert mock_clock is _core.current_clock() -async def test_current_task(): +async def test_current_task() -> None: parent_task = _core.current_task() - async def child(): - assert _core.current_task().parent_nursery.parent_task is parent_task + async def child() -> None: + assert not_none(_core.current_task().parent_nursery).parent_task is parent_task async with _core.open_nursery() as nursery: nursery.start_soon(child) -async def test_root_task(): - root = _core.current_root_task() +async def test_root_task() -> None: + root = not_none(_core.current_root_task()) assert root.parent_nursery is root.eventual_parent_nursery is None -def test_out_of_context(): +def test_out_of_context() -> None: with pytest.raises(RuntimeError): _core.current_task() with pytest.raises(RuntimeError): _core.current_time() -async def test_current_statistics(mock_clock): +async def test_current_statistics(mock_clock: _core.MockClock) -> None: # Make sure all the early startup stuff has settled down await wait_all_tasks_blocked() # A child that sticks around to make some interesting stats: - async def child(): + async def child() -> None: try: await sleep_forever() except _core.Cancelled: @@ -335,7 +357,7 @@ async def child(): assert stats.seconds_to_next_deadline == inf -async def test_cancel_scope_repr(mock_clock): +async def test_cancel_scope_repr(mock_clock: _core.MockClock) -> None: scope = _core.CancelScope() assert "unbound" in repr(scope) with scope: @@ -351,8 +373,8 @@ async def test_cancel_scope_repr(mock_clock): assert "exited" in repr(scope) -def test_cancel_points(): - async def main1(): +def test_cancel_points() -> None: + async def main1() -> None: with _core.CancelScope() as scope: await _core.checkpoint_if_cancelled() scope.cancel() @@ -361,7 +383,7 @@ async def main1(): _core.run(main1) - async def main2(): + async def main2() -> None: with _core.CancelScope() as scope: await _core.checkpoint() scope.cancel() @@ -370,7 +392,7 @@ async def main2(): _core.run(main2) - async def main3(): + async def main3() -> None: with _core.CancelScope() as scope: scope.cancel() with pytest.raises(_core.Cancelled): @@ -378,7 +400,7 @@ async def main3(): _core.run(main3) - async def main4(): + async def main4() -> None: with _core.CancelScope() as scope: scope.cancel() await _core.cancel_shielded_checkpoint() @@ -389,7 +411,7 @@ async def main4(): _core.run(main4) -async def test_cancel_edge_cases(): +async def test_cancel_edge_cases() -> None: with _core.CancelScope() as scope: # Two cancels in a row -- idempotent scope.cancel() @@ -407,8 +429,8 @@ async def test_cancel_edge_cases(): await sleep_forever() -async def test_cancel_scope_multierror_filtering(): - async def crasher(): +async def test_cancel_scope_multierror_filtering() -> None: + async def crasher() -> NoReturn: raise KeyError try: @@ -433,7 +455,7 @@ async def crasher(): # nursery block continue propagating to reach the # outer scope. 
assert len(multi_exc.exceptions) == 5 - summary = {} + summary: dict[type, int] = {} for exc in multi_exc.exceptions: summary.setdefault(type(exc), 0) summary[type(exc)] += 1 @@ -450,13 +472,13 @@ async def crasher(): assert False -async def test_precancelled_task(): +async def test_precancelled_task() -> None: # a task that gets spawned into an already-cancelled nursery should begin # execution (https://github.com/python-trio/trio/issues/41), but get a # cancelled error at its first blocking call. - record = [] + record: list[str] = [] - async def blocker(): + async def blocker() -> None: record.append("started") await sleep_forever() @@ -466,7 +488,7 @@ async def blocker(): assert record == ["started"] -async def test_cancel_shielding(): +async def test_cancel_shielding() -> None: with _core.CancelScope() as outer: with _core.CancelScope() as inner: await _core.checkpoint() @@ -476,7 +498,7 @@ async def test_cancel_shielding(): assert inner.shield is False with pytest.raises(TypeError): - inner.shield = "hello" + inner.shield = "hello" # type: ignore assert inner.shield is False inner.shield = True @@ -507,16 +529,16 @@ async def test_cancel_shielding(): # make sure that cancellation propagates immediately to all children -async def test_cancel_inheritance(): - record = set() +async def test_cancel_inheritance() -> None: + record: set[str] = set() - async def leaf(ident): + async def leaf(ident: str) -> None: try: await sleep_forever() except _core.Cancelled: record.add(ident) - async def worker(ident): + async def worker(ident: str) -> None: async with _core.open_nursery() as nursery: nursery.start_soon(leaf, ident + "-l1") nursery.start_soon(leaf, ident + "-l2") @@ -529,7 +551,7 @@ async def worker(ident): assert record == {"w1-l1", "w1-l2", "w2-l1", "w2-l2"} -async def test_cancel_shield_abort(): +async def test_cancel_shield_abort() -> None: with _core.CancelScope() as outer: async with _core.open_nursery() as nursery: outer.cancel() @@ -538,7 +560,7 @@ async def test_cancel_shield_abort(): # shield, so it manages to get to sleep record = [] - async def sleeper(): + async def sleeper() -> None: record.append("sleeping") try: await sleep_forever() @@ -560,7 +582,7 @@ async def sleeper(): assert record == ["sleeping", "cancelled"] -async def test_basic_timeout(mock_clock): +async def test_basic_timeout(mock_clock: _core.MockClock) -> None: start = _core.current_time() with _core.CancelScope() as scope: assert scope.deadline == inf @@ -597,7 +619,7 @@ async def test_basic_timeout(mock_clock): await _core.checkpoint() -async def test_cancel_scope_nesting(): +async def test_cancel_scope_nesting() -> None: # Nested scopes: if two triggering at once, the outer one wins with _core.CancelScope() as scope1: with _core.CancelScope() as scope2: @@ -636,7 +658,7 @@ async def test_cancel_scope_nesting(): # Regression test for https://github.com/python-trio/trio/issues/1175 -async def test_unshield_while_cancel_propagating(): +async def test_unshield_while_cancel_propagating() -> None: with _core.CancelScope() as outer: with _core.CancelScope() as inner: outer.cancel() @@ -647,8 +669,8 @@ async def test_unshield_while_cancel_propagating(): assert outer.cancelled_caught and not inner.cancelled_caught -async def test_cancel_unbound(): - async def sleep_until_cancelled(scope): +async def test_cancel_unbound() -> None: + async def sleep_until_cancelled(scope: _core.CancelScope) -> None: with scope, fail_after(1): await sleep_forever() @@ -697,7 +719,7 @@ async def sleep_until_cancelled(scope): # Can't 
enter from multiple tasks simultaneously scope = _core.CancelScope() - async def enter_scope(): + async def enter_scope() -> None: with scope: await sleep_forever() @@ -721,7 +743,7 @@ async def enter_scope(): assert scope.cancel_called # never become un-cancelled -async def test_cancel_scope_misnesting(): +async def test_cancel_scope_misnesting() -> None: outer = _core.CancelScope() inner = _core.CancelScope() with ExitStack() as stack: @@ -733,12 +755,12 @@ async def test_cancel_scope_misnesting(): # If there are other tasks inside the abandoned part of the cancel tree, # they get cancelled when the misnesting is detected - async def task1(): + async def task1() -> None: with pytest.raises(_core.Cancelled): await sleep_forever() # Even if inside another cancel scope - async def task2(): + async def task2() -> None: with _core.CancelScope(): with pytest.raises(_core.Cancelled): await sleep_forever() @@ -777,20 +799,20 @@ async def task2(): # Trying to exit a cancel scope from an unrelated task raises an error # without affecting any state - async def task3(task_status): + async def task3(task_status: _core.TaskStatus[_core.CancelScope]) -> None: with _core.CancelScope() as scope: task_status.started(scope) await sleep_forever() async with _core.open_nursery() as nursery: - scope = await nursery.start(task3) + scope: _core.CancelScope = await nursery.start(task3) with pytest.raises(RuntimeError, match="from unrelated"): scope.__exit__(None, None, None) scope.cancel() @slow -async def test_timekeeping(): +async def test_timekeeping() -> None: # probably a good idea to use a real clock for *one* test anyway... TARGET = 1.0 # give it a few tries in case of random CI server flakiness @@ -810,15 +832,16 @@ async def test_timekeeping(): assert False -async def test_failed_abort(): - stubborn_task = [None] - stubborn_scope = [None] - record = [] +async def test_failed_abort() -> None: + stubborn_task: _core.Task | None = None + stubborn_scope: _core.CancelScope | None = None + record: list[str] = [] - async def stubborn_sleeper(): - stubborn_task[0] = _core.current_task() + async def stubborn_sleeper() -> None: + nonlocal stubborn_task, stubborn_scope + stubborn_task = _core.current_task() with _core.CancelScope() as scope: - stubborn_scope[0] = scope + stubborn_scope = scope record.append("sleep") x = await _core.wait_task_rescheduled(lambda _: _core.Abort.FAILED) assert x == 1 @@ -832,18 +855,18 @@ async def stubborn_sleeper(): nursery.start_soon(stubborn_sleeper) await wait_all_tasks_blocked() assert record == ["sleep"] - stubborn_scope[0].cancel() + not_none(stubborn_scope).cancel() await wait_all_tasks_blocked() # cancel didn't wake it up assert record == ["sleep"] # wake it up again by hand - _core.reschedule(stubborn_task[0], outcome.Value(1)) + _core.reschedule(not_none(stubborn_task), outcome.Value(1)) assert record == ["sleep", "woke", "cancelled"] @restore_unraisablehook() -def test_broken_abort(): - async def main(): +def test_broken_abort() -> None: + async def main() -> None: # These yields are here to work around an annoying warning -- we're # going to crash the main loop, and if we (by chance) do this before # the run_sync_soon task runs for the first time, then Python gives us @@ -857,7 +880,7 @@ async def main(): with _core.CancelScope() as scope: scope.cancel() # None is not a legal return value here - await _core.wait_task_rescheduled(lambda _: None) + await _core.wait_task_rescheduled(lambda _: None) # type: ignore with pytest.raises(_core.TrioInternalError): _core.run(main) 
@@ -869,11 +892,11 @@ async def main(): @restore_unraisablehook() -def test_error_in_run_loop(): +def test_error_in_run_loop() -> None: # Blow stuff up real good to check we at least get a TrioInternalError - async def main(): + async def main() -> None: task = _core.current_task() - task._schedule_points = "hello!" + task._schedule_points = "hello!" # type: ignore await _core.checkpoint() with ignore_coroutine_never_awaited_warnings(): @@ -881,10 +904,10 @@ async def main(): _core.run(main) -async def test_spawn_system_task(): - record = [] +async def test_spawn_system_task() -> None: + record: list[tuple[str, int]] = [] - async def system_task(x): + async def system_task(x: int) -> None: record.append(("x", x)) record.append(("ki", _core.currently_ki_protected())) await _core.checkpoint() @@ -895,11 +918,11 @@ async def system_task(x): # intentionally make a system task crash -def test_system_task_crash(): - async def crasher(): +def test_system_task_crash() -> None: + async def crasher() -> NoReturn: raise KeyError - async def main(): + async def main() -> None: _core.spawn_system_task(crasher) await sleep_forever() @@ -907,19 +930,19 @@ async def main(): _core.run(main) -def test_system_task_crash_MultiError(): - async def crasher1(): +def test_system_task_crash_MultiError() -> None: + async def crasher1() -> NoReturn: raise KeyError - async def crasher2(): + async def crasher2() -> NoReturn: raise ValueError - async def system_task(): + async def system_task() -> None: async with _core.open_nursery() as nursery: nursery.start_soon(crasher1) nursery.start_soon(crasher2) - async def main(): + async def main() -> None: _core.spawn_system_task(system_task) await sleep_forever() @@ -933,24 +956,24 @@ async def main(): assert isinstance(exc, (KeyError, ValueError)) -def test_system_task_crash_plus_Cancelled(): +def test_system_task_crash_plus_Cancelled() -> None: # Set up a situation where a system task crashes with a # MultiError([Cancelled, ValueError]) - async def crasher(): + async def crasher() -> None: try: await sleep_forever() except _core.Cancelled: raise ValueError - async def cancelme(): + async def cancelme() -> None: await sleep_forever() - async def system_task(): + async def system_task() -> None: async with _core.open_nursery() as nursery: nursery.start_soon(crasher) nursery.start_soon(cancelme) - async def main(): + async def main() -> None: _core.spawn_system_task(system_task) # then we exit, triggering a cancellation @@ -959,11 +982,11 @@ async def main(): assert type(excinfo.value.__cause__) is ValueError -def test_system_task_crash_KeyboardInterrupt(): - async def ki(): +def test_system_task_crash_KeyboardInterrupt() -> None: + async def ki() -> NoReturn: raise KeyboardInterrupt - async def main(): + async def main() -> None: _core.spawn_system_task(ki) await sleep_forever() @@ -981,7 +1004,7 @@ async def main(): # 4) this task has timed out # 5) ...but it's on the run queue, so the timeout is queued to be delivered # the next time that it's blocked. -async def test_yield_briefly_checks_for_timeout(mock_clock): +async def test_yield_briefly_checks_for_timeout(mock_clock: _core.MockClock) -> None: with _core.CancelScope(deadline=_core.current_time() + 5): await _core.checkpoint() with pytest.raises(_core.Cancelled): @@ -995,11 +1018,11 @@ async def test_yield_briefly_checks_for_timeout(mock_clock): # still nice to know that it works :-). # # Update: it turns out I was right to be nervous! see the next test... 
-async def test_exc_info(): - record = [] +async def test_exc_info() -> None: + record: list[str] = [] seq = Sequencer() - async def child1(): + async def child1() -> None: with pytest.raises(ValueError) as excinfo: try: async with seq(0): @@ -1016,7 +1039,7 @@ async def child1(): assert excinfo.value.__context__ is None record.append("child1 success") - async def child2(): + async def child2() -> None: with pytest.raises(KeyError) as excinfo: async with seq(1): pass # we don't yield until seq(3) below @@ -1056,10 +1079,10 @@ async def child2(): # like re-raising and exception chaining are broken. # # https://bugs.python.org/issue29587 -async def test_exc_info_after_yield_error(): - child_task = None +async def test_exc_info_after_yield_error() -> None: + child_task: _core.Task | None = None - async def child(): + async def child() -> None: nonlocal child_task child_task = _core.current_task() @@ -1076,15 +1099,15 @@ async def child(): async with _core.open_nursery() as nursery: nursery.start_soon(child) await wait_all_tasks_blocked() - _core.reschedule(child_task, outcome.Error(ValueError())) + _core.reschedule(not_none(child_task), outcome.Error(ValueError())) # Similar to previous test -- if the ValueError() gets sent in via 'throw', # then Python's normal implicit chaining stuff is broken. -async def test_exception_chaining_after_yield_error(): - child_task = None +async def test_exception_chaining_after_yield_error() -> None: + child_task: _core.Task | None = None - async def child(): + async def child() -> None: nonlocal child_task child_task = _core.current_task() @@ -1097,13 +1120,13 @@ async def child(): async with _core.open_nursery() as nursery: nursery.start_soon(child) await wait_all_tasks_blocked() - _core.reschedule(child_task, outcome.Error(ValueError())) + _core.reschedule(not_none(child_task), outcome.Error(ValueError())) assert isinstance(excinfo.value.__context__, KeyError) -async def test_nursery_exception_chaining_doesnt_make_context_loops(): - async def crasher(): +async def test_nursery_exception_chaining_doesnt_make_context_loops() -> None: + async def crasher() -> NoReturn: raise KeyError with pytest.raises(MultiError) as excinfo: @@ -1114,8 +1137,8 @@ async def crasher(): assert excinfo.value.__context__ is None -def test_TrioToken_identity(): - async def get_and_check_token(): +def test_TrioToken_identity() -> None: + async def get_and_check_token() -> _core.TrioToken: token = _core.current_trio_token() # Two calls in the same run give the same object assert token is _core.current_trio_token() @@ -1128,10 +1151,10 @@ async def get_and_check_token(): assert hash(t1) != hash(t2) -async def test_TrioToken_run_sync_soon_basic(): - record = [] +async def test_TrioToken_run_sync_soon_basic() -> None: + record: list[tuple[str, int]] = [] - def cb(x): + def cb(x: int) -> None: record.append(("cb", x)) token = _core.current_trio_token() @@ -1141,23 +1164,22 @@ def cb(x): assert record == [("cb", 1)] -def test_TrioToken_run_sync_soon_too_late(): - token = None +def test_TrioToken_run_sync_soon_too_late() -> None: + token: _core.TrioToken | None = None - async def main(): + async def main() -> None: nonlocal token token = _core.current_trio_token() _core.run(main) - assert token is not None with pytest.raises(_core.RunFinishedError): - token.run_sync_soon(lambda: None) # pragma: no branch + not_none(token).run_sync_soon(lambda: None) # pragma: no branch -async def test_TrioToken_run_sync_soon_idempotent(): - record = [] +async def test_TrioToken_run_sync_soon_idempotent() 
-> None: + record: list[int] = [] - def cb(x): + def cb(x: int) -> None: record.append(x) token = _core.current_trio_token() @@ -1181,21 +1203,21 @@ def cb(x): assert record == list(range(100)) -def test_TrioToken_run_sync_soon_idempotent_requeue(): +def test_TrioToken_run_sync_soon_idempotent_requeue() -> None: # We guarantee that if a call has finished, queueing it again will call it # again. Due to the lack of synchronization, this effectively means that # we have to guarantee that once a call has *started*, queueing it again # will call it again. Also this is much easier to test :-) - record = [] + record: list[None] = [] - def redo(token): + def redo(token: _core.TrioToken) -> None: record.append(None) try: token.run_sync_soon(redo, token, idempotent=True) except _core.RunFinishedError: pass - async def main(): + async def main() -> None: token = _core.current_trio_token() token.run_sync_soon(redo, token, idempotent=True) await _core.checkpoint() @@ -1207,10 +1229,10 @@ async def main(): assert len(record) >= 2 -def test_TrioToken_run_sync_soon_after_main_crash(): - record = [] +def test_TrioToken_run_sync_soon_after_main_crash() -> None: + record: list[str] = [] - async def main(): + async def main() -> None: token = _core.current_trio_token() # After main exits but before finally cleaning up, callback processed # normally @@ -1223,12 +1245,12 @@ async def main(): assert record == ["sync-cb"] -def test_TrioToken_run_sync_soon_crashes(): - record = set() +def test_TrioToken_run_sync_soon_crashes() -> None: + record: set[str] = set() - async def main(): + async def main() -> None: token = _core.current_trio_token() - token.run_sync_soon(lambda: dict()["nope"]) + token.run_sync_soon(lambda: {}["nope"]) # type: ignore[index] # check that a crashing run_sync_soon callback doesn't stop further # calls to run_sync_soon token.run_sync_soon(lambda: record.add("2nd run_sync_soon ran")) @@ -1244,7 +1266,7 @@ async def main(): assert record == {"2nd run_sync_soon ran", "cancelled!"} -async def test_TrioToken_run_sync_soon_FIFO(): +async def test_TrioToken_run_sync_soon_FIFO() -> None: N = 100 record = [] token = _core.current_trio_token() @@ -1254,43 +1276,42 @@ async def test_TrioToken_run_sync_soon_FIFO(): assert record == list(range(N)) -def test_TrioToken_run_sync_soon_starvation_resistance(): +def test_TrioToken_run_sync_soon_starvation_resistance() -> None: # Even if we push callbacks in from callbacks, so that the callback queue # never empties out, then we still can't starve out other tasks from # running. 
- token = None - record = [] + token: _core.TrioToken | None = None + record: list[tuple[str, int]] = [] - def naughty_cb(i): - nonlocal token + def naughty_cb(i: int) -> None: try: - token.run_sync_soon(naughty_cb, i + 1) + not_none(token).run_sync_soon(naughty_cb, i + 1) except _core.RunFinishedError: record.append(("run finished", i)) - async def main(): + async def main() -> None: nonlocal token token = _core.current_trio_token() token.run_sync_soon(naughty_cb, 0) - record.append("starting") + record.append(("starting", 0)) for _ in range(20): await _core.checkpoint() _core.run(main) assert len(record) == 2 - assert record[0] == "starting" + assert record[0] == ("starting", 0) assert record[1][0] == "run finished" assert record[1][1] >= 19 -def test_TrioToken_run_sync_soon_threaded_stress_test(): +def test_TrioToken_run_sync_soon_threaded_stress_test() -> None: cb_counter = 0 - def cb(): + def cb() -> None: nonlocal cb_counter cb_counter += 1 - def stress_thread(token): + def stress_thread(token: _core.TrioToken) -> None: try: while True: token.run_sync_soon(cb) @@ -1298,7 +1319,7 @@ def stress_thread(token): except _core.RunFinishedError: pass - async def main(): + async def main() -> None: token = _core.current_trio_token() thread = threading.Thread(target=stress_thread, args=(token,)) thread.start() @@ -1311,7 +1332,7 @@ async def main(): print(cb_counter) -async def test_TrioToken_run_sync_soon_massive_queue(): +async def test_TrioToken_run_sync_soon_massive_queue() -> None: # There are edge cases in the wakeup fd code when the wakeup fd overflows, # so let's try to make that happen. This is also just a good stress test # in general. (With the current-as-of-2017-02-14 code using a socketpair @@ -1322,7 +1343,7 @@ async def test_TrioToken_run_sync_soon_massive_queue(): token = _core.current_trio_token() counter = [0] - def cb(i): + def cb(i: int) -> None: # This also tests FIFO ordering of callbacks assert counter[0] == i counter[0] += 1 @@ -1334,21 +1355,21 @@ def cb(i): @pytest.mark.skipif(buggy_pypy_asyncgens, reason="PyPy 7.2 is buggy") -def test_TrioToken_run_sync_soon_late_crash(): +def test_TrioToken_run_sync_soon_late_crash() -> None: # Crash after system nursery is closed -- easiest way to do that is # from an async generator finalizer. 
- record = [] - saved = [] + record: list[str] = [] + saved: list[AsyncGenerator[int, None]] = [] - async def agen(): + async def agen() -> AsyncGenerator[int, None]: token = _core.current_trio_token() try: yield 1 finally: - token.run_sync_soon(lambda: {}["nope"]) + token.run_sync_soon(lambda: {}["nope"]) # type: ignore[index] token.run_sync_soon(lambda: record.append("2nd ran")) - async def main(): + async def main() -> None: saved.append(agen()) await saved[-1].asend(None) record.append("main exiting") @@ -1360,14 +1381,14 @@ async def main(): assert record == ["main exiting", "2nd ran"] -async def test_slow_abort_basic(): +async def test_slow_abort_basic() -> None: with _core.CancelScope() as scope: scope.cancel() with pytest.raises(_core.Cancelled): task = _core.current_task() token = _core.current_trio_token() - def slow_abort(raise_cancel): + def slow_abort(raise_cancel: _core.RaiseCancelT) -> _core.Abort: result = outcome.capture(raise_cancel) token.run_sync_soon(_core.reschedule, task, result) return _core.Abort.FAILED @@ -1375,14 +1396,14 @@ def slow_abort(raise_cancel): await _core.wait_task_rescheduled(slow_abort) -async def test_slow_abort_edge_cases(): - record = [] +async def test_slow_abort_edge_cases() -> None: + record: list[str] = [] - async def slow_aborter(): + async def slow_aborter() -> None: task = _core.current_task() token = _core.current_trio_token() - def slow_abort(raise_cancel): + def slow_abort(raise_cancel: _core.RaiseCancelT) -> _core.Abort: record.append("abort-called") result = outcome.capture(raise_cancel) token.run_sync_soon(_core.reschedule, task, result) @@ -1418,11 +1439,13 @@ def slow_abort(raise_cancel): assert record == ["sleeping", "abort-called", "cancelled", "done"] -async def test_task_tree_introspection(): - tasks = {} - nurseries = {} +async def test_task_tree_introspection() -> None: + tasks: dict[str, _core.Task] = {} + nurseries: dict[str, _core.Nursery] = {} - async def parent(task_status=_core.TASK_STATUS_IGNORED): + async def parent( + task_status: _core.TaskStatus[None] = _core.TASK_STATUS_IGNORED, + ) -> None: tasks["parent"] = _core.current_task() assert tasks["parent"].child_nurseries == [] @@ -1433,6 +1456,7 @@ async def parent(task_status=_core.TASK_STATUS_IGNORED): assert tasks["parent"].child_nurseries == [] + nursery: _core.Nursery | None async with _core.open_nursery() as nursery: nurseries["parent"] = nursery await nursery.start(child1) @@ -1450,7 +1474,7 @@ async def parent(task_status=_core.TASK_STATUS_IGNORED): t = nursery.parent_task nursery = t.parent_nursery - async def child2(): + async def child2() -> None: tasks["child2"] = _core.current_task() assert tasks["parent"].child_nurseries == [nurseries["parent"]] assert nurseries["parent"].child_tasks == frozenset({tasks["child1"]}) @@ -1458,9 +1482,11 @@ async def child2(): assert nurseries["child1"].child_tasks == frozenset({tasks["child2"]}) assert tasks["child2"].child_nurseries == [] - async def child1(task_status=_core.TASK_STATUS_IGNORED): + async def child1( + task_status: _core.TaskStatus[None] = _core.TASK_STATUS_IGNORED, + ) -> None: me = tasks["child1"] = _core.current_task() - assert me.parent_nursery.parent_task is tasks["parent"] + assert not_none(me.parent_nursery).parent_task is tasks["parent"] assert me.parent_nursery is not nurseries["parent"] assert me.eventual_parent_nursery is nurseries["parent"] task_status.started() @@ -1484,13 +1510,13 @@ async def child1(task_status=_core.TASK_STATUS_IGNORED): assert task.eventual_parent_nursery is None -async 
def test_nursery_closure(): - async def child1(nursery): +async def test_nursery_closure() -> None: + async def child1(nursery: _core.Nursery) -> None: # We can add new tasks to the nursery even after entering __aexit__, # so long as there are still tasks running nursery.start_soon(child2) - async def child2(): + async def child2() -> None: pass async with _core.open_nursery() as nursery: @@ -1501,24 +1527,27 @@ async def child2(): nursery.start_soon(child2) -async def test_spawn_name(): - async def func1(expected): +async def test_spawn_name() -> None: + async def func1(expected: str) -> None: task = _core.current_task() assert expected in task.name - async def func2(): # pragma: no cover + async def func2() -> None: # pragma: no cover pass + async def check(spawn_fn: Callable[..., object]) -> None: + spawn_fn(func1, "func1") + spawn_fn(func1, "func2", name=func2) + spawn_fn(func1, "func3", name="func3") + spawn_fn(functools.partial(func1, "func1")) + spawn_fn(func1, "object", name=object()) + async with _core.open_nursery() as nursery: - for spawn_fn in [nursery.start_soon, _core.spawn_system_task]: - spawn_fn(func1, "func1") - spawn_fn(func1, "func2", name=func2) - spawn_fn(func1, "func3", name="func3") - spawn_fn(functools.partial(func1, "func1")) - spawn_fn(func1, "object", name=object()) + await check(nursery.start_soon) + await check(_core.spawn_system_task) -async def test_current_effective_deadline(mock_clock): +async def test_current_effective_deadline(mock_clock: _core.MockClock) -> None: assert _core.current_effective_deadline() == inf with _core.CancelScope(deadline=5) as scope1: @@ -1540,39 +1569,45 @@ async def test_current_effective_deadline(mock_clock): assert _core.current_effective_deadline() == inf -def test_nice_error_on_bad_calls_to_run_or_spawn(): - def bad_call_run(*args): - _core.run(*args) +def test_nice_error_on_bad_calls_to_run_or_spawn() -> None: + def bad_call_run( + func: Callable[..., Awaitable[object]], + *args: tuple[object, ...], + ) -> None: + _core.run(func, *args) - def bad_call_spawn(*args): - async def main(): + def bad_call_spawn( + func: Callable[..., Awaitable[object]], + *args: tuple[object, ...], + ) -> None: + async def main() -> None: async with _core.open_nursery() as nursery: - nursery.start_soon(*args) + nursery.start_soon(func, *args) _core.run(main) for bad_call in bad_call_run, bad_call_spawn: - async def f(): # pragma: no cover + async def f() -> None: # pragma: no cover pass with pytest.raises(TypeError, match="expecting an async function"): - bad_call(f()) + bad_call(f()) # type: ignore[arg-type] - async def async_gen(arg): # pragma: no cover + async def async_gen(arg: T) -> AsyncGenerator[T, None]: # pragma: no cover yield arg with pytest.raises( TypeError, match="expected an async function but got an async generator" ): - bad_call(async_gen, 0) + bad_call(async_gen, 0) # type: ignore -def test_calling_asyncio_function_gives_nice_error(): - async def child_xyzzy(): +def test_calling_asyncio_function_gives_nice_error() -> None: + async def child_xyzzy() -> None: await create_asyncio_future_in_new_loop() - async def misguided(): + async def misguided() -> None: await child_xyzzy() with pytest.raises(TypeError) as excinfo: @@ -1585,7 +1620,7 @@ async def misguided(): ) -async def test_asyncio_function_inside_nursery_does_not_explode(): +async def test_asyncio_function_inside_nursery_does_not_explode() -> None: # Regression test for https://github.com/python-trio/trio/issues/552 with pytest.raises(TypeError) as excinfo: async with 
_core.open_nursery() as nursery: @@ -1594,7 +1629,7 @@ async def test_asyncio_function_inside_nursery_does_not_explode(): assert "asyncio" in str(excinfo.value) -async def test_trivial_yields(): +async def test_trivial_yields() -> None: with assert_checkpoints(): await _core.checkpoint() @@ -1618,8 +1653,8 @@ async def test_trivial_yields(): } -async def test_nursery_start(autojump_clock): - async def no_args(): # pragma: no cover +async def test_nursery_start(autojump_clock: _core.MockClock) -> None: + async def no_args() -> None: # pragma: no cover pass # Errors in calling convention get raised immediately from start @@ -1627,7 +1662,9 @@ async def no_args(): # pragma: no cover with pytest.raises(TypeError): await nursery.start(no_args) - async def sleep_then_start(seconds, *, task_status=_core.TASK_STATUS_IGNORED): + async def sleep_then_start( + seconds: int, *, task_status: _core.TaskStatus[int] = _core.TASK_STATUS_IGNORED + ) -> None: repr(task_status) # smoke test await sleep(seconds) task_status.started(seconds) @@ -1652,7 +1689,9 @@ async def sleep_then_start(seconds, *, task_status=_core.TASK_STATUS_IGNORED): assert _core.current_time() - t0 == 2 * 3 # calling started twice - async def double_started(task_status=_core.TASK_STATUS_IGNORED): + async def double_started( + task_status: _core.TaskStatus[None] = _core.TASK_STATUS_IGNORED, + ) -> None: task_status.started() with pytest.raises(RuntimeError): task_status.started() @@ -1661,7 +1700,9 @@ async def double_started(task_status=_core.TASK_STATUS_IGNORED): await nursery.start(double_started) # child crashes before calling started -> error comes out of .start() - async def raise_keyerror(task_status=_core.TASK_STATUS_IGNORED): + async def raise_keyerror( + task_status: _core.TaskStatus[None] = _core.TASK_STATUS_IGNORED, + ) -> None: raise KeyError("oops") async with _core.open_nursery() as nursery: @@ -1669,18 +1710,22 @@ async def raise_keyerror(task_status=_core.TASK_STATUS_IGNORED): await nursery.start(raise_keyerror) # child exiting cleanly before calling started -> triggers a RuntimeError - async def nothing(task_status=_core.TASK_STATUS_IGNORED): + async def nothing( + task_status: _core.TaskStatus[None] = _core.TASK_STATUS_IGNORED, + ) -> None: return async with _core.open_nursery() as nursery: - with pytest.raises(RuntimeError) as excinfo: + with pytest.raises(RuntimeError) as excinfo1: await nursery.start(nothing) - assert "exited without calling" in str(excinfo.value) + assert "exited without calling" in str(excinfo1.value) # if the call to start() is cancelled, then the call to started() does # nothing -- the child keeps executing under start(). The value it passed # is ignored; start() raises Cancelled. 
- async def just_started(task_status=_core.TASK_STATUS_IGNORED): + async def just_started( + task_status: _core.TaskStatus[str] = _core.TASK_STATUS_IGNORED, + ) -> None: task_status.started("hi") async with _core.open_nursery() as nursery: @@ -1691,16 +1736,18 @@ async def just_started(task_status=_core.TASK_STATUS_IGNORED): # and if after the no-op started(), the child crashes, the error comes out # of start() - async def raise_keyerror_after_started(task_status=_core.TASK_STATUS_IGNORED): + async def raise_keyerror_after_started( + *, task_status: _core.TaskStatus[None] = _core.TASK_STATUS_IGNORED + ) -> None: task_status.started() raise KeyError("whoopsiedaisy") async with _core.open_nursery() as nursery: with _core.CancelScope() as cs: cs.cancel() - with pytest.raises(MultiError) as excinfo: + with pytest.raises(MultiError) as excinfo2: await nursery.start(raise_keyerror_after_started) - assert {type(e) for e in excinfo.value.exceptions} == { + assert {type(e) for e in excinfo2.value.exceptions} == { _core.Cancelled, KeyError, } @@ -1714,7 +1761,7 @@ async def raise_keyerror_after_started(task_status=_core.TASK_STATUS_IGNORED): assert _core.current_time() == t0 -async def test_task_nursery_stack(): +async def test_task_nursery_stack() -> None: task = _core.current_task() assert task._child_nurseries == [] async with _core.open_nursery() as nursery1: @@ -1727,10 +1774,12 @@ async def test_task_nursery_stack(): assert task._child_nurseries == [] -async def test_nursery_start_with_cancelled_nursery(): +async def test_nursery_start_with_cancelled_nursery() -> None: # This function isn't testing task_status, it's using task_status as a # convenient way to get a nursery that we can test spawning stuff into. - async def setup_nursery(task_status=_core.TASK_STATUS_IGNORED): + async def setup_nursery( + task_status: _core.TaskStatus[_core.Nursery] = _core.TASK_STATUS_IGNORED, + ) -> None: async with _core.open_nursery() as nursery: task_status.started(nursery) await sleep_forever() @@ -1738,7 +1787,11 @@ async def setup_nursery(task_status=_core.TASK_STATUS_IGNORED): # Calls started() while children are asleep, so we can make sure # that the cancellation machinery notices and aborts when a sleeping task # is moved into a cancelled scope. 
- async def sleeping_children(fn, *, task_status=_core.TASK_STATUS_IGNORED): + async def sleeping_children( + fn: Callable[[], object], + *, + task_status: _core.TaskStatus[None] = _core.TASK_STATUS_IGNORED, + ) -> None: async with _core.open_nursery() as nursery: nursery.start_soon(sleep_forever) nursery.start_soon(sleep_forever) @@ -1748,7 +1801,7 @@ async def sleeping_children(fn, *, task_status=_core.TASK_STATUS_IGNORED): # Cancelling the setup_nursery just *before* calling started() async with _core.open_nursery() as nursery: - target_nursery = await nursery.start(setup_nursery) + target_nursery: _core.Nursery = await nursery.start(setup_nursery) await target_nursery.start( sleeping_children, target_nursery.cancel_scope.cancel ) @@ -1760,8 +1813,12 @@ async def sleeping_children(fn, *, task_status=_core.TASK_STATUS_IGNORED): target_nursery.cancel_scope.cancel() -async def test_nursery_start_keeps_nursery_open(autojump_clock): - async def sleep_a_bit(task_status=_core.TASK_STATUS_IGNORED): +async def test_nursery_start_keeps_nursery_open( + autojump_clock: _core.MockClock, +) -> None: + async def sleep_a_bit( + task_status: _core.TaskStatus[None] = _core.TASK_STATUS_IGNORED, + ) -> None: await sleep(2) task_status.started() await sleep(3) @@ -1783,11 +1840,13 @@ async def sleep_a_bit(task_status=_core.TASK_STATUS_IGNORED): # Check that it still works even if the task that the nursery is waiting # for ends up crashing, and never actually enters the nursery. - async def sleep_then_crash(task_status=_core.TASK_STATUS_IGNORED): + async def sleep_then_crash( + task_status: _core.TaskStatus[None] = _core.TASK_STATUS_IGNORED, + ) -> None: await sleep(7) raise KeyError - async def start_sleep_then_crash(nursery): + async def start_sleep_then_crash(nursery: _core.Nursery) -> None: with pytest.raises(KeyError): await nursery.start(sleep_then_crash) @@ -1799,14 +1858,14 @@ async def start_sleep_then_crash(nursery): assert _core.current_time() - t0 == 7 -async def test_nursery_explicit_exception(): +async def test_nursery_explicit_exception() -> None: with pytest.raises(KeyError): async with _core.open_nursery(): raise KeyError() -async def test_nursery_stop_iteration(): - async def fail(): +async def test_nursery_stop_iteration() -> None: + async def fail() -> NoReturn: raise ValueError try: @@ -1817,13 +1876,13 @@ async def fail(): assert tuple(map(type, e.exceptions)) == (StopIteration, ValueError) -async def test_nursery_stop_async_iteration(): +async def test_nursery_stop_async_iteration() -> None: class it: - def __init__(self, count): + def __init__(self, count: int): self.count = count self.val = 0 - async def __anext__(self): + async def __anext__(self) -> int: await sleep(0) val = self.val if val >= self.count: @@ -1832,18 +1891,20 @@ async def __anext__(self): return val class async_zip: - def __init__(self, *largs): + def __init__(self, *largs: it): self.nexts = [obj.__anext__ for obj in largs] - async def _accumulate(self, f, items, i): + async def _accumulate( + self, f: Callable[[], Awaitable[int]], items: list[int | None], i: int + ) -> None: items[i] = await f() - def __aiter__(self): + def __aiter__(self) -> async_zip: return self - async def __anext__(self): + async def __anext__(self) -> list[int]: nexts = self.nexts - items = [None] * len(nexts) + items: list[int] = [-1] * len(nexts) async with _core.open_nursery() as nursery: for i, f in enumerate(nexts): @@ -1851,14 +1912,14 @@ async def __anext__(self): return items - result = [] + result: list[list[int]] = [] async for 
vals in async_zip(it(4), it(2)): result.append(vals) assert result == [[0, 0], [1, 1]] -async def test_traceback_frame_removal(): - async def my_child_task(): +async def test_traceback_frame_removal() -> None: + async def my_child_task() -> NoReturn: raise KeyError() try: @@ -1877,17 +1938,18 @@ async def my_child_task(): # task, not trio/contextvars internals. And there's only one frame # inside the child task, so this will also detect if our frame-removal # is too eager. - frame = first_exc.__traceback__.tb_frame - assert frame.f_code is my_child_task.__code__ + tb = first_exc.__traceback__ + assert tb is not None + assert tb.tb_frame.f_code is my_child_task.__code__ -def test_contextvar_support(): - var = contextvars.ContextVar("test") +def test_contextvar_support() -> None: + var: contextvars.ContextVar[str] = contextvars.ContextVar("test") var.set("before") assert var.get() == "before" - async def inner(): + async def inner() -> None: task = _core.current_task() assert task.context.get(var) == "before" assert var.get() == "before" @@ -1900,15 +1962,15 @@ async def inner(): assert var.get() == "before" -async def test_contextvar_multitask(): +async def test_contextvar_multitask() -> None: var = contextvars.ContextVar("test", default="hmmm") - async def t1(): + async def t1() -> None: assert var.get() == "hmmm" var.set("hmmmm") assert var.get() == "hmmmm" - async def t2(): + async def t2() -> None: assert var.get() == "hmmmm" async with _core.open_nursery() as n: @@ -1920,17 +1982,17 @@ async def t2(): await wait_all_tasks_blocked() -def test_system_task_contexts(): - cvar = contextvars.ContextVar("qwilfish") +def test_system_task_contexts() -> None: + cvar: contextvars.ContextVar[str] = contextvars.ContextVar("qwilfish") cvar.set("water") - async def system_task(): + async def system_task() -> None: assert cvar.get() == "water" - async def regular_task(): + async def regular_task() -> None: assert cvar.get() == "poison" - async def inner(): + async def inner() -> None: async with _core.open_nursery() as nursery: cvar.set("poison") nursery.start_soon(regular_task) @@ -1940,25 +2002,28 @@ async def inner(): _core.run(inner) -def test_Nursery_init(): +async def test_Nursery_init() -> None: + """Test that nurseries cannot be constructed directly.""" + # This function is async so that we have access to a task object we can + # pass in. It should never be accessed though. 
+ task = _core.current_task() + scope = _core.CancelScope() with pytest.raises(TypeError): - _core._run.Nursery(None, None) + _core._run.Nursery(task, scope, True) -async def test_Nursery_private_init(): +async def test_Nursery_private_init() -> None: # context manager creation should not raise async with _core.open_nursery() as nursery: assert False == nursery._closed -def test_Nursery_subclass(): +def test_Nursery_subclass() -> None: with pytest.raises(TypeError): - - class Subclass(_core._run.Nursery): # type: ignore[misc] - pass + type("Subclass", (_core._run.Nursery,), {}) -def test_Cancelled_init(): +def test_Cancelled_init() -> None: with pytest.raises(TypeError): raise _core.Cancelled @@ -1969,33 +2034,29 @@ def test_Cancelled_init(): _core.Cancelled._create() -def test_Cancelled_str(): +def test_Cancelled_str() -> None: cancelled = _core.Cancelled._create() assert str(cancelled) == "Cancelled" -def test_Cancelled_subclass(): +def test_Cancelled_subclass() -> None: with pytest.raises(TypeError): - - class Subclass(_core.Cancelled): - pass + type("Subclass", (_core.Cancelled,), {}) -def test_CancelScope_subclass(): +def test_CancelScope_subclass() -> None: with pytest.raises(TypeError): + type("Subclass", (_core.CancelScope,), {}) - class Subclass(_core.CancelScope): # type: ignore[misc] - pass - -def test_sniffio_integration(): +def test_sniffio_integration() -> None: with pytest.raises(sniffio.AsyncLibraryNotFoundError): sniffio.current_async_library() - async def check_inside_trio(): + async def check_inside_trio() -> None: assert sniffio.current_async_library() == "trio" - def check_function_returning_coroutine(): + def check_function_returning_coroutine() -> Awaitable[object]: assert sniffio.current_async_library() == "trio" return check_inside_trio() @@ -2004,7 +2065,7 @@ def check_function_returning_coroutine(): with pytest.raises(sniffio.AsyncLibraryNotFoundError): sniffio.current_async_library() - async def check_new_task_resets_sniffio_library(): + async def check_new_task_resets_sniffio_library() -> None: sniffio.current_async_library_cvar.set("nullio") _core.spawn_system_task(check_inside_trio) async with _core.open_nursery() as nursery: @@ -2015,7 +2076,7 @@ async def check_new_task_resets_sniffio_library(): _core.run(check_new_task_resets_sniffio_library) -async def test_Task_custom_sleep_data(): +async def test_Task_custom_sleep_data() -> None: task = _core.current_task() assert task.custom_sleep_data is None task.custom_sleep_data = 1 @@ -2025,15 +2086,18 @@ async def test_Task_custom_sleep_data(): @types.coroutine -def async_yield(value): +def async_yield(value: T) -> Generator[T, None, None]: yield value -async def test_permanently_detach_coroutine_object(): - task = None - pdco_outcome = None +async def test_permanently_detach_coroutine_object() -> None: + task: _core.Task | None = None + pdco_outcome: outcome.Outcome[str] | None = None - async def detachable_coroutine(task_outcome, yield_value): + async def detachable_coroutine( + task_outcome: outcome.Outcome[None], + yield_value: object, + ) -> None: await sleep(0) nonlocal task, pdco_outcome task = _core.current_task() @@ -2048,10 +2112,10 @@ async def detachable_coroutine(task_outcome, yield_value): # If we get here then Trio thinks the task has exited... but the coroutine # is still iterable assert pdco_outcome is None - assert task.coro.send("be free!") == "I'm free!" + assert not_none(task).coro.send("be free!") == "I'm free!" 
assert pdco_outcome == outcome.Value("be free!") with pytest.raises(StopIteration): - task.coro.send(None) + not_none(task).coro.send(None) # Check the exception paths too task = None @@ -2060,12 +2124,13 @@ async def detachable_coroutine(task_outcome, yield_value): async with _core.open_nursery() as nursery: nursery.start_soon(detachable_coroutine, outcome.Error(KeyError()), "uh oh") throw_in = ValueError() - assert task.coro.throw(throw_in) == "uh oh" + assert isinstance(task, _core.Task) # For type checkers. + assert not_none(task).coro.throw(throw_in) == "uh oh" assert pdco_outcome == outcome.Error(throw_in) with pytest.raises(StopIteration): task.coro.send(None) - async def bad_detach(): + async def bad_detach() -> None: async with _core.open_nursery(): with pytest.raises(RuntimeError) as excinfo: await _core.permanently_detach_coroutine_object(outcome.Value(None)) @@ -2075,21 +2140,21 @@ async def bad_detach(): nursery.start_soon(bad_detach) -async def test_detach_and_reattach_coroutine_object(): - unrelated_task = None - task = None +async def test_detach_and_reattach_coroutine_object() -> None: + unrelated_task: _core.Task | None = None + task: _core.Task | None = None - async def unrelated_coroutine(): + async def unrelated_coroutine() -> None: nonlocal unrelated_task unrelated_task = _core.current_task() - async def reattachable_coroutine(): + async def reattachable_coroutine() -> None: + nonlocal task await sleep(0) - nonlocal task task = _core.current_task() - def abort_fn(_): # pragma: no cover + def abort_fn(_: _core.RaiseCancelT) -> _core.Abort: # pragma: no cover return _core.Abort.FAILED got = await _core.temporarily_detach_coroutine_object(abort_fn) @@ -2099,7 +2164,9 @@ def abort_fn(_): # pragma: no cover await async_yield(2) with pytest.raises(RuntimeError) as excinfo: - await _core.reattach_detached_coroutine_object(unrelated_task, None) + await _core.reattach_detached_coroutine_object( + not_none(unrelated_task), None + ) assert "does not match" in str(excinfo.value) await _core.reattach_detached_coroutine_object(task, "byebye") @@ -2110,28 +2177,26 @@ def abort_fn(_): # pragma: no cover nursery.start_soon(unrelated_coroutine) nursery.start_soon(reattachable_coroutine) await wait_all_tasks_blocked() - assert unrelated_task is not None - assert task is not None # Okay, it's detached. Here's our coroutine runner: - assert task.coro.send("not trio!") == 1 - assert task.coro.send(None) == 2 - assert task.coro.send(None) == "byebye" + assert not_none(task).coro.send("not trio!") == 1 + assert not_none(task).coro.send(None) == 2 + assert not_none(task).coro.send(None) == "byebye" # Now it's been reattached, and we can leave the nursery -async def test_detached_coroutine_cancellation(): +async def test_detached_coroutine_cancellation() -> None: abort_fn_called = False - task = None + task: _core.Task | None = None - async def reattachable_coroutine(): + async def reattachable_coroutine() -> None: await sleep(0) nonlocal task task = _core.current_task() - def abort_fn(_): + def abort_fn(_: _core.RaiseCancelT) -> _core.Abort: nonlocal abort_fn_called abort_fn_called = True return _core.Abort.FAILED @@ -2152,22 +2217,22 @@ def abort_fn(_): @restore_unraisablehook() -def test_async_function_implemented_in_C(): +def test_async_function_implemented_in_C() -> None: # These used to crash because we'd try to mutate the coroutine object's # cr_frame, but C functions don't have Python frames. 
- async def agen_fn(record): + async def agen_fn(record: list[str]) -> AsyncIterator[None]: assert not _core.currently_ki_protected() record.append("the generator ran") yield - run_record = [] + run_record: list[str] = [] agen = agen_fn(run_record) _core.run(agen.__anext__) assert run_record == ["the generator ran"] - async def main(): - start_soon_record = [] + async def main() -> None: + start_soon_record: list[str] = [] agen = agen_fn(start_soon_record) async with _core.open_nursery() as nursery: nursery.start_soon(agen.__anext__) @@ -2176,7 +2241,7 @@ async def main(): _core.run(main) -async def test_very_deep_cancel_scope_nesting(): +async def test_very_deep_cancel_scope_nesting() -> None: # This used to crash with a RecursionError in CancelStatus.recalculate with ExitStack() as exit_stack: outermost_scope = _core.CancelScope() @@ -2186,7 +2251,7 @@ async def test_very_deep_cancel_scope_nesting(): outermost_scope.cancel() -async def test_cancel_scope_deadline_duplicates(): +async def test_cancel_scope_deadline_duplicates() -> None: # This exercises an assert in Deadlines._prune, by intentionally creating # duplicate entries in the deadline heap. now = _core.current_time() @@ -2200,16 +2265,16 @@ async def test_cancel_scope_deadline_duplicates(): @pytest.mark.skipif( sys.implementation.name != "cpython", reason="Only makes sense with refcounting GC" ) -async def test_simple_cancel_scope_usage_doesnt_create_cyclic_garbage(): +async def test_simple_cancel_scope_usage_doesnt_create_cyclic_garbage() -> None: # https://github.com/python-trio/trio/issues/1770 gc.collect() - async def do_a_cancel(): + async def do_a_cancel() -> None: with _core.CancelScope() as cscope: cscope.cancel() await sleep_forever() - async def crasher(): + async def crasher() -> NoReturn: raise ValueError old_flags = gc.get_debug() @@ -2239,11 +2304,11 @@ async def crasher(): @pytest.mark.skipif( sys.implementation.name != "cpython", reason="Only makes sense with refcounting GC" ) -async def test_cancel_scope_exit_doesnt_create_cyclic_garbage(): +async def test_cancel_scope_exit_doesnt_create_cyclic_garbage() -> None: # https://github.com/python-trio/trio/pull/2063 gc.collect() - async def crasher(): + async def crasher() -> NoReturn: raise ValueError old_flags = gc.get_debug() @@ -2275,13 +2340,14 @@ async def crasher(): @pytest.mark.skipif( sys.implementation.name != "cpython", reason="Only makes sense with refcounting GC" ) -async def test_nursery_cancel_doesnt_create_cyclic_garbage(): +async def test_nursery_cancel_doesnt_create_cyclic_garbage() -> None: + collected = False + # https://github.com/python-trio/trio/issues/1770#issuecomment-730229423 - def toggle_collected(): + def toggle_collected() -> None: nonlocal collected collected = True - collected = False gc.collect() old_flags = gc.get_debug() try: @@ -2310,17 +2376,17 @@ def toggle_collected(): @pytest.mark.skipif( sys.implementation.name != "cpython", reason="Only makes sense with refcounting GC" ) -async def test_locals_destroyed_promptly_on_cancel(): +async def test_locals_destroyed_promptly_on_cancel() -> None: destroyed = False - def finalizer(): + def finalizer() -> None: nonlocal destroyed destroyed = True class A: pass - async def task(): + async def task() -> None: a = A() weakref.finalize(a, finalizer) await _core.checkpoint() @@ -2331,12 +2397,12 @@ async def task(): assert destroyed -def test_run_strict_exception_groups(): +def test_run_strict_exception_groups() -> None: """ Test that nurseries respect the global context setting of 
strict_exception_groups. """ - async def main(): + async def main() -> NoReturn: async with _core.open_nursery(): raise Exception("foo") @@ -2348,13 +2414,13 @@ async def main(): assert exc.value.exceptions[0].args == ("foo",) -def test_run_strict_exception_groups_nursery_override(): +def test_run_strict_exception_groups_nursery_override() -> None: """ Test that a nursery can override the global context setting of strict_exception_groups. """ - async def main(): + async def main() -> NoReturn: async with _core.open_nursery(strict_exception_groups=False): raise Exception("foo") @@ -2362,7 +2428,7 @@ async def main(): _core.run(main, strict_exception_groups=True) -async def test_nursery_strict_exception_groups(): +async def test_nursery_strict_exception_groups() -> None: """Test that strict exception groups can be enabled on a per-nursery basis.""" with pytest.raises(MultiError) as exc: async with _core.open_nursery(strict_exception_groups=True): @@ -2373,13 +2439,13 @@ async def test_nursery_strict_exception_groups(): assert exc.value.exceptions[0].args == ("foo",) -async def test_nursery_collapse_strict(): +async def test_nursery_collapse_strict() -> None: """ Test that a single exception from a nested nursery with strict semantics doesn't get collapsed when CancelledErrors are stripped from it. """ - async def raise_error(): + async def raise_error() -> NoReturn: raise RuntimeError("test error") with pytest.raises(MultiError) as exc: @@ -2399,13 +2465,13 @@ async def raise_error(): assert isinstance(exceptions[1].exceptions[0], RuntimeError) -async def test_nursery_collapse_loose(): +async def test_nursery_collapse_loose() -> None: """ Test that a single exception from a nested nursery with loose semantics gets collapsed when CancelledErrors are stripped from it. """ - async def raise_error(): + async def raise_error() -> NoReturn: raise RuntimeError("test error") with pytest.raises(MultiError) as exc: @@ -2423,7 +2489,7 @@ async def raise_error(): assert isinstance(exceptions[1], RuntimeError) -async def test_cancel_scope_no_cancellederror(): +async def test_cancel_scope_no_cancellederror() -> None: """ Test that when a cancel scope encounters an exception group that does NOT contain a Cancelled exception, it will NOT set the ``cancelled_caught`` flag. diff --git a/trio/_highlevel_generic.py b/trio/_highlevel_generic.py index 4269f90bae..e136b2e4bc 100644 --- a/trio/_highlevel_generic.py +++ b/trio/_highlevel_generic.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Generic, TypeVar +from typing import TYPE_CHECKING, Generic, TypeVar import attr @@ -9,6 +9,10 @@ from .abc import AsyncResource, HalfCloseableStream, ReceiveStream, SendStream +if TYPE_CHECKING: + from typing_extensions import TypeGuard + + SendStreamT = TypeVar("SendStreamT", bound=SendStream) ReceiveStreamT = TypeVar("ReceiveStreamT", bound=ReceiveStream) @@ -43,6 +47,11 @@ async def aclose_forcefully(resource: AsyncResource) -> None: await resource.aclose() +def _is_halfclosable(stream: SendStream) -> TypeGuard[HalfCloseableStream]: + """Check if the stream has a send_eof() method.""" + return hasattr(stream, "send_eof") + + @attr.s(eq=False, hash=False) class StapledStream( HalfCloseableStream, @@ -97,16 +106,15 @@ async def wait_send_all_might_not_block(self) -> None: async def send_eof(self) -> None: """Shuts down the send side of the stream. - If ``self.send_stream.send_eof`` exists, then calls it. Otherwise, - calls ``self.send_stream.aclose()``. 
- + If :meth:`self.send_stream.send_eof() ` exists, + then this calls it. Otherwise, this calls + :meth:`self.send_stream.aclose() `. """ - if hasattr(self.send_stream, "send_eof"): - # send_stream.send_eof() is not defined in Trio, this should maybe be - # redesigned so it's possible to type it. - return await self.send_stream.send_eof() # type: ignore[no-any-return] + stream = self.send_stream + if _is_halfclosable(stream): + return await stream.send_eof() else: - return await self.send_stream.aclose() + return await stream.aclose() # we intentionally accept more types from the caller than we support returning async def receive_some(self, max_bytes: int | None = None) -> bytes: diff --git a/trio/_sync.py b/trio/_sync.py index bd2122858e..df4790ae74 100644 --- a/trio/_sync.py +++ b/trio/_sync.py @@ -1,7 +1,7 @@ from __future__ import annotations import math -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Protocol import attr @@ -105,21 +105,29 @@ def statistics(self) -> EventStatistics: return EventStatistics(tasks_waiting=len(self._tasks)) -# TODO: type this with a Protocol to get rid of type: ignore, see -# https://github.com/python-trio/trio/pull/2682#discussion_r1259097422 +class _HasAcquireRelease(Protocol): + """Only classes with acquire() and release() can use the mixin's implementations.""" + + async def acquire(self) -> object: + ... + + def release(self) -> object: + ... + + class AsyncContextManagerMixin: @enable_ki_protection - async def __aenter__(self) -> None: - await self.acquire() # type: ignore[attr-defined] + async def __aenter__(self: _HasAcquireRelease) -> None: + await self.acquire() @enable_ki_protection async def __aexit__( - self, + self: _HasAcquireRelease, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, ) -> None: - self.release() # type: ignore[attr-defined] + self.release() @attr.s(frozen=True, slots=True) diff --git a/trio/_tests/verify_types_darwin.json b/trio/_tests/verify_types_darwin.json index 2b491521f5..2b89d28d8e 100644 --- a/trio/_tests/verify_types_darwin.json +++ b/trio/_tests/verify_types_darwin.json @@ -76,7 +76,7 @@ ], "otherSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 682, + "withKnownType": 680, "withUnknownType": 0 }, "packageName": "trio" diff --git a/trio/_tests/verify_types_linux.json b/trio/_tests/verify_types_linux.json index a112e7edc9..ea5af77abc 100644 --- a/trio/_tests/verify_types_linux.json +++ b/trio/_tests/verify_types_linux.json @@ -64,7 +64,7 @@ ], "otherSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 682, + "withKnownType": 680, "withUnknownType": 0 }, "packageName": "trio" diff --git a/trio/_tests/verify_types_windows.json b/trio/_tests/verify_types_windows.json index 13c4756bd4..5d3e29a5dc 100644 --- a/trio/_tests/verify_types_windows.json +++ b/trio/_tests/verify_types_windows.json @@ -180,7 +180,7 @@ ], "otherSymbolCounts": { "withAmbiguousType": 0, - "withKnownType": 673, + "withKnownType": 671, "withUnknownType": 0 }, "packageName": "trio" From d9f71c4cd4ff527f41c30e575c0ded12a7158d4a Mon Sep 17 00:00:00 2001 From: GalaxySnail Date: Fri, 1 Sep 2023 03:42:03 +0800 Subject: [PATCH 152/162] Add py.typed in MANIFEST.in --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index 8b92523fb7..eb9c0173da 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,6 +2,7 @@ include LICENSE LICENSE.MIT LICENSE.APACHE2 include README.rst include CODE_OF_CONDUCT.md CONTRIBUTING.md include 
test-requirements.txt +include trio/py.typed recursive-include trio/_tests/test_ssl_certs *.pem recursive-include docs * prune docs/build From 82b649550800a460048e37f75b3ae3986dbc528f Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 1 Sep 2023 00:16:27 +0000 Subject: [PATCH 153/162] Dependency updates (#2782) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- docs-requirements.txt | 16 ++++++++-------- test-requirements.txt | 18 +++++++++--------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/docs-requirements.txt b/docs-requirements.txt index e0a131b58d..d533573c93 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -18,11 +18,11 @@ cffi==1.15.1 # via cryptography charset-normalizer==3.2.0 # via requests -click==8.1.6 +click==8.1.7 # via # click-default-group # towncrier -click-default-group==1.2.2 +click-default-group==1.2.4 # via towncrier cryptography==41.0.3 # via pyopenssl @@ -30,7 +30,7 @@ docutils==0.18.1 # via # sphinx # sphinx-rtd-theme -exceptiongroup==1.1.2 +exceptiongroup==1.1.3 # via -r docs-requirements.in idna==3.4 # via @@ -38,15 +38,15 @@ idna==3.4 # requests imagesize==1.4.1 # via sphinx -immutables==0.19 +immutables==0.20 # via -r docs-requirements.in importlib-metadata==6.8.0 # via sphinx -importlib-resources==6.0.0 +importlib-resources==6.0.1 # via towncrier incremental==22.10.0 # via towncrier -jinja2==3.0.3 +jinja2==3.1.2 # via # -r docs-requirements.in # sphinx @@ -59,7 +59,7 @@ packaging==23.1 # via sphinx pycparser==2.21 # via cffi -pygments==2.15.1 +pygments==2.16.1 # via sphinx pyopenssl==23.2.0 # via -r docs-requirements.in @@ -79,7 +79,7 @@ sphinx==6.1.3 # sphinx-rtd-theme # sphinxcontrib-jquery # sphinxcontrib-trio -sphinx-rtd-theme==1.2.2 +sphinx-rtd-theme==1.3.0 # via -r docs-requirements.in sphinxcontrib-applehelp==1.0.4 # via sphinx diff --git a/test-requirements.txt b/test-requirements.txt index 73d94f09c1..86a8f14aee 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -24,13 +24,13 @@ build==0.10.0 # via pip-tools cffi==1.15.1 # via cryptography -click==8.1.6 +click==8.1.7 # via # black # pip-tools codespell==2.2.5 # via -r test-requirements.in -coverage==7.2.7 +coverage==7.3.0 # via -r test-requirements.in cryptography==41.0.3 # via @@ -42,7 +42,7 @@ decorator==5.1.1 # via ipython dill==0.3.7 # via pylint -exceptiongroup==1.1.2 ; python_version < "3.11" +exceptiongroup==1.1.3 ; python_version < "3.11" # via # -r test-requirements.in # pytest @@ -76,7 +76,7 @@ mccabe==0.7.0 # via # flake8 # pylint -mypy==1.4.1 ; implementation_name == "cpython" +mypy==1.5.1 ; implementation_name == "cpython" # via -r test-requirements.in mypy-extensions==1.0.0 ; implementation_name == "cpython" # via @@ -100,13 +100,13 @@ pexpect==4.8.0 # via ipython pickleshare==0.7.5 # via ipython -pip-tools==7.1.0 +pip-tools==7.3.0 # via -r test-requirements.in platformdirs==3.10.0 # via # black # pylint -pluggy==1.2.0 +pluggy==1.3.0 # via pytest prompt-toolkit==3.0.39 # via ipython @@ -120,7 +120,7 @@ pycparser==2.21 # via cffi pyflakes==3.1.0 # via flake8 -pygments==2.15.1 +pygments==2.16.1 # via ipython pylint==2.17.5 # via -r test-requirements.in @@ -128,7 +128,7 @@ pyopenssl==23.2.0 # via -r test-requirements.in pyproject-hooks==1.0.0 # via build -pyright==1.1.318 +pyright==1.1.325 # via -r test-requirements.in pytest==7.4.0 # via -r test-requirements.in @@ -170,7 +170,7 @@ typing-extensions==4.7.1 # pylint 
wcwidth==0.2.6 # via prompt-toolkit -wheel==0.41.0 +wheel==0.41.2 # via pip-tools wrapt==1.15.0 # via astroid From d3255a0bd29d4d1adfafff21aeeaa60d0b4aba9a Mon Sep 17 00:00:00 2001 From: Vincent Vanlaer <13833860+VincentVanlaer@users.noreply.github.com> Date: Fri, 1 Sep 2023 13:08:30 +0200 Subject: [PATCH 154/162] Fix unconditional openssl import in DTLS (#2779) --- trio/_dtls.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/trio/_dtls.py b/trio/_dtls.py index 08b7672a2f..541144de07 100644 --- a/trio/_dtls.py +++ b/trio/_dtls.py @@ -30,7 +30,6 @@ from weakref import ReferenceType, WeakValueDictionary import attr -from OpenSSL import SSL import trio @@ -39,6 +38,8 @@ if TYPE_CHECKING: from types import TracebackType + # See DTLSEndpoint.__init__ for why this is imported here + from OpenSSL import SSL from OpenSSL.SSL import Context from typing_extensions import Self, TypeAlias From cfa1fb3c0f0c471ad572c1e630847acf1cccdfa3 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sat, 2 Sep 2023 21:24:51 -0500 Subject: [PATCH 155/162] Ignore generics not matching --- trio/_core/_multierror.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/trio/_core/_multierror.py b/trio/_core/_multierror.py index 6e4cb8b923..a719873adb 100644 --- a/trio/_core/_multierror.py +++ b/trio/_core/_multierror.py @@ -261,11 +261,11 @@ def __str__(self) -> str: def __repr__(self) -> str: return f"" - @overload + @overload # type: ignore[override] # 'Exception' != '_ExceptionT' def derive(self, excs: Sequence[Exception], /) -> NonBaseMultiError: ... - @overload + @overload # type: ignore[override] # 'BaseException' != '_BaseExceptionT' def derive(self, excs: Sequence[BaseException], /) -> MultiError: ... From d3a5cb1d8c984a9d2101c84ed3e03c9f937b109c Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Sat, 2 Sep 2023 22:31:24 -0400 Subject: [PATCH 156/162] repair bad merge in _run.py --- trio/_core/_run.py | 1 + 1 file changed, 1 insertion(+) diff --git a/trio/_core/_run.py b/trio/_core/_run.py index fbda8e1615..c5c16803b0 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -1493,6 +1493,7 @@ class GuestState: unrolled_run_next_send: Outcome[Any] = attr.ib(factory=_value_factory) def guest_tick(self) -> None: + prev_library, sniffio_library.name = sniffio_library.name, "trio" try: timeout = self.unrolled_run_next_send.send(self.unrolled_run_gen) except StopIteration: From 467c4f4f8521c29bb3851451d578267b1e37741f Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sun, 3 Sep 2023 00:12:55 -0500 Subject: [PATCH 157/162] Start fixing type errors --- trio/_core/_run.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/trio/_core/_run.py b/trio/_core/_run.py index c5c16803b0..0ed5f78322 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -200,8 +200,7 @@ def collapse_exception_group( ) return exceptions[0] elif modified: - # derive() returns Any for some reason. 
- return excgroup.derive(exceptions) # type: ignore[no-any-return] + return excgroup.derive(exceptions) else: return excgroup From 90bbcf17891650ae90f493aa9aa02b3b347b8930 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sun, 3 Sep 2023 00:15:17 -0500 Subject: [PATCH 158/162] Fix formatting --- trio/_threads.py | 1 - 1 file changed, 1 deletion(-) diff --git a/trio/_threads.py b/trio/_threads.py index 2780fb6446..24905cfbde 100644 --- a/trio/_threads.py +++ b/trio/_threads.py @@ -409,7 +409,6 @@ def callback( fn: Callable[..., RetT], args: tuple[object, ...], ) -> None: - @disable_ki_protection def unprotected_fn() -> RetT: ret = fn(*args) From 556fea4ebba3ff6ddfe3f0cbad6faa6ac4925ad7 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sun, 3 Sep 2023 00:21:13 -0500 Subject: [PATCH 159/162] Fix type annotations --- trio/_core/_run.py | 9 +++++++-- trio/_core/_tests/test_run.py | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/trio/_core/_run.py b/trio/_core/_run.py index 0ed5f78322..c51ffd6af2 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -2357,7 +2357,9 @@ def my_done_callback(run_outcome): # spawn_system_task. We don't actually run any user code during # this time, so it shouldn't be possible to get an exception here, # except for a TrioInternalError. - next_send = None + next_send: EventResult = cast( + EventResult, None + ) # First iteration must be `None`, every iteration after that is EventResult for tick in range(5): # expected need is 2 iterations + leave some wiggle room if runner.system_nursery is not None: # We're initialized enough to switch to async guest ticks @@ -2378,7 +2380,10 @@ def my_done_callback(run_outcome): # IOManager.get_events() if no I/O was waiting, which is # platform-dependent. We don't actually check for I/O during # this init phase because no one should be expecting any yet. - next_send = 0 if sys.platform == "win32" else () + if sys.platform == "win32": + next_send = 0 + else: + next_send = [] else: # pragma: no cover guest_state.unrolled_run_gen.throw( TrioInternalError( diff --git a/trio/_core/_tests/test_run.py b/trio/_core/_tests/test_run.py index 0b15b606fb..5c45cf828f 100644 --- a/trio/_core/_tests/test_run.py +++ b/trio/_core/_tests/test_run.py @@ -2066,7 +2066,7 @@ def check_function_returning_coroutine() -> Awaitable[object]: sniffio.current_async_library() @contextmanager - def alternate_sniffio_library(): + def alternate_sniffio_library() -> Generator[None, None, None]: prev_token = sniffio.current_async_library_cvar.set("nullio") prev_library, sniffio.thread_local.name = sniffio.thread_local.name, "nullio" try: From dacd4b3ff415938a6a7de1fb8a0bcd9221be6a61 Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sun, 3 Sep 2023 00:50:21 -0500 Subject: [PATCH 160/162] Explicit typing not required after cast --- trio/_core/_run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trio/_core/_run.py b/trio/_core/_run.py index c51ffd6af2..b2f3a65ddd 100644 --- a/trio/_core/_run.py +++ b/trio/_core/_run.py @@ -2357,7 +2357,7 @@ def my_done_callback(run_outcome): # spawn_system_task. We don't actually run any user code during # this time, so it shouldn't be possible to get an exception here, # except for a TrioInternalError. 
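(The sniffio bookkeeping restored in ``guest_tick`` above, together with the ``alternate_sniffio_library`` helper added to the tests, is the usual "swap in, then restore" dance around sniffio's thread-local and contextvar state. A minimal sketch of that pattern as a reusable context manager — ``report_library`` is an invented name, and this assumes a sniffio release that exposes ``thread_local.name``:)

    from contextlib import contextmanager
    from typing import Generator

    import sniffio

    @contextmanager
    def report_library(name: str) -> Generator[None, None, None]:
        # Remember both the contextvar token and the thread-local value so
        # they can be restored on the way out, even if the body raises.
        token = sniffio.current_async_library_cvar.set(name)
        prev, sniffio.thread_local.name = sniffio.thread_local.name, name
        try:
            yield
        finally:
            sniffio.thread_local.name = prev
            sniffio.current_async_library_cvar.reset(token)
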
- next_send: EventResult = cast( + next_send = cast( EventResult, None ) # First iteration must be `None`, every iteration after that is EventResult for tick in range(5): # expected need is 2 iterations + leave some wiggle room From 721bfcaada965018a89b8fadaf8c96ad5149051d Mon Sep 17 00:00:00 2001 From: CoolCat467 <52022020+CoolCat467@users.noreply.github.com> Date: Sun, 3 Sep 2023 00:54:05 -0500 Subject: [PATCH 161/162] Fix unused type ignore --- trio/_core/_multierror.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/trio/_core/_multierror.py b/trio/_core/_multierror.py index a719873adb..d55e89554d 100644 --- a/trio/_core/_multierror.py +++ b/trio/_core/_multierror.py @@ -265,7 +265,7 @@ def __repr__(self) -> str: def derive(self, excs: Sequence[Exception], /) -> NonBaseMultiError: ... - @overload # type: ignore[override] # 'BaseException' != '_BaseExceptionT' + @overload def derive(self, excs: Sequence[BaseException], /) -> MultiError: ... From 83671a4bedb514f31ab82bd00d81f6ef23be099b Mon Sep 17 00:00:00 2001 From: richardsheridan Date: Sat, 2 Sep 2023 21:03:15 -0400 Subject: [PATCH 162/162] Require static checks to pass in CI --- .github/workflows/ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 40af0960f5..d5aeb3ec04 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -104,8 +104,7 @@ jobs: continue-on-error: >- ${{ ( - matrix.check_formatting == '1' - || endsWith(matrix.python, '-dev') + endsWith(matrix.python, '-dev') || endsWith(matrix.python, '-nightly') ) && true
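(One last illustration, for the ``cast(EventResult, None)`` change a little further up: the first value sent into a just-started generator has to be ``None``, even though every later send carries a real event result, so the variable is typed for the common case and the initial ``None`` is smuggled in with ``cast``. A rough sketch with invented names — ``EventResult`` and ``drive`` are only stand-ins here, not the real platform-specific types:)

    from typing import Generator, cast

    EventResult = int  # stand-in for the platform-specific I/O result type

    def drive(gen: Generator[float, EventResult, None]) -> None:
        # Typed as EventResult so the assignments below type-check, but the
        # very first send into a fresh generator must be None.
        next_send = cast(EventResult, None)
        while True:
            try:
                gen.send(next_send)  # the yielded timeout is ignored in this sketch
            except StopIteration:
                return
            next_send = 0  # pretend no I/O was ready this round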