diff --git a/.azure-pipelines/ci.yml b/.azure-pipelines/ci.yml
index 4c2f115cd9b600..9b638ddd004609 100644
--- a/.azure-pipelines/ci.yml
+++ b/.azure-pipelines/ci.yml
@@ -35,7 +35,9 @@ jobs:
- job: macOS_CI_Tests
displayName: macOS CI Tests
dependsOn: Prebuild
- condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
+ #condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
+ # bpo-39837: macOS tests on Azure Pipelines are disabled
+ condition: false
variables:
testRunTitle: '$(build.sourceBranchName)-macos'
@@ -59,7 +61,7 @@ jobs:
variables:
testRunTitle: '$(build.sourceBranchName)-linux'
testRunPlatform: linux
- openssl_version: 1.1.1d
+ openssl_version: 1.1.1f
steps:
- template: ./posix-steps.yml
@@ -116,7 +118,7 @@ jobs:
variables:
testRunTitle: '$(Build.SourceBranchName)-linux-coverage'
testRunPlatform: linux-coverage
- openssl_version: 1.1.1d
+ openssl_version: 1.1.1f
steps:
- template: ./posix-steps.yml
diff --git a/.azure-pipelines/macos-steps.yml b/.azure-pipelines/macos-steps.yml
index d2ca580a93d7dd..fa38a0df8c87b8 100644
--- a/.azure-pipelines/macos-steps.yml
+++ b/.azure-pipelines/macos-steps.yml
@@ -6,7 +6,7 @@ steps:
- script: ./configure --with-pydebug --with-openssl=/usr/local/opt/openssl --prefix=/opt/python-azdev
displayName: 'Configure CPython (debug)'
-- script: make -s -j4
+- script: make -j4
displayName: 'Build CPython'
- script: make pythoninfo
diff --git a/.azure-pipelines/posix-steps.yml b/.azure-pipelines/posix-steps.yml
index 3ed3abd02a7146..b6dde593019e6c 100644
--- a/.azure-pipelines/posix-steps.yml
+++ b/.azure-pipelines/posix-steps.yml
@@ -20,7 +20,7 @@ steps:
- script: ./configure --with-pydebug
displayName: 'Configure CPython (debug)'
-- script: make -s -j4
+- script: make -j4
displayName: 'Build CPython'
- ${{ if eq(parameters.coverage, 'true') }}:
@@ -49,7 +49,7 @@ steps:
- script: ./venv/bin/python -m coverage xml
displayName: 'Generate coverage.xml'
- - script: source ./venv/bin/activate && bash <(curl -s https://codecov.io/bash)
+ - script: source ./venv/bin/activate && bash <(curl -s https://codecov.io/bash) -y .github/codecov.yml
displayName: 'Publish code coverage results'
diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml
index 73d4f55b864500..65f23eb62ee095 100644
--- a/.azure-pipelines/pr.yml
+++ b/.azure-pipelines/pr.yml
@@ -33,7 +33,9 @@ jobs:
- job: macOS_PR_Tests
displayName: macOS PR Tests
dependsOn: Prebuild
- condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
+ #condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
+ # bpo-39837: macOS tests on Azure Pipelines are disabled
+ condition: false
variables:
testRunTitle: '$(system.pullRequest.TargetBranch)-macos'
@@ -59,7 +61,7 @@ jobs:
variables:
testRunTitle: '$(system.pullRequest.TargetBranch)-linux'
testRunPlatform: linux
- openssl_version: 1.1.1d
+ openssl_version: 1.1.1f
steps:
- template: ./posix-steps.yml
@@ -116,7 +118,7 @@ jobs:
variables:
testRunTitle: '$(Build.SourceBranchName)-linux-coverage'
testRunPlatform: linux-coverage
- openssl_version: 1.1.1d
+ openssl_version: 1.1.1f
steps:
- template: ./posix-steps.yml
diff --git a/.azure-pipelines/windows-release/stage-build.yml b/.azure-pipelines/windows-release/stage-build.yml
index 9391a91e30b5e6..69f3b1e16451ec 100644
--- a/.azure-pipelines/windows-release/stage-build.yml
+++ b/.azure-pipelines/windows-release/stage-build.yml
@@ -3,7 +3,7 @@ jobs:
displayName: Docs build
pool:
name: 'Windows Release'
- #vmName: win2016-vs2017
+ #vmImage: windows-2019
workspace:
clean: all
@@ -45,7 +45,7 @@ jobs:
displayName: Python build
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
@@ -91,7 +91,7 @@ jobs:
condition: and(succeeded(), ne(variables['DoPGO'], 'true'))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
@@ -141,7 +141,7 @@ jobs:
displayName: Publish Tcl/Tk Library
pool:
- vmName: windows-latest
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-layout-embed.yml b/.azure-pipelines/windows-release/stage-layout-embed.yml
index 3306e1cbc49d98..dbccdead143b21 100644
--- a/.azure-pipelines/windows-release/stage-layout-embed.yml
+++ b/.azure-pipelines/windows-release/stage-layout-embed.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), eq(variables['DoEmbed'], 'true'))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-layout-full.yml b/.azure-pipelines/windows-release/stage-layout-full.yml
index 78bc1b3975e93d..8fc8da3e52fe03 100644
--- a/.azure-pipelines/windows-release/stage-layout-full.yml
+++ b/.azure-pipelines/windows-release/stage-layout-full.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), eq(variables['DoLayout'], 'true'))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-layout-msix.yml b/.azure-pipelines/windows-release/stage-layout-msix.yml
index 60a5c9ea5435c8..def4f7d3c6bee5 100644
--- a/.azure-pipelines/windows-release/stage-layout-msix.yml
+++ b/.azure-pipelines/windows-release/stage-layout-msix.yml
@@ -3,7 +3,7 @@ jobs:
displayName: Make MSIX layout
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-layout-nuget.yml b/.azure-pipelines/windows-release/stage-layout-nuget.yml
index 7e20f89530349c..41cdff850e83be 100644
--- a/.azure-pipelines/windows-release/stage-layout-nuget.yml
+++ b/.azure-pipelines/windows-release/stage-layout-nuget.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), eq(variables['DoNuget'], 'true'))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-msi.yml b/.azure-pipelines/windows-release/stage-msi.yml
index 7afc816a0c6e9c..9b965b09c14748 100644
--- a/.azure-pipelines/windows-release/stage-msi.yml
+++ b/.azure-pipelines/windows-release/stage-msi.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), not(variables['SigningCertificate']))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
variables:
ReleaseUri: http://www.python.org/{arch}
diff --git a/.azure-pipelines/windows-release/stage-pack-msix.yml b/.azure-pipelines/windows-release/stage-pack-msix.yml
index f17ba9628e21b0..07e343a0b4e0c7 100644
--- a/.azure-pipelines/windows-release/stage-pack-msix.yml
+++ b/.azure-pipelines/windows-release/stage-pack-msix.yml
@@ -3,7 +3,7 @@ jobs:
displayName: Pack MSIX bundles
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-pack-nuget.yml b/.azure-pipelines/windows-release/stage-pack-nuget.yml
index 34619fc5fdc318..b100364820d95b 100644
--- a/.azure-pipelines/windows-release/stage-pack-nuget.yml
+++ b/.azure-pipelines/windows-release/stage-pack-nuget.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), eq(variables['DoNuget'], 'true'))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-publish-nugetorg.yml b/.azure-pipelines/windows-release/stage-publish-nugetorg.yml
index b78bd493a0fd1b..d5edf44ef5c2ec 100644
--- a/.azure-pipelines/windows-release/stage-publish-nugetorg.yml
+++ b/.azure-pipelines/windows-release/stage-publish-nugetorg.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), eq(variables['DoNuget'], 'true'))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-publish-pythonorg.yml b/.azure-pipelines/windows-release/stage-publish-pythonorg.yml
index 8c95f1b950cd75..4b88bdebf8cc41 100644
--- a/.azure-pipelines/windows-release/stage-publish-pythonorg.yml
+++ b/.azure-pipelines/windows-release/stage-publish-pythonorg.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), and(eq(variables['DoMSI'], 'true'), eq(variables['DoEmbed'], 'true')))
pool:
- #vmName: win2016-vs2017
+ #vmImage: windows-2019
name: 'Windows Release'
workspace:
@@ -39,11 +39,6 @@ jobs:
artifactName: embed
downloadPath: $(Build.BinariesDirectory)
- - powershell: 'gci *embed-arm*.zip | %{ Write-Host "Not publishing: $($_.Name)"; gi $_ } | del'
- displayName: 'Prevent publishing ARM/ARM64 packages'
- workingDirectory: '$(Build.BinariesDirectory)\embed'
- condition: and(succeeded(), not(variables['PublishArmPackages']))
-
- task: DownloadPipelineArtifact@1
displayName: 'Download artifact from $(BuildToPublish): Doc'
condition: and(succeeded(), variables['BuildToPublish'])
@@ -80,6 +75,11 @@ jobs:
buildVersionToDownload: specific
buildId: $(BuildToPublish)
+ - powershell: 'gci *embed-arm*.zip | %{ Write-Host "Not publishing: $($_.Name)"; gi $_ } | del'
+ displayName: 'Prevent publishing ARM/ARM64 packages'
+ workingDirectory: '$(Build.BinariesDirectory)\embed'
+ condition: and(succeeded(), not(variables['PublishArmPackages']))
+
- template: ./gpg-sign.yml
parameters:
diff --git a/.azure-pipelines/windows-release/stage-publish-store.yml b/.azure-pipelines/windows-release/stage-publish-store.yml
index b22147b1ab45e7..e0512b95f27da8 100644
--- a/.azure-pipelines/windows-release/stage-publish-store.yml
+++ b/.azure-pipelines/windows-release/stage-publish-store.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), eq(variables['DoMSIX'], 'true'))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-sign.yml b/.azure-pipelines/windows-release/stage-sign.yml
index a0adc0581229d9..4d757ae8fca032 100644
--- a/.azure-pipelines/windows-release/stage-sign.yml
+++ b/.azure-pipelines/windows-release/stage-sign.yml
@@ -114,7 +114,7 @@ jobs:
condition: and(succeeded(), not(variables['SigningCertificate']))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
steps:
- checkout: none
diff --git a/.azure-pipelines/windows-release/stage-test-embed.yml b/.azure-pipelines/windows-release/stage-test-embed.yml
index b33176266a20da..d99bd74722bacb 100644
--- a/.azure-pipelines/windows-release/stage-test-embed.yml
+++ b/.azure-pipelines/windows-release/stage-test-embed.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), eq(variables['DoEmbed'], 'true'))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-test-msi.yml b/.azure-pipelines/windows-release/stage-test-msi.yml
index 27b0c96987a7a0..21e38c39590f70 100644
--- a/.azure-pipelines/windows-release/stage-test-msi.yml
+++ b/.azure-pipelines/windows-release/stage-test-msi.yml
@@ -3,7 +3,7 @@ jobs:
displayName: Test MSI
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.azure-pipelines/windows-release/stage-test-nuget.yml b/.azure-pipelines/windows-release/stage-test-nuget.yml
index 1f8b601d0d023b..94d815e95226ef 100644
--- a/.azure-pipelines/windows-release/stage-test-nuget.yml
+++ b/.azure-pipelines/windows-release/stage-test-nuget.yml
@@ -4,7 +4,7 @@ jobs:
condition: and(succeeded(), eq(variables['DoNuget'], 'true'))
pool:
- vmName: win2016-vs2017
+ vmImage: windows-2019
workspace:
clean: all
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 9354cc85d28837..4d80698eff39c4 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -74,6 +74,8 @@ Include/pytime.h @pganssle @abalkin
# Parser/Pgen
/Parser/pgen/ @pablogsal
+/Parser/pegen/ @pablogsal
+/Tools/peg_generator/ @pablogsal
# SQLite 3
**/*sqlite* @berkerpeksag
diff --git a/.github/CONTRIBUTING.rst b/.github/CONTRIBUTING.rst
index 7f912e87084190..a81935d3c9da76 100644
--- a/.github/CONTRIBUTING.rst
+++ b/.github/CONTRIBUTING.rst
@@ -8,17 +8,17 @@ Build Status
+ `Stable buildbots `_
-- 3.7
+- 3.9
- + `Stable buildbots `_
+ + `Stable buildbots `_
-- 3.6
+- 3.8
- + `Stable buildbots `_
+ + `Stable buildbots `_
-- 2.7
+- 3.7
- + `Stable buildbots `_
+ + `Stable buildbots `_
Thank You
diff --git a/.github/problem-matchers/sphinx.json b/.github/problem-matchers/sphinx.json
new file mode 100644
index 00000000000000..228415f5b7b03f
--- /dev/null
+++ b/.github/problem-matchers/sphinx.json
@@ -0,0 +1,40 @@
+{
+ "problemMatcher": [
+ {
+ "owner": "sphinx-problem-matcher",
+ "pattern": [
+ {
+ "regexp": "^(.*):(\\d+):\\s+(\\w*):\\s+(.*)$",
+ "file": 1,
+ "line": 2,
+ "severity": 3,
+ "message": 4
+ }
+ ]
+ },
+ {
+ "owner": "sphinx-problem-matcher-loose",
+ "pattern": [
+ {
+ "_comment": "A bit of a looser pattern, doesn't look for line numbers, just looks for file names relying on them to start with / and end with .rst",
+ "regexp": "(\/.*\\.rst):\\s+(\\w*):\\s+(.*)$",
+ "file": 1,
+ "severity": 2,
+ "message": 3
+ }
+ ]
+ },
+ {
+ "owner": "sphinx-problem-matcher-loose-no-severity",
+ "pattern": [
+ {
+ "_comment": "Looks for file names ending with .rst and line numbers but without severity",
+ "regexp": "^(.*\\.rst):(\\d+):(.*)$",
+ "file": 1,
+ "line": 2,
+ "message": 3
+ }
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 7f13cfbc1ff4ed..6bb52cb6a5daa5 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -1,33 +1,46 @@
name: Tests
+# bpo-40548: "paths-ignore" is not used to skip documentation-only PRs, because
+# it prevents to mark a job as mandatory. A PR cannot be merged if a job is
+# mandatory but not scheduled because of "paths-ignore".
on:
push:
branches:
- master
+ - 3.9
- 3.8
- 3.7
- paths-ignore:
- - 'Doc/**'
- - 'Misc/**'
- - '**/*.md'
- - '**/*.rst'
pull_request:
branches:
- master
+ - 3.9
- 3.8
- 3.7
- paths-ignore:
- - 'Doc/**'
- - 'Misc/**'
- - '**/*.md'
- - '**/*.rst'
jobs:
+ check_source:
+ name: 'Check for source changes'
+ runs-on: ubuntu-latest
+ outputs:
+ run_tests: ${{ steps.check.outputs.run_tests }}
+ steps:
+ - uses: actions/checkout@v2
+ - name: Check for source changes
+ id: check
+ run: |
+      if [ -z "$GITHUB_BASE_REF" ]; then
+ echo '::set-output name=run_tests::true'
+ else
+ git fetch origin $GITHUB_BASE_REF --depth=1
+ git diff --name-only origin/$GITHUB_BASE_REF... | grep -qvE '(\.rst$|^Doc|^Misc)' && echo '::set-output name=run_tests::true' || true
+ fi
build_win32:
name: 'Windows (x86)'
runs-on: windows-latest
+ needs: check_source
+ if: needs.check_source.outputs.run_tests == 'true'
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v2
- name: Build CPython
run: .\PCbuild\build.bat -e -p Win32
- name: Display build info
@@ -38,8 +51,10 @@ jobs:
build_win_amd64:
name: 'Windows (x64)'
runs-on: windows-latest
+ needs: check_source
+ if: needs.check_source.outputs.run_tests == 'true'
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v2
- name: Build CPython
run: .\PCbuild\build.bat -e -p x64
- name: Display build info
@@ -50,12 +65,14 @@ jobs:
build_macos:
name: 'macOS'
runs-on: macos-latest
+ needs: check_source
+ if: needs.check_source.outputs.run_tests == 'true'
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v2
- name: Configure CPython
run: ./configure --with-pydebug --with-openssl=/usr/local/opt/openssl --prefix=/opt/python-dev
- name: Build CPython
- run: make -s -j4
+ run: make -j4
- name: Display build info
run: make pythoninfo
- name: Tests
@@ -64,10 +81,12 @@ jobs:
build_ubuntu:
name: 'Ubuntu'
runs-on: ubuntu-latest
+ needs: check_source
+ if: needs.check_source.outputs.run_tests == 'true'
env:
- OPENSSL_VER: 1.1.1d
+ OPENSSL_VER: 1.1.1f
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v2
- name: Install Dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: 'Restore OpenSSL build'
@@ -82,7 +101,7 @@ jobs:
- name: Configure CPython
run: ./configure --with-pydebug --with-openssl=$PWD/multissl/openssl/$OPENSSL_VER
- name: Build CPython
- run: make -s -j4
+ run: make -j4
- name: Display build info
run: make pythoninfo
- name: Tests
diff --git a/.github/workflows/build_msi.yml b/.github/workflows/build_msi.yml
new file mode 100644
index 00000000000000..182eb7ce571671
--- /dev/null
+++ b/.github/workflows/build_msi.yml
@@ -0,0 +1,36 @@
+name: TestsMSI
+
+on:
+ push:
+ branches:
+ - master
+ - 3.9
+ - 3.8
+ - 3.7
+ paths:
+ - 'Tools/msi/**'
+ pull_request:
+ branches:
+ - master
+ - 3.9
+ - 3.8
+ - 3.7
+ paths:
+ - 'Tools/msi/**'
+
+jobs:
+ build_win32:
+ name: 'Windows (x86) Installer'
+ runs-on: windows-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Build CPython installer
+ run: .\Tools\msi\build.bat -x86
+
+ build_win_amd64:
+ name: 'Windows (x64) Installer'
+ runs-on: windows-latest
+ steps:
+ - uses: actions/checkout@v2
+ - name: Build CPython installer
+ run: .\Tools\msi\build.bat -x64
diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml
index e8b47b390e5a79..035348e8a69325 100644
--- a/.github/workflows/coverage.yml
+++ b/.github/workflows/coverage.yml
@@ -4,6 +4,7 @@ on:
push:
branches:
- master
+ - 3.9
- 3.8
- 3.7
paths-ignore:
@@ -12,6 +13,7 @@ on:
#pull_request:
# branches:
# - master
+ # - 3.9
# - 3.8
# - 3.7
# paths-ignore:
@@ -23,9 +25,9 @@ jobs:
name: 'Ubuntu (Coverage)'
runs-on: ubuntu-latest
env:
- OPENSSL_VER: 1.1.1d
+ OPENSSL_VER: 1.1.1f
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v2
- name: Install Dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: 'Restore OpenSSL build'
@@ -40,7 +42,7 @@ jobs:
- name: Configure CPython
run: ./configure --with-openssl=$PWD/multissl/openssl/$OPENSSL_VER
- name: Build CPython
- run: make -s -j4
+ run: make -j4
- name: Display build info
run: make pythoninfo
- name: 'Coverage Preparation'
@@ -48,7 +50,9 @@ jobs:
./python -m venv .venv
source ./.venv/bin/activate
python -m pip install -U coverage
+ python -m pip install -r Misc/requirements-test.txt
python -m test.pythoninfo
+ export PYTHONPATH=`find .venv -name fullcoverage`
- name: 'Tests with coverage'
run: >
source ./.venv/bin/activate &&
@@ -64,8 +68,9 @@ jobs:
|| true
- name: 'Publish code coverage results'
run: |
+ export PYTHONPATH=
source ./.venv/bin/activate
- bash <(curl -s https://codecov.io/bash)
+ bash <(curl -s https://codecov.io/bash) -y .github/codecov.yml
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
@@ -73,7 +78,7 @@ jobs:
name: 'Ubuntu (C Coverage)'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v2
- name: Install Dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Configure CPython
@@ -84,6 +89,6 @@ jobs:
if: always()
run: |
make pythoninfo
- bash <(curl -s https://codecov.io/bash)
+ bash <(curl -s https://codecov.io/bash) -y .github/codecov.yml
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml
index 5bba8e690655fa..88c9cbd797ab2d 100644
--- a/.github/workflows/doc.yml
+++ b/.github/workflows/doc.yml
@@ -4,6 +4,7 @@ on:
#push:
# branches:
# - master
+ # - 3.9
# - 3.8
# - 3.7
# paths:
@@ -11,6 +12,7 @@ on:
pull_request:
branches:
- master
+ - 3.9
- 3.8
- 3.7
paths:
@@ -22,17 +24,19 @@ jobs:
name: 'Docs'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v2
+ - name: Register Sphinx problem matcher
+ run: echo "::add-matcher::.github/problem-matchers/sphinx.json"
- name: 'Install Dependencies'
run: sudo ./.github/workflows/posix-deps-apt.sh && sudo apt-get install wamerican
- name: 'Configure CPython'
run: ./configure --with-pydebug
- name: 'Build CPython'
- run: make -s -j4
+ run: make -j4
- name: 'Install build dependencies'
run: make -C Doc/ PYTHON=../python venv
- name: 'Build documentation'
- run: xvfb-run make -C Doc/ PYTHON=../python SPHINXOPTS="-q -W -j4" doctest suspicious html
+ run: xvfb-run make -C Doc/ PYTHON=../python SPHINXOPTS="-q -W --keep-going -j4" doctest suspicious html
- name: 'Upload'
uses: actions/upload-artifact@v1
with:
diff --git a/.gitignore b/.gitignore
index 8c1f8a43075a48..0962d3a1841fc4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -71,6 +71,7 @@ Programs/_freeze_importlib
Programs/_testembed
PC/python_nt*.h
PC/pythonnt_rc*.h
+Modules/python.exp
PC/*/*.exp
PC/*/*.lib
PC/*/*.bsc
diff --git a/.travis.yml b/.travis.yml
index b1d50d49ca5cc3..5d57150e61c186 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,5 +1,5 @@
language: c
-dist: xenial
+dist: bionic
# To cache doc-building dependencies and C compiler output.
cache:
@@ -10,7 +10,7 @@ cache:
env:
global:
- - OPENSSL=1.1.1d
+ - OPENSSL=1.1.1f
- OPENSSL_DIR="$HOME/multissl/openssl/${OPENSSL}"
- PATH="${OPENSSL_DIR}/bin:$PATH"
- CFLAGS="-I${OPENSSL_DIR}/include"
@@ -87,14 +87,17 @@ matrix:
# Need a venv that can parse covered code.
- ./python -m venv venv
- ./venv/bin/python -m pip install -U coverage
+ - ./venv/bin/python -m pip install -r Misc/requirements-test.txt
- ./venv/bin/python -m test.pythoninfo
+ - export PYTHONPATH=`find venv -name fullcoverage`
script:
# Skip tests that re-run the entire test suite.
- xvfb-run ./venv/bin/python -m coverage run --branch --pylib -m test --fail-env-changed -uall,-cpu -x test_multiprocessing_fork -x test_multiprocessing_forkserver -x test_multiprocessing_spawn -x test_concurrent_futures || true
after_script: # Probably should be after_success once test suite updated to run under coverage.py.
# Make the `coverage` command available to Codecov w/ a version of Python that can parse all source files.
+ - export PYTHONPATH=
- source ./venv/bin/activate
- - bash <(curl -s https://codecov.io/bash)
+ - bash <(curl -s https://codecov.io/bash) -y .github/codecov.yml
- name: "Test code coverage (C)"
os: linux
language: c
@@ -111,7 +114,7 @@ matrix:
- xvfb-run make -j4 coverage-report
after_script: # Probably should be after_success once test suite updated to run under coverage.py.
- make pythoninfo
- - bash <(curl -s https://codecov.io/bash)
+ - bash <(curl -s https://codecov.io/bash) -y .github/codecov.yml
before_install:
@@ -157,7 +160,9 @@ install:
before_script:
# -Og is much faster than -O0
- CFLAGS="${CFLAGS} -Og" ./configure --with-pydebug
- - make -j4 regen-all
+ - eval "$(pyenv init -)"
+ - pyenv global 3.8
+ - PYTHON_FOR_REGEN=python3.8 make -j4 regen-all
- changes=`git status --porcelain`
- |
# Check for changes in regenerated files
diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst
index f17c63d0b9dc93..b7baad589a72c8 100644
--- a/Doc/c-api/arg.rst
+++ b/Doc/c-api/arg.rst
@@ -105,7 +105,7 @@ which disallows mutable objects such as :class:`bytearray`.
Like ``s*``, but the Python object may also be ``None``, in which case the
``buf`` member of the :c:type:`Py_buffer` structure is set to ``NULL``.
-``z#`` (:class:`str`, read-only :term:`bytes-like object` or ``None``) [const char \*, int]
+``z#`` (:class:`str`, read-only :term:`bytes-like object` or ``None``) [const char \*, int or :c:type:`Py_ssize_t`]
Like ``s#``, but the Python object may also be ``None``, in which case the C
pointer is set to ``NULL``.
@@ -124,7 +124,7 @@ which disallows mutable objects such as :class:`bytearray`.
bytes-like objects. **This is the recommended way to accept
binary data.**
-``y#`` (read-only :term:`bytes-like object`) [const char \*, int]
+``y#`` (read-only :term:`bytes-like object`) [const char \*, int or :c:type:`Py_ssize_t`]
This variant on ``s#`` doesn't accept Unicode objects, only bytes-like
objects.
@@ -155,7 +155,7 @@ which disallows mutable objects such as :class:`bytearray`.
Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
:c:func:`PyUnicode_AsWideCharString`.
-``u#`` (:class:`str`) [const Py_UNICODE \*, int]
+``u#`` (:class:`str`) [const Py_UNICODE \*, int or :c:type:`Py_ssize_t`]
This variant on ``u`` stores into two C variables, the first one a pointer to a
Unicode data buffer, the second one its length. This variant allows
null code points.
@@ -172,7 +172,7 @@ which disallows mutable objects such as :class:`bytearray`.
Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using
:c:func:`PyUnicode_AsWideCharString`.
-``Z#`` (:class:`str` or ``None``) [const Py_UNICODE \*, int]
+``Z#`` (:class:`str` or ``None``) [const Py_UNICODE \*, int or :c:type:`Py_ssize_t`]
Like ``u#``, but the Python object may also be ``None``, in which case the
:c:type:`Py_UNICODE` pointer is set to ``NULL``.
@@ -213,7 +213,7 @@ which disallows mutable objects such as :class:`bytearray`.
recoding them. Instead, the implementation assumes that the byte string object uses
the encoding passed in as parameter.
-``es#`` (:class:`str`) [const char \*encoding, char \*\*buffer, int \*buffer_length]
+``es#`` (:class:`str`) [const char \*encoding, char \*\*buffer, int or :c:type:`Py_ssize_t` \*buffer_length]
This variant on ``s#`` is used for encoding Unicode into a character buffer.
Unlike the ``es`` format, this variant allows input data which contains NUL
characters.
@@ -244,7 +244,7 @@ which disallows mutable objects such as :class:`bytearray`.
In both cases, *\*buffer_length* is set to the length of the encoded data
without the trailing NUL byte.
-``et#`` (:class:`str`, :class:`bytes` or :class:`bytearray`) [const char \*encoding, char \*\*buffer, int \*buffer_length]
+``et#`` (:class:`str`, :class:`bytes` or :class:`bytearray`) [const char \*encoding, char \*\*buffer, int or :c:type:`Py_ssize_t` \*buffer_length]
Same as ``es#`` except that byte string objects are passed through without recoding
them. Instead, the implementation assumes that the byte string object uses the
encoding passed in as parameter.
@@ -549,7 +549,7 @@ Building values
Convert a null-terminated C string to a Python :class:`str` object using ``'utf-8'``
encoding. If the C string pointer is ``NULL``, ``None`` is used.
- ``s#`` (:class:`str` or ``None``) [const char \*, int]
+ ``s#`` (:class:`str` or ``None``) [const char \*, int or :c:type:`Py_ssize_t`]
Convert a C string and its length to a Python :class:`str` object using ``'utf-8'``
encoding. If the C string pointer is ``NULL``, the length is ignored and
``None`` is returned.
@@ -558,14 +558,14 @@ Building values
This converts a C string to a Python :class:`bytes` object. If the C
string pointer is ``NULL``, ``None`` is returned.
- ``y#`` (:class:`bytes`) [const char \*, int]
+ ``y#`` (:class:`bytes`) [const char \*, int or :c:type:`Py_ssize_t`]
This converts a C string and its lengths to a Python object. If the C
string pointer is ``NULL``, ``None`` is returned.
``z`` (:class:`str` or ``None``) [const char \*]
Same as ``s``.
- ``z#`` (:class:`str` or ``None``) [const char \*, int]
+ ``z#`` (:class:`str` or ``None``) [const char \*, int or :c:type:`Py_ssize_t`]
Same as ``s#``.
``u`` (:class:`str`) [const wchar_t \*]
@@ -573,7 +573,7 @@ Building values
data to a Python Unicode object. If the Unicode buffer pointer is ``NULL``,
``None`` is returned.
- ``u#`` (:class:`str`) [const wchar_t \*, int]
+ ``u#`` (:class:`str`) [const wchar_t \*, int or :c:type:`Py_ssize_t`]
Convert a Unicode (UTF-16 or UCS-4) data buffer and its length to a Python
Unicode object. If the Unicode buffer pointer is ``NULL``, the length is ignored
and ``None`` is returned.
@@ -581,7 +581,7 @@ Building values
``U`` (:class:`str` or ``None``) [const char \*]
Same as ``s``.
- ``U#`` (:class:`str` or ``None``) [const char \*, int]
+ ``U#`` (:class:`str` or ``None``) [const char \*, int or :c:type:`Py_ssize_t`]
Same as ``s#``.
``i`` (:class:`int`) [int]
diff --git a/Doc/c-api/call.rst b/Doc/c-api/call.rst
index 0833531b1d5ee1..0832e7e2193600 100644
--- a/Doc/c-api/call.rst
+++ b/Doc/c-api/call.rst
@@ -35,17 +35,11 @@ To call an object, use :c:func:`PyObject_Call` or other
The Vectorcall Protocol
-----------------------
-.. versionadded:: 3.8
+.. versionadded:: 3.9
The vectorcall protocol was introduced in :pep:`590` as an additional protocol
for making calls more efficient.
-.. warning::
-
- The vectorcall API is provisional and expected to become public in
- Python 3.9, with a different names and, possibly, changed semantics.
- If you use the it, plan for updating your code for Python 3.9.
-
As rule of thumb, CPython will prefer the vectorcall for internal calls
if the callable supports it. However, this is not a hard rule.
Additionally, some third-party extensions use *tp_call* directly
@@ -69,7 +63,7 @@ the arguments to an args tuple and kwargs dict anyway, then there is no point
in implementing vectorcall.
Classes can implement the vectorcall protocol by enabling the
-:const:`_Py_TPFLAGS_HAVE_VECTORCALL` flag and setting
+:const:`Py_TPFLAGS_HAVE_VECTORCALL` flag and setting
:c:member:`~PyTypeObject.tp_vectorcall_offset` to the offset inside the
object structure where a *vectorcallfunc* appears.
This is a pointer to a function with the following signature:
@@ -97,7 +91,7 @@ This is a pointer to a function with the following signature:
argument 1 (not 0) in the allocated vector.
The callee must restore the value of ``args[-1]`` before returning.
- For :c:func:`_PyObject_VectorcallMethod`, this flag means instead that
+ For :c:func:`PyObject_VectorcallMethod`, this flag means instead that
``args[0]`` may be changed.
Whenever they can do so cheaply (without additional allocation), callers
@@ -107,7 +101,20 @@ This is a pointer to a function with the following signature:
To call an object that implements vectorcall, use a :ref:`call API `
function as with any other callable.
-:c:func:`_PyObject_Vectorcall` will usually be most efficient.
+:c:func:`PyObject_Vectorcall` will usually be most efficient.
+
+
+.. note::
+
+ In CPython 3.8, the vectorcall API and related functions were available
+ provisionally under names with a leading underscore:
+ ``_PyObject_Vectorcall``, ``_Py_TPFLAGS_HAVE_VECTORCALL``,
+ ``_PyObject_VectorcallMethod``, ``_PyVectorcall_Function``,
+ ``_PyObject_CallOneArg``, ``_PyObject_CallMethodNoArgs``,
+ ``_PyObject_CallMethodOneArg``.
+ Additionally, ``PyObject_VectorcallDict`` was available as
+ ``_PyObject_FastCallDict``.
+ The old names are still defined as aliases of the new, non-underscored names.
Recursion Control
@@ -137,9 +144,11 @@ Vectorcall Support API
However, the function ``PyVectorcall_NARGS`` should be used to allow
for future extensions.
+ This function is not part of the :ref:`limited API `.
+
.. versionadded:: 3.8
-.. c:function:: vectorcallfunc _PyVectorcall_Function(PyObject *op)
+.. c:function:: vectorcallfunc PyVectorcall_Function(PyObject *op)
If *op* does not support the vectorcall protocol (either because the type
does not or because the specific instance does not), return *NULL*.
@@ -147,7 +156,9 @@ Vectorcall Support API
This function never raises an exception.
This is mostly useful to check whether or not *op* supports vectorcall,
- which can be done by checking ``_PyVectorcall_Function(op) != NULL``.
+ which can be done by checking ``PyVectorcall_Function(op) != NULL``.
+
+ This function is not part of the :ref:`limited API `.
.. versionadded:: 3.8
@@ -158,9 +169,11 @@ Vectorcall Support API
This is a specialized function, intended to be put in the
:c:member:`~PyTypeObject.tp_call` slot or be used in an implementation of ``tp_call``.
- It does not check the :const:`_Py_TPFLAGS_HAVE_VECTORCALL` flag
+ It does not check the :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag
and it does not fall back to ``tp_call``.
+ This function is not part of the :ref:`limited API `.
+
.. versionadded:: 3.8
@@ -185,7 +198,7 @@ please see individual documentation for details.
+------------------------------------------+------------------+--------------------+---------------+
| :c:func:`PyObject_CallNoArgs` | ``PyObject *`` | --- | --- |
+------------------------------------------+------------------+--------------------+---------------+
-| :c:func:`_PyObject_CallOneArg` | ``PyObject *`` | 1 object | --- |
+| :c:func:`PyObject_CallOneArg` | ``PyObject *`` | 1 object | --- |
+------------------------------------------+------------------+--------------------+---------------+
| :c:func:`PyObject_CallObject` | ``PyObject *`` | tuple/``NULL`` | --- |
+------------------------------------------+------------------+--------------------+---------------+
@@ -197,15 +210,15 @@ please see individual documentation for details.
+------------------------------------------+------------------+--------------------+---------------+
| :c:func:`PyObject_CallMethodObjArgs` | obj + name | variadic | --- |
+------------------------------------------+------------------+--------------------+---------------+
-| :c:func:`_PyObject_CallMethodNoArgs` | obj + name | --- | --- |
+| :c:func:`PyObject_CallMethodNoArgs` | obj + name | --- | --- |
+------------------------------------------+------------------+--------------------+---------------+
-| :c:func:`_PyObject_CallMethodOneArg` | obj + name | 1 object | --- |
+| :c:func:`PyObject_CallMethodOneArg` | obj + name | 1 object | --- |
+------------------------------------------+------------------+--------------------+---------------+
-| :c:func:`_PyObject_Vectorcall` | ``PyObject *`` | vectorcall | vectorcall |
+| :c:func:`PyObject_Vectorcall` | ``PyObject *`` | vectorcall | vectorcall |
+------------------------------------------+------------------+--------------------+---------------+
-| :c:func:`_PyObject_FastCallDict` | ``PyObject *`` | vectorcall | dict/``NULL`` |
+| :c:func:`PyObject_VectorcallDict` | ``PyObject *`` | vectorcall | dict/``NULL`` |
+------------------------------------------+------------------+--------------------+---------------+
-| :c:func:`_PyObject_VectorcallMethod` | arg + name | vectorcall | vectorcall |
+| :c:func:`PyObject_VectorcallMethod` | arg + name | vectorcall | vectorcall |
+------------------------------------------+------------------+--------------------+---------------+
@@ -235,7 +248,7 @@ please see individual documentation for details.
.. versionadded:: 3.9
-.. c:function:: PyObject* _PyObject_CallOneArg(PyObject *callable, PyObject *arg)
+.. c:function:: PyObject* PyObject_CallOneArg(PyObject *callable, PyObject *arg)
Call a callable Python object *callable* with exactly 1 positional argument
*arg* and no keyword arguments.
@@ -243,6 +256,8 @@ please see individual documentation for details.
Return the result of the call on success, or raise an exception and return
*NULL* on failure.
+ This function is not part of the :ref:`limited API <stable>`.
+
.. versionadded:: 3.9
@@ -320,7 +335,7 @@ please see individual documentation for details.
*NULL* on failure.
-.. c:function:: PyObject* _PyObject_CallMethodNoArgs(PyObject *obj, PyObject *name)
+.. c:function:: PyObject* PyObject_CallMethodNoArgs(PyObject *obj, PyObject *name)
Call a method of the Python object *obj* without arguments,
where the name of the method is given as a Python string object in *name*.
@@ -328,10 +343,12 @@ please see individual documentation for details.
Return the result of the call on success, or raise an exception and return
*NULL* on failure.
+ This function is not part of the :ref:`limited API <stable>`.
+
.. versionadded:: 3.9
-.. c:function:: PyObject* _PyObject_CallMethodOneArg(PyObject *obj, PyObject *name, PyObject *arg)
+.. c:function:: PyObject* PyObject_CallMethodOneArg(PyObject *obj, PyObject *name, PyObject *arg)
Call a method of the Python object *obj* with a single positional argument
*arg*, where the name of the method is given as a Python string object in
@@ -340,10 +357,12 @@ please see individual documentation for details.
Return the result of the call on success, or raise an exception and return
*NULL* on failure.
+ This function is not part of the :ref:`limited API <stable>`.
+
.. versionadded:: 3.9
-.. c:function:: PyObject* _PyObject_Vectorcall(PyObject *callable, PyObject *const *args, size_t nargsf, PyObject *kwnames)
+.. c:function:: PyObject* PyObject_Vectorcall(PyObject *callable, PyObject *const *args, size_t nargsf, PyObject *kwnames)
Call a callable Python object *callable*.
The arguments are the same as for :c:type:`vectorcallfunc`.
@@ -353,15 +372,11 @@ please see individual documentation for details.
Return the result of the call on success, or raise an exception and return
*NULL* on failure.
- .. note::
-
- This function is provisional and expected to become public in Python 3.9,
- with a different name and, possibly, changed semantics.
- If you use the function, plan for updating your code for Python 3.9.
+ This function is not part of the :ref:`limited API <stable>`.
- .. versionadded:: 3.8
+ .. versionadded:: 3.9
-.. c:function:: PyObject* _PyObject_FastCallDict(PyObject *callable, PyObject *const *args, size_t nargsf, PyObject *kwdict)
+.. c:function:: PyObject* PyObject_VectorcallDict(PyObject *callable, PyObject *const *args, size_t nargsf, PyObject *kwdict)
Call *callable* with positional arguments passed exactly as in the vectorcall_ protocol,
but with keyword arguments passed as a dictionary *kwdict*.
@@ -373,15 +388,11 @@ please see individual documentation for details.
already has a dictionary ready to use for the keyword arguments,
but not a tuple for the positional arguments.
- .. note::
+ This function is not part of the :ref:`limited API <stable>`.
- This function is provisional and expected to become public in Python 3.9,
- with a different name and, possibly, changed semantics.
- If you use the function, plan for updating your code for Python 3.9.
-
- .. versionadded:: 3.8
+ .. versionadded:: 3.9
-.. c:function:: PyObject* _PyObject_VectorcallMethod(PyObject *name, PyObject *const *args, size_t nargsf, PyObject *kwnames)
+.. c:function:: PyObject* PyObject_VectorcallMethod(PyObject *name, PyObject *const *args, size_t nargsf, PyObject *kwnames)
Call a method using the vectorcall calling convention. The name of the method
is given as a Python string *name*. The object whose method is called is
@@ -390,7 +401,7 @@ please see individual documentation for details.
*nargsf* is the number of positional arguments including *args[0]*,
plus :const:`PY_VECTORCALL_ARGUMENTS_OFFSET` if the value of ``args[0]`` may
temporarily be changed. Keyword arguments can be passed just like in
- :c:func:`_PyObject_Vectorcall`.
+ :c:func:`PyObject_Vectorcall`.
If the object has the :const:`Py_TPFLAGS_METHOD_DESCRIPTOR` feature,
this will call the unbound method object with the full
@@ -399,6 +410,8 @@ please see individual documentation for details.
Return the result of the call on success, or raise an exception and return
*NULL* on failure.
+ This function is not part of the :ref:`limited API <stable>`.
+
.. versionadded:: 3.9
diff --git a/Doc/c-api/contextvars.rst b/Doc/c-api/contextvars.rst
index 38256a3b0f2a06..9c088814314a81 100644
--- a/Doc/c-api/contextvars.rst
+++ b/Doc/c-api/contextvars.rst
@@ -101,11 +101,6 @@ Context object management functions:
current context for the current thread. Returns ``0`` on success,
and ``-1`` on error.
-.. c:function:: int PyContext_ClearFreeList()
-
- Clear the context variable free list. Return the total number of
- freed items. This function always succeeds.
-
Context variable functions:
diff --git a/Doc/c-api/dict.rst b/Doc/c-api/dict.rst
index e48c11d336b8ce..2fb29cdd617789 100644
--- a/Doc/c-api/dict.rst
+++ b/Doc/c-api/dict.rst
@@ -232,10 +232,3 @@ Dictionary Objects
for key, value in seq2:
if override or key not in a:
a[key] = value
-
-
-.. c:function:: int PyDict_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
- .. versionadded:: 3.3
diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst
index 2edcbf788d2ade..e7805ba143c584 100644
--- a/Doc/c-api/exceptions.rst
+++ b/Doc/c-api/exceptions.rst
@@ -358,7 +358,7 @@ an error value).
.. c:function:: int PyErr_ResourceWarning(PyObject *source, Py_ssize_t stack_level, const char *format, ...)
Function similar to :c:func:`PyErr_WarnFormat`, but *category* is
- :exc:`ResourceWarning` and pass *source* to :func:`warnings.WarningMessage`.
+ :exc:`ResourceWarning` and it passes *source* to :func:`warnings.WarningMessage`.
.. versionadded:: 3.6
@@ -983,9 +983,6 @@ Notes:
This is a base class for other standard exceptions.
(2)
- This is the same as :exc:`weakref.ReferenceError`.
-
-(3)
Only defined on Windows; protect code that uses this by testing that the
preprocessor macro ``MS_WINDOWS`` is defined.
diff --git a/Doc/c-api/float.rst b/Doc/c-api/float.rst
index bfc28a79ecfdc7..b29937dbecdcfd 100644
--- a/Doc/c-api/float.rst
+++ b/Doc/c-api/float.rst
@@ -76,8 +76,3 @@ Floating Point Objects
.. c:function:: double PyFloat_GetMin()
Return the minimum normalized positive float *DBL_MIN* as C :c:type:`double`.
-
-.. c:function:: int PyFloat_ClearFreeList()
-
- Clear the float free list. Return the number of items that could not
- be freed.
diff --git a/Doc/c-api/gcsupport.rst b/Doc/c-api/gcsupport.rst
index 924a7fd2fda4d5..eee114c19d5904 100644
--- a/Doc/c-api/gcsupport.rst
+++ b/Doc/c-api/gcsupport.rst
@@ -61,6 +61,33 @@ Constructors for container types must conform to two rules:
end of the constructor.
+.. c:function:: int PyObject_IS_GC(PyObject *obj)
+
+ Returns non-zero if the object implements the garbage collector protocol,
+ otherwise returns 0.
+
+ The object cannot be tracked by the garbage collector if this function returns 0.
+
+
+.. c:function:: int PyObject_GC_IsTracked(PyObject *op)
+
+ Returns 1 if the object type of *op* implements the GC protocol and *op* is being
+ currently tracked by the garbage collector and 0 otherwise.
+
+ This is analogous to the Python function :func:`gc.is_tracked`.
+
+ .. versionadded:: 3.9
+
+
+.. c:function:: int PyObject_GC_IsFinalized(PyObject *op)
+
+ Returns 1 if the object type of *op* implements the GC protocol and *op* has been
+ already finalized by the garbage collector and 0 otherwise.
+
+ This is analogous to the Python function :func:`gc.is_finalized`.
+
+ .. versionadded:: 3.9
+
Similarly, the deallocator for the object must conform to a similar pair of
rules:
diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst
index 14049ee64205f1..68fed2acc447ee 100644
--- a/Doc/c-api/init.rst
+++ b/Doc/c-api/init.rst
@@ -842,12 +842,12 @@ code, or when embedding the Python interpreter:
single: PyEval_SaveThread()
single: PyEval_RestoreThread()
- Initialize and acquire the global interpreter lock. It should be called in the
- main thread before creating a second thread or engaging in any other thread
- operations such as ``PyEval_ReleaseThread(tstate)``. It is not needed before
- calling :c:func:`PyEval_SaveThread` or :c:func:`PyEval_RestoreThread`.
+ Deprecated function which does nothing.
- This is a no-op when called for a second time.
+ In Python 3.6 and older, this function created the GIL if it didn't exist.
+
+ .. versionchanged:: 3.9
+ The function now does nothing.
.. versionchanged:: 3.7
This function is now called by :c:func:`Py_Initialize()`, so you don't
@@ -856,6 +856,8 @@ code, or when embedding the Python interpreter:
.. versionchanged:: 3.2
This function cannot be called before :c:func:`Py_Initialize()` anymore.
+ .. deprecated-removed:: 3.9 3.11
+
.. index:: module: _thread
@@ -868,6 +870,8 @@ code, or when embedding the Python interpreter:
.. versionchanged:: 3.7
The :term:`GIL` is now initialized by :c:func:`Py_Initialize()`.
+ .. deprecated-removed:: 3.9 3.11
+
.. c:function:: PyThreadState* PyEval_SaveThread()
@@ -1060,19 +1064,65 @@ All of the following functions must be called after :c:func:`Py_Initialize`.
:c:func:`PyThreadState_Clear`.
- .. c:function:: void PyThreadState_DeleteCurrent()
+.. c:function:: void PyThreadState_DeleteCurrent(void)
+
+ Destroy the current thread state and release the global interpreter lock.
+ Like :c:func:`PyThreadState_Delete`, the global interpreter lock need not
+ be held. The thread state must have been reset with a previous call
+ to :c:func:`PyThreadState_Clear`.
+
+
+.. c:function:: PyFrameObject* PyThreadState_GetFrame(PyThreadState *tstate)
+
+ Get the current frame of the Python thread state *tstate*.
+
+ Return a strong reference. Return ``NULL`` if no frame is currently
+ executing.
+
+ See also :c:func:`PyEval_GetFrame`.
+
+ *tstate* must not be ``NULL``.
+
+ .. versionadded:: 3.9
+
+
+.. c:function:: uint64_t PyThreadState_GetID(PyThreadState *tstate)
+
+ Get the unique thread state identifier of the Python thread state *tstate*.
+
+ *tstate* must not be ``NULL``.
+
+ .. versionadded:: 3.9
+
+
+.. c:function:: PyInterpreterState* PyThreadState_GetInterpreter(PyThreadState *tstate)
+
+ Get the interpreter of the Python thread state *tstate*.
+
+ *tstate* must not be ``NULL``.
+
+ .. versionadded:: 3.9
+
+
+.. c:function:: PyInterpreterState* PyInterpreterState_Get(void)
- Destroy the current thread state and release the global interpreter lock.
- Like :c:func:`PyThreadState_Delete`, the global interpreter lock need not
- be held. The thread state must have been reset with a previous call
- to :c:func:`PyThreadState_Clear`.
+ Get the current interpreter.
+ Issue a fatal error if there is no current Python thread state or no current
+ interpreter. It cannot return NULL.
-.. c:function:: PY_INT64_T PyInterpreterState_GetID(PyInterpreterState *interp)
+ The caller must hold the GIL.
+
+ .. versionadded:: 3.9
+
+
+.. c:function:: int64_t PyInterpreterState_GetID(PyInterpreterState *interp)
Return the interpreter's unique ID. If there was any error in doing
so then ``-1`` is returned and an error is set.
+ The caller must hold the GIL.
+
.. versionadded:: 3.7
@@ -1087,6 +1137,32 @@ All of the following functions must be called after :c:func:`Py_Initialize`.
.. versionadded:: 3.8
+.. c:type:: PyObject* (*_PyFrameEvalFunction)(PyThreadState *tstate, PyFrameObject *frame, int throwflag)
+
+ Type of a frame evaluation function.
+
+ The *throwflag* parameter is used by the ``throw()`` method of generators:
+ if non-zero, handle the current exception.
+
+ .. versionchanged:: 3.9
+ The function now takes a *tstate* parameter.
+
+.. c:function:: _PyFrameEvalFunction _PyInterpreterState_GetEvalFrameFunc(PyInterpreterState *interp)
+
+ Get the frame evaluation function.
+
+ See the :pep:`523` "Adding a frame evaluation API to CPython".
+
+ .. versionadded:: 3.9
+
+.. c:function:: void _PyInterpreterState_SetEvalFrameFunc(PyInterpreterState *interp, _PyFrameEvalFunction eval_frame)
+
+ Set the frame evaluation function.
+
+ See the :pep:`523` "Adding a frame evaluation API to CPython".
+
+ .. versionadded:: 3.9
+
.. c:function:: PyObject* PyThreadState_GetDict()
@@ -1338,6 +1414,10 @@ pointer and a void pointer argument.
This function doesn't need a current thread state to run, and it doesn't
need the global interpreter lock.
+ To call this function in a subinterpreter, the caller must hold the GIL.
+ Otherwise, the function *func* can be scheduled to be called from the wrong
+ interpreter.
+
.. warning::
This is a low-level function, only useful for very special cases.
There is no guarantee that *func* will be called as quick as
@@ -1346,6 +1426,12 @@ pointer and a void pointer argument.
function is generally **not** suitable for calling Python code from
arbitrary C threads. Instead, use the :ref:`PyGILState API <gilstate>`.
+ .. versionchanged:: 3.9
+ If this function is called in a subinterpreter, the function *func* is
+ now scheduled to be called from the subinterpreter, rather than being
+ called from the main interpreter. Each subinterpreter now has its own
+ list of scheduled calls.
+
.. versionadded:: 3.1
.. _profiling:
@@ -1468,6 +1554,8 @@ Python-level trace functions in previous versions.
profile function is called for all monitored events except :const:`PyTrace_LINE`
:const:`PyTrace_OPCODE` and :const:`PyTrace_EXCEPTION`.
+ The caller must hold the :term:`GIL`.
+
.. c:function:: void PyEval_SetTrace(Py_tracefunc func, PyObject *obj)
@@ -1478,6 +1566,9 @@ Python-level trace functions in previous versions.
will not receive :const:`PyTrace_C_CALL`, :const:`PyTrace_C_EXCEPTION` or
:const:`PyTrace_C_RETURN` as a value for the *what* parameter.
+ The caller must hold the :term:`GIL`.
+
+
.. _advanced-debugging:
Advanced Debugger Support
diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst
index c589a6fe3f0ab6..fc82c3eb590248 100644
--- a/Doc/c-api/init_config.rst
+++ b/Doc/c-api/init_config.rst
@@ -472,7 +472,7 @@ PyConfig
If non-zero, dump all objects which are still alive at exit.
- Require a debug build of Python (``Py_REF_DEBUG`` macro must be defined).
+ ``Py_TRACE_REFS`` macro must be defined in build.
.. c:member:: wchar_t* exec_prefix
@@ -686,6 +686,16 @@ PyConfig
:data:`sys._xoptions`.
+ .. c:member:: int _use_peg_parser
+
+ Enable PEG parser? Default: 1.
+
+ Set to 0 by :option:`-X oldparser <-X>` and :envvar:`PYTHONOLDPARSER`.
+
+ See also :pep:`617`.
+
+ .. deprecated-removed:: 3.9 3.10
+
If ``parse_argv`` is non-zero, ``argv`` arguments are parsed the same
way the regular Python parses command line arguments, and Python
arguments are stripped from ``argv``: see :ref:`Command Line Arguments
@@ -994,6 +1004,8 @@ Private provisional API:
* :c:member:`PyConfig._init_main`: if set to 0,
:c:func:`Py_InitializeFromConfig` stops at the "Core" initialization phase.
+* :c:member:`PyConfig._isolated_interpreter`: if non-zero,
+ disallow threads, subprocesses and fork.
.. c:function:: PyStatus _Py_InitializeMain(void)
diff --git a/Doc/c-api/intro.rst b/Doc/c-api/intro.rst
index d08d4f97a308e7..e89a788de0d503 100644
--- a/Doc/c-api/intro.rst
+++ b/Doc/c-api/intro.rst
@@ -107,11 +107,24 @@ complete listing.
.. c:macro:: Py_UNREACHABLE()
- Use this when you have a code path that you do not expect to be reached.
+ Use this when you have a code path that cannot be reached by design.
For example, in the ``default:`` clause in a ``switch`` statement for which
all possible values are covered in ``case`` statements. Use this in places
where you might be tempted to put an ``assert(0)`` or ``abort()`` call.
+ In release mode, the macro helps the compiler to optimize the code, and
+ avoids a warning about unreachable code. For example, the macro is
+ implemented with ``__builtin_unreachable()`` on GCC in release mode.
+
+ A use for ``Py_UNREACHABLE()`` is following a call to a function that
+ never returns but that is not declared :c:macro:`_Py_NO_RETURN`.
+
+ If a code path is very unlikely but can be reached in exceptional cases,
+ this macro must not be used. For example, under low memory conditions
+ or if a system call returns a value out of the expected range. In this
+ case, it's better to report the error to the caller. If the error cannot
+ be reported to the caller, :c:func:`Py_FatalError` can be used.
+
.. versionadded:: 3.7
.. c:macro:: Py_ABS(x)
@@ -174,6 +187,39 @@ complete listing.
.. versionchanged:: 3.8
MSVC support was added.
+.. c:macro:: PyDoc_STRVAR(name, str)
+
+ Creates a variable with name ``name`` that can be used in docstrings.
+ If Python is built without docstrings, the value will be empty.
+
+ Use :c:macro:`PyDoc_STRVAR` for docstrings to support building
+ Python without docstrings, as specified in :pep:`7`.
+
+ Example::
+
+ PyDoc_STRVAR(pop_doc, "Remove and return the rightmost element.");
+
+ static PyMethodDef deque_methods[] = {
+ // ...
+ {"pop", (PyCFunction)deque_pop, METH_NOARGS, pop_doc},
+ // ...
+ }
+
+.. c:macro:: PyDoc_STR(str)
+
+ Creates a docstring for the given input string or an empty string
+ if docstrings are disabled.
+
+ Use :c:macro:`PyDoc_STR` in specifying docstrings to support
+ building Python without docstrings, as specified in :pep:`7`.
+
+ Example::
+
+ static PyMethodDef pysqlite_row_methods[] = {
+ {"keys", (PyCFunction)pysqlite_row_keys, METH_NOARGS,
+ PyDoc_STR("Returns the keys of the row.")},
+ {NULL, NULL}
+ };
.. _api-objects:
diff --git a/Doc/c-api/list.rst b/Doc/c-api/list.rst
index b247cdfba0187e..0bc0785f200d4c 100644
--- a/Doc/c-api/list.rst
+++ b/Doc/c-api/list.rst
@@ -142,10 +142,3 @@ List Objects
Return a new tuple object containing the contents of *list*; equivalent to
``tuple(list)``.
-
-
-.. c:function:: int PyList_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
- .. versionadded:: 3.3
diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst
index c5c2aa60dcc35c..a7bd43df90689d 100644
--- a/Doc/c-api/long.rst
+++ b/Doc/c-api/long.rst
@@ -129,9 +129,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
single: OverflowError (built-in exception)
Return a C :c:type:`long` representation of *obj*. If *obj* is not an
- instance of :c:type:`PyLongObject`, first call its :meth:`__index__` or
- :meth:`__int__` method (if present) to convert it to a
- :c:type:`PyLongObject`.
+ instance of :c:type:`PyLongObject`, first call its :meth:`__index__` method
+ (if present) to convert it to a :c:type:`PyLongObject`.
Raise :exc:`OverflowError` if the value of *obj* is out of range for a
:c:type:`long`.
@@ -141,16 +140,15 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. versionchanged:: 3.8
Use :meth:`__index__` if available.
- .. deprecated:: 3.8
- Using :meth:`__int__` is deprecated.
+ .. versionchanged:: 3.10
+ This function will no longer use :meth:`__int__`.
.. c:function:: long PyLong_AsLongAndOverflow(PyObject *obj, int *overflow)
Return a C :c:type:`long` representation of *obj*. If *obj* is not an
- instance of :c:type:`PyLongObject`, first call its :meth:`__index__` or
- :meth:`__int__` method (if present) to convert it to a
- :c:type:`PyLongObject`.
+ instance of :c:type:`PyLongObject`, first call its :meth:`__index__`
+ method (if present) to convert it to a :c:type:`PyLongObject`.
If the value of *obj* is greater than :const:`LONG_MAX` or less than
:const:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, and
@@ -162,8 +160,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. versionchanged:: 3.8
Use :meth:`__index__` if available.
- .. deprecated:: 3.8
- Using :meth:`__int__` is deprecated.
+ .. versionchanged:: 3.10
+ This function will no longer use :meth:`__int__`.
.. c:function:: long long PyLong_AsLongLong(PyObject *obj)
@@ -172,9 +170,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
single: OverflowError (built-in exception)
Return a C :c:type:`long long` representation of *obj*. If *obj* is not an
- instance of :c:type:`PyLongObject`, first call its :meth:`__index__` or
- :meth:`__int__` method (if present) to convert it to a
- :c:type:`PyLongObject`.
+ instance of :c:type:`PyLongObject`, first call its :meth:`__index__` method
+ (if present) to convert it to a :c:type:`PyLongObject`.
Raise :exc:`OverflowError` if the value of *obj* is out of range for a
:c:type:`long long`.
@@ -184,16 +181,15 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. versionchanged:: 3.8
Use :meth:`__index__` if available.
- .. deprecated:: 3.8
- Using :meth:`__int__` is deprecated.
+ .. versionchanged:: 3.10
+ This function will no longer use :meth:`__int__`.
.. c:function:: long long PyLong_AsLongLongAndOverflow(PyObject *obj, int *overflow)
Return a C :c:type:`long long` representation of *obj*. If *obj* is not an
- instance of :c:type:`PyLongObject`, first call its :meth:`__index__` or
- :meth:`__int__` method (if present) to convert it to a
- :c:type:`PyLongObject`.
+ instance of :c:type:`PyLongObject`, first call its :meth:`__index__` method
+ (if present) to convert it to a :c:type:`PyLongObject`.
If the value of *obj* is greater than :const:`LLONG_MAX` or less than
:const:`LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively,
@@ -207,8 +203,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. versionchanged:: 3.8
Use :meth:`__index__` if available.
- .. deprecated:: 3.8
- Using :meth:`__int__` is deprecated.
+ .. versionchanged:: 3.10
+ This function will no longer use :meth:`__int__`.
.. c:function:: Py_ssize_t PyLong_AsSsize_t(PyObject *pylong)
@@ -278,10 +274,9 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. c:function:: unsigned long PyLong_AsUnsignedLongMask(PyObject *obj)
- Return a C :c:type:`unsigned long` representation of *obj*. If *obj*
- is not an instance of :c:type:`PyLongObject`, first call its
- :meth:`__index__` or :meth:`__int__` method (if present) to convert
- it to a :c:type:`PyLongObject`.
+ Return a C :c:type:`unsigned long` representation of *obj*. If *obj* is not
+ an instance of :c:type:`PyLongObject`, first call its :meth:`__index__`
+ method (if present) to convert it to a :c:type:`PyLongObject`.
If the value of *obj* is out of range for an :c:type:`unsigned long`,
return the reduction of that value modulo ``ULONG_MAX + 1``.
@@ -292,16 +287,16 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. versionchanged:: 3.8
Use :meth:`__index__` if available.
- .. deprecated:: 3.8
- Using :meth:`__int__` is deprecated.
+ .. versionchanged:: 3.10
+ This function will no longer use :meth:`__int__`.
.. c:function:: unsigned long long PyLong_AsUnsignedLongLongMask(PyObject *obj)
Return a C :c:type:`unsigned long long` representation of *obj*. If *obj*
is not an instance of :c:type:`PyLongObject`, first call its
- :meth:`__index__` or :meth:`__int__` method (if present) to convert
- it to a :c:type:`PyLongObject`.
+ :meth:`__index__` method (if present) to convert it to a
+ :c:type:`PyLongObject`.
If the value of *obj* is out of range for an :c:type:`unsigned long long`,
return the reduction of that value modulo ``ULLONG_MAX + 1``.
@@ -312,8 +307,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. versionchanged:: 3.8
Use :meth:`__index__` if available.
- .. deprecated:: 3.8
- Using :meth:`__int__` is deprecated.
+ .. versionchanged:: 3.10
+ This function will no longer use :meth:`__int__`.
.. c:function:: double PyLong_AsDouble(PyObject *pylong)
diff --git a/Doc/c-api/memory.rst b/Doc/c-api/memory.rst
index ba7bd3b9a53878..8a8542f0479ec5 100644
--- a/Doc/c-api/memory.rst
+++ b/Doc/c-api/memory.rst
@@ -533,7 +533,7 @@ tracemalloc C API
.. versionadded:: 3.7
-.. c:function: int PyTraceMalloc_Track(unsigned int domain, uintptr_t ptr, size_t size)
+.. c:function:: int PyTraceMalloc_Track(unsigned int domain, uintptr_t ptr, size_t size)
Track an allocated memory block in the :mod:`tracemalloc` module.
@@ -542,7 +542,7 @@ tracemalloc C API
If memory block is already tracked, update the existing trace.
-.. c:function: int PyTraceMalloc_Untrack(unsigned int domain, uintptr_t ptr)
+.. c:function:: int PyTraceMalloc_Untrack(unsigned int domain, uintptr_t ptr)
Untrack an allocated memory block in the :mod:`tracemalloc` module.
Do nothing if the block was not tracked.
diff --git a/Doc/c-api/method.rst b/Doc/c-api/method.rst
index b1862d796c9f41..0a5341cbbdf152 100644
--- a/Doc/c-api/method.rst
+++ b/Doc/c-api/method.rst
@@ -92,9 +92,3 @@ no longer available.
.. c:function:: PyObject* PyMethod_GET_SELF(PyObject *meth)
Macro version of :c:func:`PyMethod_Self` which avoids error checking.
-
-
-.. c:function:: int PyMethod_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst
index 57902a9c7f8384..8a415dfa30a35e 100644
--- a/Doc/c-api/module.rst
+++ b/Doc/c-api/module.rst
@@ -153,7 +153,7 @@ or request "multi-phase initialization" by returning the definition struct itsel
.. c:member:: const char *m_doc
Docstring for the module; usually a docstring variable created with
- :c:func:`PyDoc_STRVAR` is used.
+ :c:macro:`PyDoc_STRVAR` is used.
.. c:member:: Py_ssize_t m_size
@@ -196,23 +196,47 @@ or request "multi-phase initialization" by returning the definition struct itsel
.. c:member:: traverseproc m_traverse
A traversal function to call during GC traversal of the module object, or
- ``NULL`` if not needed. This function may be called before module state
- is allocated (:c:func:`PyModule_GetState()` may return `NULL`),
- and before the :c:member:`Py_mod_exec` function is executed.
+ ``NULL`` if not needed.
+
+ This function is not called if the module state was requested but is not
+ allocated yet. This is the case immediately after the module is created
+ and before the module is executed (:c:data:`Py_mod_exec` function). More
+ precisely, this function is not called if :c:member:`m_size` is greater
+ than 0 and the module state (as returned by :c:func:`PyModule_GetState`)
+ is ``NULL``.
+
+ .. versionchanged:: 3.9
+ No longer called before the module state is allocated.
.. c:member:: inquiry m_clear
A clear function to call during GC clearing of the module object, or
- ``NULL`` if not needed. This function may be called before module state
- is allocated (:c:func:`PyModule_GetState()` may return `NULL`),
- and before the :c:member:`Py_mod_exec` function is executed.
+ ``NULL`` if not needed.
+
+ This function is not called if the module state was requested but is not
+ allocated yet. This is the case immediately after the module is created
+ and before the module is executed (:c:data:`Py_mod_exec` function). More
+ precisely, this function is not called if :c:member:`m_size` is greater
+ than 0 and the module state (as returned by :c:func:`PyModule_GetState`)
+ is ``NULL``.
+
+ .. versionchanged:: 3.9
+ No longer called before the module state is allocated.
.. c:member:: freefunc m_free
- A function to call during deallocation of the module object, or ``NULL`` if
- not needed. This function may be called before module state
- is allocated (:c:func:`PyModule_GetState()` may return `NULL`),
- and before the :c:member:`Py_mod_exec` function is executed.
+ A function to call during deallocation of the module object, or ``NULL``
+ if not needed.
+
+ This function is not called if the module state was requested but is not
+ allocated yet. This is the case immediately after the module is created
+ and before the module is executed (:c:data:`Py_mod_exec` function). More
+ precisely, this function is not called if :c:member:`m_size` is greater
+ than 0 and the module state (as returned by :c:func:`PyModule_GetState`)
+ is ``NULL``.
+
+ .. versionchanged:: 3.9
+ No longer called before the module state is allocated.
Single-phase initialization
...........................
@@ -417,7 +441,7 @@ state:
Add an object to *module* as *name*. This is a convenience function which can
be used from the module's initialization function. This steals a reference to
- *value* on success. Return ``-1`` on error, ``0`` on success.
+ *value* on success. Return ``-1`` on error, ``0`` on success.
.. note::
@@ -460,6 +484,16 @@ state:
Add a string constant to *module*.
+.. c:function:: int PyModule_AddType(PyObject *module, PyTypeObject *type)
+
+ Add a type object to *module*.
+ The type object is finalized by calling internally :c:func:`PyType_Ready`.
+ The name of the type object is taken from the last component of
+ :c:member:`~PyTypeObject.tp_name` after dot.
+ Return ``-1`` on error, ``0`` on success.
+
+ .. versionadded:: 3.9
+
Module lookup
^^^^^^^^^^^^^
@@ -493,6 +527,8 @@ since multiple such modules can be created from a single definition.
mechanisms (either by calling it directly, or by referring to its
implementation for details of the required state updates).
+ The caller must hold the GIL.
+
Return 0 on success or -1 on failure.
.. versionadded:: 3.3
@@ -502,4 +538,6 @@ since multiple such modules can be created from a single definition.
Removes the module object created from *def* from the interpreter state.
Return 0 on success or -1 on failure.
+ The caller must hold the GIL.
+
.. versionadded:: 3.3
diff --git a/Doc/c-api/number.rst b/Doc/c-api/number.rst
index 620204ca8e2295..37979bb506bcfa 100644
--- a/Doc/c-api/number.rst
+++ b/Doc/c-api/number.rst
@@ -256,6 +256,10 @@ Number Protocol
Returns the *o* converted to a Python int on success or ``NULL`` with a
:exc:`TypeError` exception raised on failure.
+ .. versionchanged:: 3.10
+ The result always has exact type :class:`int`. Previously, the result
+ could have been an instance of a subclass of ``int``.
+
.. c:function:: PyObject* PyNumber_ToBase(PyObject *n, int base)
diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst
index 2905fbbd0f70c3..b9c137ea352e78 100644
--- a/Doc/c-api/object.rst
+++ b/Doc/c-api/object.rst
@@ -128,7 +128,7 @@ Object Protocol
.. versionadded:: 3.3
-.. c:function:: int PyObject_GenericSetDict(PyObject *o, void *context)
+.. c:function:: int PyObject_GenericSetDict(PyObject *o, PyObject *value, void *context)
A generic implementation for the setter of a ``__dict__`` descriptor. This
implementation does not allow the dictionary to be deleted.
diff --git a/Doc/c-api/reflection.rst b/Doc/c-api/reflection.rst
index 1d86de66ed3eda..9207d86012c8b3 100644
--- a/Doc/c-api/reflection.rst
+++ b/Doc/c-api/reflection.rst
@@ -5,34 +5,60 @@
Reflection
==========
-.. c:function:: PyObject* PyEval_GetBuiltins()
+.. c:function:: PyObject* PyEval_GetBuiltins(void)
Return a dictionary of the builtins in the current execution frame,
or the interpreter of the thread state if no frame is currently executing.
-.. c:function:: PyObject* PyEval_GetLocals()
+.. c:function:: PyObject* PyEval_GetLocals(void)
Return a dictionary of the local variables in the current execution frame,
or ``NULL`` if no frame is currently executing.
-.. c:function:: PyObject* PyEval_GetGlobals()
+.. c:function:: PyObject* PyEval_GetGlobals(void)
Return a dictionary of the global variables in the current execution frame,
or ``NULL`` if no frame is currently executing.
-.. c:function:: PyFrameObject* PyEval_GetFrame()
+.. c:function:: PyFrameObject* PyEval_GetFrame(void)
Return the current thread state's frame, which is ``NULL`` if no frame is
currently executing.
+ See also :c:func:`PyThreadState_GetFrame`.
+
+
+.. c:function:: PyFrameObject* PyFrame_GetBack(PyFrameObject *frame)
+
+ Get the *frame* next outer frame.
+
+ Return a strong reference, or ``NULL`` if *frame* has no outer frame.
+
+ *frame* must not be ``NULL``.
+
+ .. versionadded:: 3.9
+
+
+.. c:function:: PyCodeObject* PyFrame_GetCode(PyFrameObject *frame)
+
+ Get the *frame* code.
+
+ Return a strong reference.
+
+ *frame* must not be ``NULL``. The result (frame code) cannot be ``NULL``.
+
+ .. versionadded:: 3.9
+
.. c:function:: int PyFrame_GetLineNumber(PyFrameObject *frame)
Return the line number that *frame* is currently executing.
+ *frame* must not be ``NULL``.
+
.. c:function:: const char* PyEval_GetFuncName(PyObject *func)
diff --git a/Doc/c-api/set.rst b/Doc/c-api/set.rst
index 54819e8fd6cbdc..879f394d966cd0 100644
--- a/Doc/c-api/set.rst
+++ b/Doc/c-api/set.rst
@@ -157,10 +157,3 @@ subtypes but not for instances of :class:`frozenset` or its subtypes.
.. c:function:: int PySet_Clear(PyObject *set)
Empty an existing set of all elements.
-
-
-.. c:function:: int PySet_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
- .. versionadded:: 3.3
diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst
index 0c661389021eff..b2392fa5e19c5a 100644
--- a/Doc/c-api/structures.rst
+++ b/Doc/c-api/structures.rst
@@ -62,29 +62,62 @@ the definition of all other Python objects.
See documentation of :c:type:`PyVarObject` above.
-.. c:macro:: Py_TYPE(o)
+.. c:function:: PyTypeObject* Py_TYPE(const PyObject *o)
- This macro is used to access the :attr:`ob_type` member of a Python object.
- It expands to::
+ Get the type of the Python object *o*.
- (((PyObject*)(o))->ob_type)
+ Return a borrowed reference.
+ .. versionchanged:: 3.10
+ :c:func:`Py_TYPE()` is changed to an inline static function.
+ Use :c:func:`Py_SET_TYPE()` to set an object type.
-.. c:macro:: Py_REFCNT(o)
- This macro is used to access the :attr:`ob_refcnt` member of a Python
- object.
- It expands to::
+.. c:function:: int Py_IS_TYPE(PyObject *o, PyTypeObject *type)
- (((PyObject*)(o))->ob_refcnt)
+ Return non-zero if the object *o* type is *type*. Return zero otherwise.
+ Equivalent to: ``Py_TYPE(o) == type``.
+ .. versionadded:: 3.9
-.. c:macro:: Py_SIZE(o)
- This macro is used to access the :attr:`ob_size` member of a Python object.
- It expands to::
+.. c:function:: void Py_SET_TYPE(PyObject *o, PyTypeObject *type)
- (((PyVarObject*)(o))->ob_size)
+ Set the object *o* type to *type*.
+
+ .. versionadded:: 3.9
+
+
+.. c:function:: Py_ssize_t Py_REFCNT(const PyObject *o)
+
+ Get the reference count of the Python object *o*.
+
+ .. versionchanged:: 3.10
+ :c:func:`Py_REFCNT()` is changed to an inline static function.
+ Use :c:func:`Py_SET_REFCNT()` to set an object reference count.
+
+
+.. c:function:: void Py_SET_REFCNT(PyObject *o, Py_ssize_t refcnt)
+
+ Set the object *o* reference counter to *refcnt*.
+
+ .. versionadded:: 3.9
+
+
+.. c:function:: Py_ssize_t Py_SIZE(const PyVarObject *o)
+
+ Get the size of the Python object *o*.
+
+ .. versionchanged:: 3.10
+ :c:func:`Py_SIZE()` is changed to an inline static function.
+ Use :c:func:`Py_SET_SIZE()` to set an object size.
+
+
+.. c:function:: void Py_SET_SIZE(PyVarObject *o, Py_ssize_t size)
+
+ Set the object *o* size to *size*.
+
+ .. versionadded:: 3.9
.. c:macro:: PyObject_HEAD_INIT(type)
@@ -118,23 +151,56 @@ Implementing functions and methods
value of the function as exposed in Python. The function must return a new
reference.
+ The function signature is::
+
+ PyObject *PyCFunction(PyObject *self,
+ PyObject *args);
.. c:type:: PyCFunctionWithKeywords
Type of the functions used to implement Python callables in C
with signature :const:`METH_VARARGS | METH_KEYWORDS`.
+ The function signature is::
+
+ PyObject *PyCFunctionWithKeywords(PyObject *self,
+ PyObject *args,
+ PyObject *kwargs);
.. c:type:: _PyCFunctionFast
Type of the functions used to implement Python callables in C
with signature :const:`METH_FASTCALL`.
+ The function signature is::
+ PyObject *_PyCFunctionFast(PyObject *self,
+ PyObject *const *args,
+ Py_ssize_t nargs);
.. c:type:: _PyCFunctionFastWithKeywords
Type of the functions used to implement Python callables in C
with signature :const:`METH_FASTCALL | METH_KEYWORDS`.
+ The function signature is::
+
+ PyObject *_PyCFunctionFastWithKeywords(PyObject *self,
+ PyObject *const *args,
+ Py_ssize_t nargs,
+ PyObject *kwnames);
+
+.. c:type:: PyCMethod
+
+ Type of the functions used to implement Python callables in C
+ with signature :const:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS`.
+ The function signature is::
+
+ PyObject *PyCMethod(PyObject *self,
+ PyTypeObject *defining_class,
+ PyObject *const *args,
+ Py_ssize_t nargs,
+ PyObject *kwnames)
+
+ .. versionadded:: 3.9
.. c:type:: PyMethodDef
@@ -168,9 +234,7 @@ The :attr:`ml_flags` field is a bitfield which can include the following flags.
The individual flags indicate either a calling convention or a binding
convention.
-There are four basic calling conventions for positional arguments
-and two of them can be combined with :const:`METH_KEYWORDS` to support
-also keyword arguments. So there are a total of 6 calling conventions:
+There are these calling conventions:
.. data:: METH_VARARGS
@@ -221,6 +285,19 @@ also keyword arguments. So there are a total of 6 calling conventions:
.. versionadded:: 3.7
+.. data:: METH_METHOD | METH_FASTCALL | METH_KEYWORDS
+
+ Extension of :const:`METH_FASTCALL | METH_KEYWORDS` supporting the *defining
+ class*, that is, the class that contains the method in question.
+ The defining class might be a superclass of ``Py_TYPE(self)``.
+
+ The method needs to be of type :c:type:`PyCMethod`, the same as for
+ ``METH_FASTCALL | METH_KEYWORDS`` with ``defining_class`` argument added after
+ ``self``.
+
+ .. versionadded:: 3.9
+
+
.. data:: METH_NOARGS
Methods without parameters don't need to check whether arguments are given if
@@ -351,9 +428,11 @@ Accessing attributes of extension types
Heap allocated types (created using :c:func:`PyType_FromSpec` or similar),
``PyMemberDef`` may contain definitions for the special members
- ``__dictoffset__`` and ``__weaklistoffset__``, corresponding to
- :c:member:`~PyTypeObject.tp_dictoffset` and
- :c:member:`~PyTypeObject.tp_weaklistoffset` in type objects.
+ ``__dictoffset__``, ``__weaklistoffset__`` and ``__vectorcalloffset__``,
+ corresponding to
+ :c:member:`~PyTypeObject.tp_dictoffset`,
+ :c:member:`~PyTypeObject.tp_weaklistoffset` and
+ :c:member:`~PyTypeObject.tp_vectorcall_offset` in type objects.
These must be defined with ``T_PYSSIZET`` and ``READONLY``, for example::
static PyMemberDef spam_type_members[] = {
diff --git a/Doc/c-api/sys.rst b/Doc/c-api/sys.rst
index c851ff66487d5c..9ac91790978926 100644
--- a/Doc/c-api/sys.rst
+++ b/Doc/c-api/sys.rst
@@ -388,6 +388,13 @@ Process Control
function :c:func:`abort` is called which will attempt to produce a :file:`core`
file.
+ The ``Py_FatalError()`` function is replaced with a macro which logs
+ automatically the name of the current function, unless the
+ ``Py_LIMITED_API`` macro is defined.
+
+ .. versionchanged:: 3.9
+ Log the function name automatically.
+
.. c:function:: void Py_Exit(int status)
diff --git a/Doc/c-api/tuple.rst b/Doc/c-api/tuple.rst
index 62bc9a565071d3..c14cb2d38fd54a 100644
--- a/Doc/c-api/tuple.rst
+++ b/Doc/c-api/tuple.rst
@@ -111,11 +111,6 @@ Tuple Objects
raises :exc:`MemoryError` or :exc:`SystemError`.
-.. c:function:: int PyTuple_ClearFreeList()
-
- Clear the free list. Return the total number of freed items.
-
-
Struct Sequence Objects
-----------------------
diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst
index f774ca35edab92..f387279d143eec 100644
--- a/Doc/c-api/type.rst
+++ b/Doc/c-api/type.rst
@@ -109,6 +109,30 @@ Type Objects
.. versionadded:: 3.4
+.. c:function:: PyObject* PyType_GetModule(PyTypeObject *type)
+
+ Return the module object associated with the given type when the type was
+ created using :c:func:`PyType_FromModuleAndSpec`.
+
+ If no module is associated with the given type, sets :py:class:`TypeError`
+ and returns ``NULL``.
+
+ .. versionadded:: 3.9
+
+.. c:function:: void* PyType_GetModuleState(PyTypeObject *type)
+
+ Return the state of the module object associated with the given type.
+ This is a shortcut for calling :c:func:`PyModule_GetState()` on the result
+ of :c:func:`PyType_GetModule`.
+
+ If no module is associated with the given type, sets :py:class:`TypeError`
+ and returns ``NULL``.
+
+ If the *type* has an associated module but its state is ``NULL``,
+ returns ``NULL`` without setting an exception.
+
+ .. versionadded:: 3.9
+
Creating Heap-Allocated Types
.............................
@@ -116,7 +140,7 @@ Creating Heap-Allocated Types
The following functions and structs are used to create
:ref:`heap types `.
-.. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases)
+.. c:function:: PyObject* PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases)
Creates and returns a heap type object from the *spec*
(:const:`Py_TPFLAGS_HEAPTYPE`).
@@ -127,8 +151,18 @@ The following functions and structs are used to create
If *bases* is ``NULL``, the *Py_tp_base* slot is used instead.
If that also is ``NULL``, the new type derives from :class:`object`.
+ The *module* must be a module object or ``NULL``.
+ If not ``NULL``, the module is associated with the new type and can later be
+ retrieved with :c:func:`PyType_GetModule`.
+
This function calls :c:func:`PyType_Ready` on the new type.
+ .. versionadded:: 3.9
+
+.. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases)
+
+ Equivalent to ``PyType_FromModuleAndSpec(NULL, spec, bases)``.
+
.. versionadded:: 3.3
.. c:function:: PyObject* PyType_FromSpec(PyType_Spec *spec)
@@ -194,6 +228,7 @@ The following functions and structs are used to create
* :c:member:`~PyTypeObject.tp_dictoffset`
(see :ref:`PyMemberDef `)
* :c:member:`~PyTypeObject.tp_vectorcall_offset`
+ (see :ref:`PyMemberDef `)
* :c:member:`~PyBufferProcs.bf_getbuffer`
* :c:member:`~PyBufferProcs.bf_releasebuffer`
diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst
index a8a779ef6165a8..385c7f94c672f2 100644
--- a/Doc/c-api/typeobj.rst
+++ b/Doc/c-api/typeobj.rst
@@ -684,15 +684,15 @@ and :c:type:`PyType_Type` effectively act as defaults.)
a more efficient alternative
of the simpler :c:member:`~PyTypeObject.tp_call`.
- This field is only used if the flag :const:`_Py_TPFLAGS_HAVE_VECTORCALL`
+ This field is only used if the flag :const:`Py_TPFLAGS_HAVE_VECTORCALL`
is set. If so, this must be a positive integer containing the offset in the
instance of a :c:type:`vectorcallfunc` pointer.
The *vectorcallfunc* pointer may be ``NULL``, in which case the instance behaves
- as if :const:`_Py_TPFLAGS_HAVE_VECTORCALL` was not set: calling the instance
+ as if :const:`Py_TPFLAGS_HAVE_VECTORCALL` was not set: calling the instance
falls back to :c:member:`~PyTypeObject.tp_call`.
- Any class that sets ``_Py_TPFLAGS_HAVE_VECTORCALL`` must also set
+ Any class that sets ``Py_TPFLAGS_HAVE_VECTORCALL`` must also set
:c:member:`~PyTypeObject.tp_call` and make sure its behaviour is consistent
with the *vectorcallfunc* function.
This can be done by setting *tp_call* to :c:func:`PyVectorcall_Call`.
@@ -719,7 +719,7 @@ and :c:type:`PyType_Type` effectively act as defaults.)
**Inheritance:**
This field is always inherited.
- However, the :const:`_Py_TPFLAGS_HAVE_VECTORCALL` flag is not
+ However, the :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag is not
always inherited. If it's not, then the subclass won't use
:ref:`vectorcall `, except when
:c:func:`PyVectorcall_Call` is explicitly called.
@@ -1153,7 +1153,7 @@ and :c:type:`PyType_Type` effectively act as defaults.)
type structure.
- .. data:: _Py_TPFLAGS_HAVE_VECTORCALL
+ .. data:: Py_TPFLAGS_HAVE_VECTORCALL
This bit is set when the class implements
the :ref:`vectorcall protocol `.
@@ -1163,15 +1163,9 @@ and :c:type:`PyType_Type` effectively act as defaults.)
This bit is inherited for *static* subtypes if
:c:member:`~PyTypeObject.tp_call` is also inherited.
- `Heap types`_ do not inherit ``_Py_TPFLAGS_HAVE_VECTORCALL``.
+ `Heap types`_ do not inherit ``Py_TPFLAGS_HAVE_VECTORCALL``.
- .. note::
-
- This flag is provisional and expected to become public in Python 3.9,
- with a different name and, possibly, changed semantics.
- If you use vectorcall, plan for updating your code for Python 3.9.
-
- .. versionadded:: 3.8
+ .. versionadded:: 3.9
.. c:member:: const char* PyTypeObject.tp_doc
@@ -1198,7 +1192,7 @@ and :c:type:`PyType_Type` effectively act as defaults.)
The :c:member:`~PyTypeObject.tp_traverse` pointer is used by the garbage collector to detect
reference cycles. A typical implementation of a :c:member:`~PyTypeObject.tp_traverse` function
simply calls :c:func:`Py_VISIT` on each of the instance's members that are Python
- objects. For example, this is function :c:func:`local_traverse` from the
+ objects that the instance owns. For example, this is function :c:func:`local_traverse` from the
:mod:`_thread` extension module::
static int
@@ -1218,10 +1212,36 @@ and :c:type:`PyType_Type` effectively act as defaults.)
debugging aid you may want to visit it anyway just so the :mod:`gc` module's
:func:`~gc.get_referents` function will include it.
+ .. warning::
+ When implementing :c:member:`~PyTypeObject.tp_traverse`, only the members
+ that the instance *owns* (by having strong references to them) must be
+ visited. For instance, if an object supports weak references via the
+ :c:member:`~PyTypeObject.tp_weaklist` slot, the pointer supporting
+ the linked list (what *tp_weaklist* points to) must **not** be
+ visited as the instance does not directly own the weak references to itself
+ (the weakreference list is there to support the weak reference machinery,
+ but the instance has no strong reference to the elements inside it, as they
+ are allowed to be removed even if the instance is still alive).
+
Note that :c:func:`Py_VISIT` requires the *visit* and *arg* parameters to
:c:func:`local_traverse` to have these specific names; don't name them just
anything.
+ Heap-allocated types (:const:`Py_TPFLAGS_HEAPTYPE`, such as those created
+ with :c:func:`PyType_FromSpec` and similar APIs) hold a reference to their
+ type. Their traversal function must therefore either visit
+ :c:func:`Py_TYPE(self) `, or delegate this responsibility by
+ calling ``tp_traverse`` of another heap-allocated type (such as a
+ heap-allocated superclass).
+ If they do not, the type object may not be garbage-collected.
+
+ .. versionchanged:: 3.9
+
+ Heap-allocated types are expected to visit ``Py_TYPE(self)`` in
+ ``tp_traverse``. In earlier versions of Python, due to
+ `bug 40217 `_, doing this
+ may lead to crashes in subclasses.
+
**Inheritance:**
Group: :const:`Py_TPFLAGS_HAVE_GC`, :attr:`tp_traverse`, :attr:`tp_clear`
diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst
index 77f123cf1f2c08..b1787ed1ce89cf 100644
--- a/Doc/c-api/unicode.rst
+++ b/Doc/c-api/unicode.rst
@@ -240,6 +240,16 @@ access internal read-only data of Unicode objects:
:c:func:`PyUnicode_nBYTE_DATA` family of macros.
+.. c:function:: int PyUnicode_IsIdentifier(PyObject *o)
+
+ Return ``1`` if the string is a valid identifier according to the language
+ definition, section :ref:`identifiers`. Return ``0`` otherwise.
+
+ .. versionchanged:: 3.9
+ The function does not call :c:func:`Py_FatalError` anymore if the string
+ is not ready.
+
+
Unicode Character Properties
""""""""""""""""""""""""""""
@@ -978,7 +988,7 @@ have the same semantics as the ones of the built-in :func:`str` string object
constructor.
Setting encoding to ``NULL`` causes the default encoding to be used
-which is ASCII. The file system calls should use
+which is UTF-8. The file system calls should use
:c:func:`PyUnicode_FSConverter` for encoding file names. This uses the
variable :c:data:`Py_FileSystemDefaultEncoding` internally. This
variable should be treated as read-only: on some systems, it will be a
diff --git a/Doc/conf.py b/Doc/conf.py
index abaa760c98c1aa..12d74ea24ce4ac 100644
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -14,7 +14,8 @@
# ---------------------
extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest',
- 'pyspecific', 'c_annotations', 'escape4chm']
+ 'pyspecific', 'c_annotations', 'escape4chm',
+ 'asdl_highlight']
doctest_global_setup = '''
@@ -127,6 +128,7 @@
}
\let\Verbatim=\OriginalVerbatim
\let\endVerbatim=\endOriginalVerbatim
+\setcounter{tocdepth}{2}
'''
# The paper size ('letter' or 'a4').
diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat
index b55e972d536fe2..4dacbe201d22a5 100644
--- a/Doc/data/refcounts.dat
+++ b/Doc/data/refcounts.dat
@@ -1673,7 +1673,8 @@ PyObject_GenericSetAttr:PyObject*:name:0:
PyObject_GenericSetAttr:PyObject*:value:+1:
PyObject_GenericSetDict:int:::
-PyObject_GenericSetDict:PyObject*:o:+1:
+PyObject_GenericSetDict:PyObject*:o:0:
+PyObject_GenericSetDict:PyObject*:value:+1:
PyObject_GenericSetDict:void*:context::
PyObject_GetAttr:PyObject*::+1:
diff --git a/Doc/distutils/apiref.rst b/Doc/distutils/apiref.rst
index 12e0c0b2c9757f..b14197c2f94dba 100644
--- a/Doc/distutils/apiref.rst
+++ b/Doc/distutils/apiref.rst
@@ -1855,6 +1855,9 @@ Subclasses of :class:`Command` must define the following methods.
.. class:: bdist_msi
+.. deprecated:: 3.9
+ Use bdist_wheel (wheel packages) instead.
+
Builds a `Windows Installer`_ (.msi) binary package.
.. _Windows Installer: https://msdn.microsoft.com/en-us/library/cc185688(VS.85).aspx
diff --git a/Doc/distutils/builtdist.rst b/Doc/distutils/builtdist.rst
index b814f2e9508c9c..e032c03e229a5c 100644
--- a/Doc/distutils/builtdist.rst
+++ b/Doc/distutils/builtdist.rst
@@ -149,6 +149,9 @@ generated by each, are:
.. note::
bdist_wininst is deprecated since Python 3.8.
+.. note::
+ bdist_msi is deprecated since Python 3.9.
+
The following sections give details on the individual :command:`bdist_\*`
commands.
@@ -304,6 +307,9 @@ Creating Windows Installers
.. warning::
bdist_wininst is deprecated since Python 3.8.
+.. warning::
+ bdist_msi is deprecated since Python 3.9.
+
Executable installers are the natural format for binary distributions on
Windows. They display a nice graphical user interface, display some information
about the module distribution to be installed taken from the metadata in the
@@ -468,3 +474,6 @@ installed for all users) and 'force' (meaning always prompt for elevation).
.. note::
bdist_wininst is deprecated since Python 3.8.
+
+.. note::
+ bdist_msi is deprecated since Python 3.9.
diff --git a/Doc/extending/extending.rst b/Doc/extending/extending.rst
index 5b32a2cdc5506e..25dc2934d29ef6 100644
--- a/Doc/extending/extending.rst
+++ b/Doc/extending/extending.rst
@@ -395,18 +395,26 @@ optionally followed by an import of the module::
}
/* Add a built-in module, before Py_Initialize */
- PyImport_AppendInittab("spam", PyInit_spam);
+ if (PyImport_AppendInittab("spam", PyInit_spam) == -1) {
+ fprintf(stderr, "Error: could not extend in-built modules table\n");
+ exit(1);
+ }
/* Pass argv[0] to the Python interpreter */
Py_SetProgramName(program);
- /* Initialize the Python interpreter. Required. */
+ /* Initialize the Python interpreter. Required.
+ If this step fails, it will be a fatal error. */
Py_Initialize();
/* Optionally import the module; alternatively,
import can be deferred until the embedded script
imports it. */
- PyImport_ImportModule("spam");
+ pmodule = PyImport_ImportModule("spam");
+ if (!pmodule) {
+ PyErr_Print();
+ fprintf(stderr, "Error: could not import module 'spam'\n");
+ }
...
diff --git a/Doc/faq/design.rst b/Doc/faq/design.rst
index df3dbf4977e01e..4e3cc575ee1964 100644
--- a/Doc/faq/design.rst
+++ b/Doc/faq/design.rst
@@ -651,7 +651,7 @@ Why doesn't Python have a "with" statement for attribute assignments?
---------------------------------------------------------------------
Python has a 'with' statement that wraps the execution of a block, calling code
-on the entrance and exit from the block. Some language have a construct that
+on the entrance and exit from the block. Some languages have a construct that
looks like this::
with obj:
diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst
index 70b11d6e93056f..61ffc5dbdaa773 100644
--- a/Doc/faq/programming.rst
+++ b/Doc/faq/programming.rst
@@ -851,10 +851,11 @@ For integers, use the built-in :func:`int` type constructor, e.g. ``int('144')
e.g. ``float('144') == 144.0``.
By default, these interpret the number as decimal, so that ``int('0144') ==
-144`` and ``int('0x144')`` raises :exc:`ValueError`. ``int(string, base)`` takes
-the base to convert from as a second optional argument, so ``int('0x144', 16) ==
-324``. If the base is specified as 0, the number is interpreted using Python's
-rules: a leading '0o' indicates octal, and '0x' indicates a hex number.
+144`` holds true, and ``int('0x144')`` raises :exc:`ValueError`. ``int(string,
+base)`` takes the base to convert from as a second optional argument, so ``int(
+'0x144', 16) == 324``. If the base is specified as 0, the number is interpreted
+using Python's rules: a leading '0o' indicates octal, and '0x' indicates a hex
+number.
Do not use the built-in function :func:`eval` if all you need is to convert
strings to numbers. :func:`eval` will be significantly slower and it presents a
@@ -1494,8 +1495,8 @@ to uppercase::
Here the ``UpperOut`` class redefines the ``write()`` method to convert the
argument string to uppercase before calling the underlying
-``self.__outfile.write()`` method. All other methods are delegated to the
-underlying ``self.__outfile`` object. The delegation is accomplished via the
+``self._outfile.write()`` method. All other methods are delegated to the
+underlying ``self._outfile`` object. The delegation is accomplished via the
``__getattr__`` method; consult :ref:`the language reference `
for more information about controlling attribute access.
diff --git a/Doc/howto/argparse.rst b/Doc/howto/argparse.rst
index e78a022b372faa..76d8e6be42935f 100644
--- a/Doc/howto/argparse.rst
+++ b/Doc/howto/argparse.rst
@@ -353,7 +353,7 @@ Our program keeps growing in complexity::
args = parser.parse_args()
answer = args.square**2
if args.verbose:
- print("the square of {} equals {}".format(args.square, answer))
+ print(f"the square of {args.square} equals {answer}")
else:
print(answer)
@@ -387,9 +387,9 @@ multiple verbosity values, and actually get to use them::
args = parser.parse_args()
answer = args.square**2
if args.verbosity == 2:
- print("the square of {} equals {}".format(args.square, answer))
+ print(f"the square of {args.square} equals {answer}")
elif args.verbosity == 1:
- print("{}^2 == {}".format(args.square, answer))
+ print(f"{args.square}^2 == {answer}")
else:
print(answer)
@@ -421,9 +421,9 @@ Let's fix it by restricting the values the ``--verbosity`` option can accept::
args = parser.parse_args()
answer = args.square**2
if args.verbosity == 2:
- print("the square of {} equals {}".format(args.square, answer))
+ print(f"the square of {args.square} equals {answer}")
elif args.verbosity == 1:
- print("{}^2 == {}".format(args.square, answer))
+ print(f"{args.square}^2 == {answer}")
else:
print(answer)
@@ -461,9 +461,9 @@ verbosity argument (check the output of ``python --help``)::
args = parser.parse_args()
answer = args.square**2
if args.verbosity == 2:
- print("the square of {} equals {}".format(args.square, answer))
+ print(f"the square of {args.square} equals {answer}")
elif args.verbosity == 1:
- print("{}^2 == {}".format(args.square, answer))
+ print(f"{args.square}^2 == {answer}")
else:
print(answer)
@@ -529,9 +529,9 @@ Let's fix::
# bugfix: replace == with >=
if args.verbosity >= 2:
- print("the square of {} equals {}".format(args.square, answer))
+ print(f"the square of {args.square} equals {answer}")
elif args.verbosity >= 1:
- print("{}^2 == {}".format(args.square, answer))
+ print(f"{args.square}^2 == {answer}")
else:
print(answer)
@@ -566,9 +566,9 @@ Let's fix that bug::
args = parser.parse_args()
answer = args.square**2
if args.verbosity >= 2:
- print("the square of {} equals {}".format(args.square, answer))
+ print(f"the square of {args.square} equals {answer}")
elif args.verbosity >= 1:
- print("{}^2 == {}".format(args.square, answer))
+ print(f"{args.square}^2 == {answer}")
else:
print(answer)
@@ -606,9 +606,9 @@ not just squares::
args = parser.parse_args()
answer = args.x**args.y
if args.verbosity >= 2:
- print("{} to the power {} equals {}".format(args.x, args.y, answer))
+ print(f"{args.x} to the power {args.y} equals {answer}")
elif args.verbosity >= 1:
- print("{}^{} == {}".format(args.x, args.y, answer))
+ print(f"{args.x}^{args.y} == {answer}")
else:
print(answer)
@@ -645,9 +645,9 @@ to display *more* text instead::
args = parser.parse_args()
answer = args.x**args.y
if args.verbosity >= 2:
- print("Running '{}'".format(__file__))
+ print(f"Running '{__file__}'")
if args.verbosity >= 1:
- print("{}^{} == ".format(args.x, args.y), end="")
+ print(f"{args.x}^{args.y} == ", end="")
print(answer)
Output:
@@ -688,9 +688,9 @@ which will be the opposite of the ``--verbose`` one::
if args.quiet:
print(answer)
elif args.verbose:
- print("{} to the power {} equals {}".format(args.x, args.y, answer))
+ print(f"{args.x} to the power {args.y} equals {answer}")
else:
- print("{}^{} == {}".format(args.x, args.y, answer))
+ print(f"{args.x}^{args.y} == {answer}")
Our program is now simpler, and we've lost some functionality for the sake of
demonstration. Anyways, here's the output:
diff --git a/Doc/howto/logging.rst b/Doc/howto/logging.rst
index fbe5a118d18684..6316e086ef43ba 100644
--- a/Doc/howto/logging.rst
+++ b/Doc/howto/logging.rst
@@ -1086,8 +1086,7 @@ need:
| | :func:`sys._getframe`, which may help |
| | to speed up your code in environments |
| | like PyPy (which can't speed up code |
-| | that uses :func:`sys._getframe`), if |
-| | and when PyPy supports Python 3.x. |
+| | that uses :func:`sys._getframe`). |
+-----------------------------------------------+----------------------------------------+
| Threading information. | Set ``logging.logThreads`` to ``0``. |
+-----------------------------------------------+----------------------------------------+
diff --git a/Doc/howto/sockets.rst b/Doc/howto/sockets.rst
index 4655f280608960..d6ed128e073fd6 100644
--- a/Doc/howto/sockets.rst
+++ b/Doc/howto/sockets.rst
@@ -319,7 +319,7 @@ inside-out.
In Python, you use ``socket.setblocking(False)`` to make it non-blocking. In C, it's
more complex, (for one thing, you'll need to choose between the BSD flavor
-``O_NONBLOCK`` and the almost indistinguishable Posix flavor ``O_NDELAY``, which
+``O_NONBLOCK`` and the almost indistinguishable POSIX flavor ``O_NDELAY``, which
is completely different from ``TCP_NODELAY``), but it's the exact same idea. You
do this after creating the socket, but before using it. (Actually, if you're
nuts, you can switch back and forth.)
diff --git a/Doc/howto/unicode.rst b/Doc/howto/unicode.rst
index 51bd64bfc232ca..e948c1e3c662d6 100644
--- a/Doc/howto/unicode.rst
+++ b/Doc/howto/unicode.rst
@@ -41,8 +41,9 @@ but these are two different characters that have different meanings.
The Unicode standard describes how characters are represented by
**code points**. A code point value is an integer in the range 0 to
-0x10FFFF (about 1.1 million values, with some 110 thousand assigned so
-far). In the standard and in this document, a code point is written
+0x10FFFF (about 1.1 million values, the
+`actual number assigned `_
+is less than that). In the standard and in this document, a code point is written
using the notation ``U+265E`` to mean the character with value
``0x265e`` (9,822 in decimal).
@@ -156,9 +157,9 @@ UTF-8 has several convenient properties:
References
----------
-The `Unicode Consortium site `_ has character charts, a
+The `Unicode Consortium site `_ has character charts, a
glossary, and PDF versions of the Unicode specification. Be prepared for some
-difficult reading. `A chronology `_ of the
+difficult reading. `A chronology `_ of the
origin and development of Unicode is also available on the site.
On the Computerphile Youtube channel, Tom Scott briefly
@@ -393,7 +394,7 @@ These are grouped into categories such as "Letter", "Number", "Punctuation", or
from the above output, ``'Ll'`` means 'Letter, lowercase', ``'No'`` means
"Number, other", ``'Mn'`` is "Mark, nonspacing", and ``'So'`` is "Symbol,
other". See
-`the General Category Values section of the Unicode Character Database documentation `_ for a
+`the General Category Values section of the Unicode Character Database documentation `_ for a
list of category codes.
diff --git a/Doc/install/index.rst b/Doc/install/index.rst
index a91606c0f38e61..e6d5a3e6ebde60 100644
--- a/Doc/install/index.rst
+++ b/Doc/install/index.rst
@@ -532,7 +532,7 @@ scripts will wind up in :file:`/usr/local/python/bin`. If you want them in
python setup.py install --install-scripts=/usr/local/bin
-(This performs an installation using the "prefix scheme," where the prefix is
+(This performs an installation using the "prefix scheme", where the prefix is
whatever your Python interpreter was installed with--- :file:`/usr/local/python`
in this case.)
diff --git a/Doc/library/2to3.rst b/Doc/library/2to3.rst
index eb4c9185f48bf2..1d7bd262872905 100644
--- a/Doc/library/2to3.rst
+++ b/Doc/library/2to3.rst
@@ -9,9 +9,7 @@
of *fixers* to transform it into valid Python 3.x code. The standard library
contains a rich set of fixers that will handle almost all code. 2to3 supporting
library :mod:`lib2to3` is, however, a flexible and generic library, so it is
-possible to write your own fixers for 2to3. :mod:`lib2to3` could also be
-adapted to custom applications in which Python code needs to be edited
-automatically.
+possible to write your own fixers for 2to3.
.. _2to3-using:
@@ -466,9 +464,17 @@ and off individually. They are described here in more detail.
--------------
+.. deprecated:: 3.10
+ Python 3.9 will switch to a PEG parser (see :pep:`617`), and Python 3.10 may
+ include new language syntax that is not parsable by lib2to3's LL(1) parser.
+ The ``lib2to3`` module may be removed from the standard library in a future
+ Python version. Consider third-party alternatives such as `LibCST`_ or
+ `parso`_.
+
.. note::
The :mod:`lib2to3` API should be considered unstable and may change
drastically in the future.
-.. XXX What is the public interface anyway?
+.. _LibCST: https://libcst.readthedocs.io/
+.. _parso: https://parso.readthedocs.io/
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index f8e39189686208..5e0096cae73a79 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -961,19 +961,6 @@ values are:
usage: PROG [-h] foo [foo ...]
PROG: error: the following arguments are required: foo
-.. _`argparse.REMAINDER`:
-
-* ``argparse.REMAINDER``. All the remaining command-line arguments are gathered
- into a list. This is commonly useful for command line utilities that dispatch
- to other command line utilities::
-
- >>> parser = argparse.ArgumentParser(prog='PROG')
- >>> parser.add_argument('--foo')
- >>> parser.add_argument('command')
- >>> parser.add_argument('args', nargs=argparse.REMAINDER)
- >>> print(parser.parse_args('--foo B cmd --arg1 XX ZZ'.split()))
- Namespace(args=['--arg1', 'XX', 'ZZ'], command='cmd', foo='B')
-
If the ``nargs`` keyword argument is not provided, the number of arguments consumed
is determined by the action_. Generally this means a single command-line argument
will be consumed and a single item (not a list) will be produced.
diff --git a/Doc/library/array.rst b/Doc/library/array.rst
index c9a9b1dabb2a79..78020738bf4f75 100644
--- a/Doc/library/array.rst
+++ b/Doc/library/array.rst
@@ -22,7 +22,7 @@ defined:
+-----------+--------------------+-------------------+-----------------------+-------+
| ``'B'`` | unsigned char | int | 1 | |
+-----------+--------------------+-------------------+-----------------------+-------+
-| ``'u'`` | Py_UNICODE | Unicode character | 2 | \(1) |
+| ``'u'`` | wchar_t | Unicode character | 2 | \(1) |
+-----------+--------------------+-------------------+-----------------------+-------+
| ``'h'`` | signed short | int | 2 | |
+-----------+--------------------+-------------------+-----------------------+-------+
@@ -48,15 +48,16 @@ defined:
Notes:
(1)
- The ``'u'`` type code corresponds to Python's obsolete unicode character
- (:c:type:`Py_UNICODE` which is :c:type:`wchar_t`). Depending on the
- platform, it can be 16 bits or 32 bits.
+ It can be 16 bits or 32 bits depending on the platform.
- ``'u'`` will be removed together with the rest of the :c:type:`Py_UNICODE`
- API.
+ .. versionchanged:: 3.9
+ ``array('u')`` now uses ``wchar_t`` as C type instead of deprecated
+      ``array('u')`` now uses ``wchar_t`` as C type instead of deprecated
+      ``Py_UNICODE``. This change doesn't affect its behavior because
.. deprecated-removed:: 3.3 4.0
+
The actual representation of values is determined by the machine architecture
(strictly speaking, by the C implementation). The actual size can be accessed
through the :attr:`itemsize` attribute.
diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst
index 2cee8738e5834c..6c6ad01b842c8e 100644
--- a/Doc/library/ast.rst
+++ b/Doc/library/ast.rst
@@ -7,6 +7,10 @@
.. sectionauthor:: Martin v. Löwis
.. sectionauthor:: Georg Brandl
+.. testsetup::
+
+ import ast
+
**Source code:** :source:`Lib/ast.py`
--------------
@@ -23,6 +27,17 @@ classes all inherit from :class:`ast.AST`. An abstract syntax tree can be
compiled into a Python code object using the built-in :func:`compile` function.
+.. _abstract-grammar:
+
+Abstract Grammar
+----------------
+
+The abstract grammar is currently defined as follows:
+
+.. literalinclude:: ../../Parser/Python.asdl
+ :language: asdl
+
+
Node classes
------------
@@ -101,23 +116,1392 @@ Node classes
node = ast.UnaryOp(ast.USub(), ast.Constant(5, lineno=0, col_offset=0),
lineno=0, col_offset=0)
+.. versionchanged:: 3.8
+
+ Class :class:`ast.Constant` is now used for all constants.
+
+.. versionchanged:: 3.9
+
+ Simple indices are represented by their value, extended slices are
+ represented as tuples.
+
.. deprecated:: 3.8
- Class :class:`ast.Constant` is now used for all constants. Old classes
- :class:`ast.Num`, :class:`ast.Str`, :class:`ast.Bytes`,
+ Old classes :class:`ast.Num`, :class:`ast.Str`, :class:`ast.Bytes`,
:class:`ast.NameConstant` and :class:`ast.Ellipsis` are still available,
- but they will be removed in future Python releases.
+ but they will be removed in future Python releases. In the meantime,
+ instantiating them will return an instance of a different class.
+.. deprecated:: 3.9
-.. _abstract-grammar:
+ Old classes :class:`ast.Index` and :class:`ast.ExtSlice` are still
+ available, but they will be removed in future Python releases.
+ In the meantime, instantiating them will return an instance of
+ a different class.
-Abstract Grammar
-----------------
-The abstract grammar is currently defined as follows:
+Literals
+^^^^^^^^
-.. literalinclude:: ../../Parser/Python.asdl
- :language: none
+.. class:: Constant(value)
+
+ A constant value. The ``value`` attribute of the ``Constant`` literal contains the
+ Python object it represents. The values represented can be simple types
+ such as a number, string or ``None``, but also immutable container types
+ (tuples and frozensets) if all of their elements are constant.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('123', mode='eval'), indent=4))
+ Expression(
+ body=Constant(value=123))
+
+
+.. class:: FormattedValue(value, conversion, format_spec)
+
+ Node representing a single formatting field in an f-string. If the string
+   contains a single formatting field and nothing else the node can be
+   isolated, otherwise it appears in :class:`JoinedStr`.
+
+ * ``value`` is any expression node (such as a literal, a variable, or a
+ function call).
+ * ``conversion`` is an integer:
+
+ * -1: no formatting
+ * 115: ``!s`` string formatting
+ * 114: ``!r`` repr formatting
+ * 97: ``!a`` ascii formatting
+
+ * ``format_spec`` is a :class:`JoinedStr` node representing the formatting
+ of the value, or ``None`` if no format was specified. Both
+ ``conversion`` and ``format_spec`` can be set at the same time.
+
+
+.. class:: JoinedStr(values)
+
+ An f-string, comprising a series of :class:`FormattedValue` and :class:`Constant`
+ nodes.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('f"sin({a}) is {sin(a):.3}"', mode='eval'), indent=4))
+ Expression(
+ body=JoinedStr(
+ values=[
+ Constant(value='sin('),
+ FormattedValue(
+ value=Name(id='a', ctx=Load()),
+ conversion=-1),
+ Constant(value=') is '),
+ FormattedValue(
+ value=Call(
+ func=Name(id='sin', ctx=Load()),
+ args=[
+ Name(id='a', ctx=Load())],
+ keywords=[]),
+ conversion=-1,
+ format_spec=JoinedStr(
+ values=[
+ Constant(value='.3')]))]))
+
+
+.. class:: List(elts, ctx)
+ Tuple(elts, ctx)
+
+ A list or tuple. ``elts`` holds a list of nodes representing the elements.
+ ``ctx`` is :class:`Store` if the container is an assignment target (i.e.
+ ``(x,y)=something``), and :class:`Load` otherwise.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('[1, 2, 3]', mode='eval'), indent=4))
+ Expression(
+ body=List(
+ elts=[
+ Constant(value=1),
+ Constant(value=2),
+ Constant(value=3)],
+ ctx=Load()))
+ >>> print(ast.dump(ast.parse('(1, 2, 3)', mode='eval'), indent=4))
+ Expression(
+ body=Tuple(
+ elts=[
+ Constant(value=1),
+ Constant(value=2),
+ Constant(value=3)],
+ ctx=Load()))
+
+
+.. class:: Set(elts)
+
+ A set. ``elts`` holds a list of nodes representing the set's elements.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('{1, 2, 3}', mode='eval'), indent=4))
+ Expression(
+ body=Set(
+ elts=[
+ Constant(value=1),
+ Constant(value=2),
+ Constant(value=3)]))
+
+
+.. class:: Dict(keys, values)
+
+ A dictionary. ``keys`` and ``values`` hold lists of nodes representing the
+ keys and the values respectively, in matching order (what would be returned
+ when calling :code:`dictionary.keys()` and :code:`dictionary.values()`).
+
+ When doing dictionary unpacking using dictionary literals the expression to be
+ expanded goes in the ``values`` list, with a ``None`` at the corresponding
+ position in ``keys``.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('{"a":1, **d}', mode='eval'), indent=4))
+ Expression(
+ body=Dict(
+ keys=[
+ Constant(value='a'),
+ None],
+ values=[
+ Constant(value=1),
+ Name(id='d', ctx=Load())]))
+
+
+Variables
+^^^^^^^^^
+
+.. class:: Name(id, ctx)
+
+ A variable name. ``id`` holds the name as a string, and ``ctx`` is one of
+ the following types.
+
+
+.. class:: Load()
+ Store()
+ Del()
+
+ Variable references can be used to load the value of a variable, to assign
+ a new value to it, or to delete it. Variable references are given a context
+ to distinguish these cases.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('a'), indent=4))
+ Module(
+ body=[
+ Expr(
+ value=Name(id='a', ctx=Load()))],
+ type_ignores=[])
+
+ >>> print(ast.dump(ast.parse('a = 1'), indent=4))
+ Module(
+ body=[
+ Assign(
+ targets=[
+ Name(id='a', ctx=Store())],
+ value=Constant(value=1))],
+ type_ignores=[])
+
+ >>> print(ast.dump(ast.parse('del a'), indent=4))
+ Module(
+ body=[
+ Delete(
+ targets=[
+ Name(id='a', ctx=Del())])],
+ type_ignores=[])
+
+
+.. class:: Starred(value, ctx)
+
+ A ``*var`` variable reference. ``value`` holds the variable, typically a
+ :class:`Name` node. This type must be used when building a :class:`Call`
+ node with ``*args``.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('a, *b = it'), indent=4))
+ Module(
+ body=[
+ Assign(
+ targets=[
+ Tuple(
+ elts=[
+ Name(id='a', ctx=Store()),
+ Starred(
+ value=Name(id='b', ctx=Store()),
+ ctx=Store())],
+ ctx=Store())],
+ value=Name(id='it', ctx=Load()))],
+ type_ignores=[])
+
+
+Expressions
+^^^^^^^^^^^
+
+.. class:: Expr(value)
+
+ When an expression, such as a function call, appears as a statement by itself
+ with its return value not used or stored, it is wrapped in this container.
+ ``value`` holds one of the other nodes in this section, a :class:`Constant`, a
+ :class:`Name`, a :class:`Lambda`, a :class:`Yield` or :class:`YieldFrom` node.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('-a'), indent=4))
+ Module(
+ body=[
+ Expr(
+ value=UnaryOp(
+ op=USub(),
+ operand=Name(id='a', ctx=Load())))],
+ type_ignores=[])
+
+
+.. class:: UnaryOp(op, operand)
+
+ A unary operation. ``op`` is the operator, and ``operand`` any expression
+ node.
+
+
+.. class:: UAdd
+ USub
+ Not
+ Invert
+
+ Unary operator tokens. :class:`Not` is the ``not`` keyword, :class:`Invert`
+ is the ``~`` operator.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('not x', mode='eval'), indent=4))
+ Expression(
+ body=UnaryOp(
+ op=Not(),
+ operand=Name(id='x', ctx=Load())))
+
+
+.. class:: BinOp(left, op, right)
+
+ A binary operation (like addition or division). ``op`` is the operator, and
+ ``left`` and ``right`` are any expression nodes.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('x + y', mode='eval'), indent=4))
+ Expression(
+ body=BinOp(
+ left=Name(id='x', ctx=Load()),
+ op=Add(),
+ right=Name(id='y', ctx=Load())))
+
+
+.. class:: Add
+ Sub
+ Mult
+ Div
+ FloorDiv
+ Mod
+ Pow
+ LShift
+ RShift
+ BitOr
+ BitXor
+ BitAnd
+ MatMult
+
+ Binary operator tokens.
+
+
+.. class:: BoolOp(op, values)
+
+ A boolean operation, 'or' or 'and'. ``op`` is :class:`Or` or :class:`And`.
+ ``values`` are the values involved. Consecutive operations with the same
+ operator, such as ``a or b or c``, are collapsed into one node with several
+ values.
+
+ This doesn't include ``not``, which is a :class:`UnaryOp`.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('x or y', mode='eval'), indent=4))
+ Expression(
+ body=BoolOp(
+ op=Or(),
+ values=[
+ Name(id='x', ctx=Load()),
+ Name(id='y', ctx=Load())]))
+
+
+.. class:: And
+ Or
+
+ Boolean operator tokens.
+
+
+.. class:: Compare(left, ops, comparators)
+
+ A comparison of two or more values. ``left`` is the first value in the
+ comparison, ``ops`` the list of operators, and ``comparators`` the list
+ of values after the first element in the comparison.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('1 <= a < 10', mode='eval'), indent=4))
+ Expression(
+ body=Compare(
+ left=Constant(value=1),
+ ops=[
+ LtE(),
+ Lt()],
+ comparators=[
+ Name(id='a', ctx=Load()),
+ Constant(value=10)]))
+
+
+.. class:: Eq
+ NotEq
+ Lt
+ LtE
+ Gt
+ GtE
+ Is
+ IsNot
+ In
+ NotIn
+
+ Comparison operator tokens.
+
+
+.. class:: Call(func, args, keywords, starargs, kwargs)
+
+ A function call. ``func`` is the function, which will often be a
+ :class:`Name` or :class:`Attribute` object. Of the arguments:
+
+ * ``args`` holds a list of the arguments passed by position.
+ * ``keywords`` holds a list of :class:`keyword` objects representing
+ arguments passed by keyword.
+
+ When creating a ``Call`` node, ``args`` and ``keywords`` are required, but
+ they can be empty lists. ``starargs`` and ``kwargs`` are optional.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('func(a, b=c, *d, **e)', mode='eval'), indent=4))
+ Expression(
+ body=Call(
+ func=Name(id='func', ctx=Load()),
+ args=[
+ Name(id='a', ctx=Load()),
+ Starred(
+ value=Name(id='d', ctx=Load()),
+ ctx=Load())],
+ keywords=[
+ keyword(
+ arg='b',
+ value=Name(id='c', ctx=Load())),
+ keyword(
+ value=Name(id='e', ctx=Load()))]))
+
+
+.. class:: keyword(arg, value)
+
+ A keyword argument to a function call or class definition. ``arg`` is a raw
+ string of the parameter name, ``value`` is a node to pass in.
+
+
+.. class:: IfExp(test, body, orelse)
+
+ An expression such as ``a if b else c``. Each field holds a single node, so
+ in the following example, all three are :class:`Name` nodes.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('a if b else c', mode='eval'), indent=4))
+ Expression(
+ body=IfExp(
+ test=Name(id='b', ctx=Load()),
+ body=Name(id='a', ctx=Load()),
+ orelse=Name(id='c', ctx=Load())))
+
+
+.. class:: Attribute(value, attr, ctx)
+
+ Attribute access, e.g. ``d.keys``. ``value`` is a node, typically a
+ :class:`Name`. ``attr`` is a bare string giving the name of the attribute,
+ and ``ctx`` is :class:`Load`, :class:`Store` or :class:`Del` according to how
+ the attribute is acted on.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('snake.colour', mode='eval'), indent=4))
+ Expression(
+ body=Attribute(
+ value=Name(id='snake', ctx=Load()),
+ attr='colour',
+ ctx=Load()))
+
+
+.. class:: NamedExpr(target, value)
+
+ A named expression. This AST node is produced by the assignment expressions
+ operator (also known as the walrus operator). As opposed to the :class:`Assign`
+ node in which the first argument can be multiple nodes, in this case both
+ ``target`` and ``value`` must be single nodes.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('(x := 4)', mode='eval'), indent=4))
+ Expression(
+ body=NamedExpr(
+ target=Name(id='x', ctx=Store()),
+ value=Constant(value=4)))
+
+
+Subscripting
+~~~~~~~~~~~~
+
+.. class:: Subscript(value, slice, ctx)
+
+ A subscript, such as ``l[1]``. ``value`` is the subscripted object
+ (usually sequence or mapping). ``slice`` is an index, slice or key.
+ It can be a :class:`Tuple` and contain a :class:`Slice`.
+ ``ctx`` is :class:`Load`, :class:`Store` or :class:`Del`
+ according to the action performed with the subscript.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('l[1:2, 3]', mode='eval'), indent=4))
+ Expression(
+ body=Subscript(
+ value=Name(id='l', ctx=Load()),
+ slice=Tuple(
+ elts=[
+ Slice(
+ lower=Constant(value=1),
+ upper=Constant(value=2)),
+ Constant(value=3)],
+ ctx=Load()),
+ ctx=Load()))
+
+
+.. class:: Slice(lower, upper, step)
+
+ Regular slicing (on the form ``lower:upper`` or ``lower:upper:step``).
+ Can occur only inside the *slice* field of :class:`Subscript`, either
+ directly or as an element of :class:`Tuple`.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('l[1:2]', mode='eval'), indent=4))
+ Expression(
+ body=Subscript(
+ value=Name(id='l', ctx=Load()),
+ slice=Slice(
+ lower=Constant(value=1),
+ upper=Constant(value=2)),
+ ctx=Load()))
+
+
+Comprehensions
+~~~~~~~~~~~~~~
+
+.. class:: ListComp(elt, generators)
+ SetComp(elt, generators)
+ GeneratorExp(elt, generators)
+ DictComp(key, value, generators)
+
+ List and set comprehensions, generator expressions, and dictionary
+ comprehensions. ``elt`` (or ``key`` and ``value``) is a single node
+ representing the part that will be evaluated for each item.
+
+ ``generators`` is a list of :class:`comprehension` nodes.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('[x for x in numbers]', mode='eval'), indent=4))
+ Expression(
+ body=ListComp(
+ elt=Name(id='x', ctx=Load()),
+ generators=[
+ comprehension(
+ target=Name(id='x', ctx=Store()),
+ iter=Name(id='numbers', ctx=Load()),
+ ifs=[],
+ is_async=0)]))
+ >>> print(ast.dump(ast.parse('{x: x**2 for x in numbers}', mode='eval'), indent=4))
+ Expression(
+ body=DictComp(
+ key=Name(id='x', ctx=Load()),
+ value=BinOp(
+ left=Name(id='x', ctx=Load()),
+ op=Pow(),
+ right=Constant(value=2)),
+ generators=[
+ comprehension(
+ target=Name(id='x', ctx=Store()),
+ iter=Name(id='numbers', ctx=Load()),
+ ifs=[],
+ is_async=0)]))
+ >>> print(ast.dump(ast.parse('{x for x in numbers}', mode='eval'), indent=4))
+ Expression(
+ body=SetComp(
+ elt=Name(id='x', ctx=Load()),
+ generators=[
+ comprehension(
+ target=Name(id='x', ctx=Store()),
+ iter=Name(id='numbers', ctx=Load()),
+ ifs=[],
+ is_async=0)]))
+
+
+.. class:: comprehension(target, iter, ifs, is_async)
+
+ One ``for`` clause in a comprehension. ``target`` is the reference to use for
+ each element - typically a :class:`Name` or :class:`Tuple` node. ``iter``
+ is the object to iterate over. ``ifs`` is a list of test expressions: each
+ ``for`` clause can have multiple ``ifs``.
+
+ ``is_async`` indicates a comprehension is asynchronous (using an
+ ``async for`` instead of ``for``). The value is an integer (0 or 1).
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('[ord(c) for line in file for c in line]', mode='eval'),
+ ... indent=4)) # Multiple comprehensions in one.
+ Expression(
+ body=ListComp(
+ elt=Call(
+ func=Name(id='ord', ctx=Load()),
+ args=[
+ Name(id='c', ctx=Load())],
+ keywords=[]),
+ generators=[
+ comprehension(
+ target=Name(id='line', ctx=Store()),
+ iter=Name(id='file', ctx=Load()),
+ ifs=[],
+ is_async=0),
+ comprehension(
+ target=Name(id='c', ctx=Store()),
+ iter=Name(id='line', ctx=Load()),
+ ifs=[],
+ is_async=0)]))
+
+ >>> print(ast.dump(ast.parse('(n**2 for n in it if n>5 if n<10)', mode='eval'),
+ ... indent=4)) # generator comprehension
+ Expression(
+ body=GeneratorExp(
+ elt=BinOp(
+ left=Name(id='n', ctx=Load()),
+ op=Pow(),
+ right=Constant(value=2)),
+ generators=[
+ comprehension(
+ target=Name(id='n', ctx=Store()),
+ iter=Name(id='it', ctx=Load()),
+ ifs=[
+ Compare(
+ left=Name(id='n', ctx=Load()),
+ ops=[
+ Gt()],
+ comparators=[
+ Constant(value=5)]),
+ Compare(
+ left=Name(id='n', ctx=Load()),
+ ops=[
+ Lt()],
+ comparators=[
+ Constant(value=10)])],
+ is_async=0)]))
+
+ >>> print(ast.dump(ast.parse('[i async for i in soc]', mode='eval'),
+ ... indent=4)) # Async comprehension
+ Expression(
+ body=ListComp(
+ elt=Name(id='i', ctx=Load()),
+ generators=[
+ comprehension(
+ target=Name(id='i', ctx=Store()),
+ iter=Name(id='soc', ctx=Load()),
+ ifs=[],
+ is_async=1)]))
+
+Statements
+^^^^^^^^^^
+
+.. class:: Assign(targets, value, type_comment)
+
+ An assignment. ``targets`` is a list of nodes, and ``value`` is a single node.
+
+ Multiple nodes in ``targets`` represents assigning the same value to each.
+ Unpacking is represented by putting a :class:`Tuple` or :class:`List`
+ within ``targets``.
+
+ .. attribute:: type_comment
+
+ ``type_comment`` is an optional string with the type annotation as a comment.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('a = b = 1'), indent=4)) # Multiple assignment
+ Module(
+ body=[
+ Assign(
+ targets=[
+ Name(id='a', ctx=Store()),
+ Name(id='b', ctx=Store())],
+ value=Constant(value=1))],
+ type_ignores=[])
+
+ >>> print(ast.dump(ast.parse('a,b = c'), indent=4)) # Unpacking
+ Module(
+ body=[
+ Assign(
+ targets=[
+ Tuple(
+ elts=[
+ Name(id='a', ctx=Store()),
+ Name(id='b', ctx=Store())],
+ ctx=Store())],
+ value=Name(id='c', ctx=Load()))],
+ type_ignores=[])
+
+
+.. class:: AnnAssign(target, annotation, value, simple)
+
+ An assignment with a type annotation. ``target`` is a single node and can
+ be a :class:`Name`, a :class:`Attribute` or a :class:`Subscript`.
+ ``annotation`` is the annotation, such as a :class:`Constant` or :class:`Name`
+ node. ``value`` is a single optional node. ``simple`` is a boolean integer
+   set to True for a :class:`Name` node in ``target`` that does not appear
+   between parentheses and is hence a pure name and not an expression.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('c: int'), indent=4))
+ Module(
+ body=[
+ AnnAssign(
+ target=Name(id='c', ctx=Store()),
+ annotation=Name(id='int', ctx=Load()),
+ simple=1)],
+ type_ignores=[])
+
+ >>> print(ast.dump(ast.parse('(a): int = 1'), indent=4)) # Annotation with parenthesis
+ Module(
+ body=[
+ AnnAssign(
+ target=Name(id='a', ctx=Store()),
+ annotation=Name(id='int', ctx=Load()),
+ value=Constant(value=1),
+ simple=0)],
+ type_ignores=[])
+
+ >>> print(ast.dump(ast.parse('a.b: int'), indent=4)) # Attribute annotation
+ Module(
+ body=[
+ AnnAssign(
+ target=Attribute(
+ value=Name(id='a', ctx=Load()),
+ attr='b',
+ ctx=Store()),
+ annotation=Name(id='int', ctx=Load()),
+ simple=0)],
+ type_ignores=[])
+
+ >>> print(ast.dump(ast.parse('a[1]: int'), indent=4)) # Subscript annotation
+ Module(
+ body=[
+ AnnAssign(
+ target=Subscript(
+ value=Name(id='a', ctx=Load()),
+ slice=Constant(value=1),
+ ctx=Store()),
+ annotation=Name(id='int', ctx=Load()),
+ simple=0)],
+ type_ignores=[])
+
+
+.. class:: AugAssign(target, op, value)
+
+ Augmented assignment, such as ``a += 1``. In the following example,
+ ``target`` is a :class:`Name` node for ``x`` (with the :class:`Store`
+ context), ``op`` is :class:`Add`, and ``value`` is a :class:`Constant` with
+   value of 1.
+
+   The ``target`` attribute cannot be of class :class:`Tuple` or :class:`List`,
+ unlike the targets of :class:`Assign`.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('x += 2'), indent=4))
+ Module(
+ body=[
+ AugAssign(
+ target=Name(id='x', ctx=Store()),
+ op=Add(),
+ value=Constant(value=2))],
+ type_ignores=[])
+
+
+.. class:: Raise(exc, cause)
+
+ A ``raise`` statement. ``exc`` is the exception object to be raised, normally a
+ :class:`Call` or :class:`Name`, or ``None`` for a standalone ``raise``.
+ ``cause`` is the optional part for ``y`` in ``raise x from y``.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('raise x from y'), indent=4))
+ Module(
+ body=[
+ Raise(
+ exc=Name(id='x', ctx=Load()),
+ cause=Name(id='y', ctx=Load()))],
+ type_ignores=[])
+
+
+.. class:: Assert(test, msg)
+
+ An assertion. ``test`` holds the condition, such as a :class:`Compare` node.
+ ``msg`` holds the failure message.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('assert x,y'), indent=4))
+ Module(
+ body=[
+ Assert(
+ test=Name(id='x', ctx=Load()),
+ msg=Name(id='y', ctx=Load()))],
+ type_ignores=[])
+
+
+.. class:: Delete(targets)
+
+ Represents a ``del`` statement. ``targets`` is a list of nodes, such as
+ :class:`Name`, :class:`Attribute` or :class:`Subscript` nodes.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('del x,y,z'), indent=4))
+ Module(
+ body=[
+ Delete(
+ targets=[
+ Name(id='x', ctx=Del()),
+ Name(id='y', ctx=Del()),
+ Name(id='z', ctx=Del())])],
+ type_ignores=[])
+
+
+.. class:: Pass()
+
+ A ``pass`` statement.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('pass'), indent=4))
+ Module(
+ body=[
+ Pass()],
+ type_ignores=[])
+
+
+Other statements which are only applicable inside functions or loops are
+described in other sections.
+
+Imports
+~~~~~~~
+
+.. class:: Import(names)
+
+ An import statement. ``names`` is a list of :class:`alias` nodes.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('import x,y,z'), indent=4))
+ Module(
+ body=[
+ Import(
+ names=[
+ alias(name='x'),
+ alias(name='y'),
+ alias(name='z')])],
+ type_ignores=[])
+
+
+.. class:: ImportFrom(module, names, level)
+
+ Represents ``from x import y``. ``module`` is a raw string of the 'from' name,
+ without any leading dots, or ``None`` for statements such as ``from . import foo``.
+ ``level`` is an integer holding the level of the relative import (0 means
+ absolute import).
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('from y import x,y,z'), indent=4))
+ Module(
+ body=[
+ ImportFrom(
+ module='y',
+ names=[
+ alias(name='x'),
+ alias(name='y'),
+ alias(name='z')],
+ level=0)],
+ type_ignores=[])
+
+
+.. class:: alias(name, asname)
+
+ Both parameters are raw strings of the names. ``asname`` can be ``None`` if
+ the regular name is to be used.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('from ..foo.bar import a as b, c'), indent=4))
+ Module(
+ body=[
+ ImportFrom(
+ module='foo.bar',
+ names=[
+ alias(name='a', asname='b'),
+ alias(name='c')],
+ level=2)],
+ type_ignores=[])
+
+Control flow
+^^^^^^^^^^^^
+
+.. note::
+ Optional clauses such as ``else`` are stored as an empty list if they're
+ not present.
+
+.. class:: If(test, body, orelse)
+
+ An ``if`` statement. ``test`` holds a single node, such as a :class:`Compare`
+ node. ``body`` and ``orelse`` each hold a list of nodes.
+
+ ``elif`` clauses don't have a special representation in the AST, but rather
+ appear as extra :class:`If` nodes within the ``orelse`` section of the
+ previous one.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse("""
+ ... if x:
+ ... ...
+ ... elif y:
+ ... ...
+ ... else:
+ ... ...
+ ... """), indent=4))
+ Module(
+ body=[
+ If(
+ test=Name(id='x', ctx=Load()),
+ body=[
+ Expr(
+ value=Constant(value=Ellipsis))],
+ orelse=[
+ If(
+ test=Name(id='y', ctx=Load()),
+ body=[
+ Expr(
+ value=Constant(value=Ellipsis))],
+ orelse=[
+ Expr(
+ value=Constant(value=Ellipsis))])])],
+ type_ignores=[])
+
+
+.. class:: For(target, iter, body, orelse, type_comment)
+
+ A ``for`` loop. ``target`` holds the variable(s) the loop assigns to, as a
+ single :class:`Name`, :class:`Tuple` or :class:`List` node. ``iter`` holds
+ the item to be looped over, again as a single node. ``body`` and ``orelse``
+ contain lists of nodes to execute. Those in ``orelse`` are executed if the
+ loop finishes normally, rather than via a ``break`` statement.
+
+ .. attribute:: type_comment
+
+ ``type_comment`` is an optional string with the type annotation as a comment.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse("""
+ ... for x in y:
+ ... ...
+ ... else:
+ ... ...
+ ... """), indent=4))
+ Module(
+ body=[
+ For(
+ target=Name(id='x', ctx=Store()),
+ iter=Name(id='y', ctx=Load()),
+ body=[
+ Expr(
+ value=Constant(value=Ellipsis))],
+ orelse=[
+ Expr(
+ value=Constant(value=Ellipsis))])],
+ type_ignores=[])
+
+
+.. class:: While(test, body, orelse)
+
+ A ``while`` loop. ``test`` holds the condition, such as a :class:`Compare`
+ node.
+
+ .. doctest::
+
+      >>> print(ast.dump(ast.parse("""
+ ... while x:
+ ... ...
+ ... else:
+ ... ...
+ ... """), indent=4))
+ Module(
+ body=[
+ While(
+ test=Name(id='x', ctx=Load()),
+ body=[
+ Expr(
+ value=Constant(value=Ellipsis))],
+ orelse=[
+ Expr(
+ value=Constant(value=Ellipsis))])],
+ type_ignores=[])
+
+
+.. class:: Break
+ Continue
+
+ The ``break`` and ``continue`` statements.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse("""\
+ ... for a in b:
+ ... if a > 5:
+ ... break
+ ... else:
+ ... continue
+ ...
+ ... """), indent=4))
+ Module(
+ body=[
+ For(
+ target=Name(id='a', ctx=Store()),
+ iter=Name(id='b', ctx=Load()),
+ body=[
+ If(
+ test=Compare(
+ left=Name(id='a', ctx=Load()),
+ ops=[
+ Gt()],
+ comparators=[
+ Constant(value=5)]),
+ body=[
+ Break()],
+ orelse=[
+ Continue()])],
+ orelse=[])],
+ type_ignores=[])
+
+
+.. class:: Try(body, handlers, orelse, finalbody)
+
+ ``try`` blocks. All attributes are list of nodes to execute, except for
+ ``handlers``, which is a list of :class:`ExceptHandler` nodes.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse("""
+ ... try:
+ ... ...
+ ... except Exception:
+ ... ...
+ ... except OtherException as e:
+ ... ...
+ ... else:
+ ... ...
+ ... finally:
+ ... ...
+ ... """), indent=4))
+ Module(
+ body=[
+ Try(
+ body=[
+ Expr(
+ value=Constant(value=Ellipsis))],
+ handlers=[
+ ExceptHandler(
+ type=Name(id='Exception', ctx=Load()),
+ body=[
+ Expr(
+ value=Constant(value=Ellipsis))]),
+ ExceptHandler(
+ type=Name(id='OtherException', ctx=Load()),
+ name='e',
+ body=[
+ Expr(
+ value=Constant(value=Ellipsis))])],
+ orelse=[
+ Expr(
+ value=Constant(value=Ellipsis))],
+ finalbody=[
+ Expr(
+ value=Constant(value=Ellipsis))])],
+ type_ignores=[])
+
+
+.. class:: ExceptHandler(type, name, body)
+
+ A single ``except`` clause. ``type`` is the exception type it will match,
+ typically a :class:`Name` node (or ``None`` for a catch-all ``except:`` clause).
+ ``name`` is a raw string for the name to hold the exception, or ``None`` if
+ the clause doesn't have ``as foo``. ``body`` is a list of nodes.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse("""\
+ ... try:
+ ... a + 1
+ ... except TypeError:
+ ... pass
+ ... """), indent=4))
+ Module(
+ body=[
+ Try(
+ body=[
+ Expr(
+ value=BinOp(
+ left=Name(id='a', ctx=Load()),
+ op=Add(),
+ right=Constant(value=1)))],
+ handlers=[
+ ExceptHandler(
+ type=Name(id='TypeError', ctx=Load()),
+ body=[
+ Pass()])],
+ orelse=[],
+ finalbody=[])],
+ type_ignores=[])
+
+
+.. class:: With(items, body, type_comment)
+
+ A ``with`` block. ``items`` is a list of :class:`withitem` nodes representing
+ the context managers, and ``body`` is the indented block inside the context.
+
+ .. attribute:: type_comment
+
+ ``type_comment`` is an optional string with the type annotation as a comment.
+
+
+.. class:: withitem(context_expr, optional_vars)
+
+ A single context manager in a ``with`` block. ``context_expr`` is the context
+ manager, often a :class:`Call` node. ``optional_vars`` is a :class:`Name`,
+ :class:`Tuple` or :class:`List` for the ``as foo`` part, or ``None`` if that
+ isn't used.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse("""\
+ ... with a as b, c as d:
+ ... something(b, d)
+ ... """), indent=4))
+ Module(
+ body=[
+ With(
+ items=[
+ withitem(
+ context_expr=Name(id='a', ctx=Load()),
+ optional_vars=Name(id='b', ctx=Store())),
+ withitem(
+ context_expr=Name(id='c', ctx=Load()),
+ optional_vars=Name(id='d', ctx=Store()))],
+ body=[
+ Expr(
+ value=Call(
+ func=Name(id='something', ctx=Load()),
+ args=[
+ Name(id='b', ctx=Load()),
+ Name(id='d', ctx=Load())],
+ keywords=[]))])],
+ type_ignores=[])
+
+
+Function and class definitions
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. class:: FunctionDef(name, args, body, decorator_list, returns, type_comment)
+
+ A function definition.
+
+ * ``name`` is a raw string of the function name.
+ * ``args`` is a :class:`arguments` node.
+ * ``body`` is the list of nodes inside the function.
+ * ``decorator_list`` is the list of decorators to be applied, stored outermost
+ first (i.e. the first in the list will be applied last).
+ * ``returns`` is the return annotation.
+
+ .. attribute:: type_comment
+
+ ``type_comment`` is an optional string with the type annotation as a comment.
+
+
+.. class:: Lambda(args, body)
+
+ ``lambda`` is a minimal function definition that can be used inside an
+ expression. Unlike :class:`FunctionDef`, ``body`` holds a single node.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('lambda x,y: ...'), indent=4))
+ Module(
+ body=[
+ Expr(
+ value=Lambda(
+ args=arguments(
+ posonlyargs=[],
+ args=[
+ arg(arg='x'),
+ arg(arg='y')],
+ kwonlyargs=[],
+ kw_defaults=[],
+ defaults=[]),
+ body=Constant(value=Ellipsis)))],
+ type_ignores=[])
+
+
+.. class:: arguments(posonlyargs, args, vararg, kwonlyargs, kw_defaults, kwarg, defaults)
+
+ The arguments for a function.
+
+ * ``posonlyargs``, ``args`` and ``kwonlyargs`` are lists of :class:`arg` nodes.
+ * ``vararg`` and ``kwarg`` are single :class:`arg` nodes, referring to the
+ ``*args, **kwargs`` parameters.
+ * ``kw_defaults`` is a list of default values for keyword-only arguments. If
+ one is ``None``, the corresponding argument is required.
+ * ``defaults`` is a list of default values for arguments that can be passed
+ positionally. If there are fewer defaults, they correspond to the last n
+ arguments.
+
+
+.. class:: arg(arg, annotation, type_comment)
+
+ A single argument in a list. ``arg`` is a raw string of the argument
+ name, ``annotation`` is its annotation, such as a :class:`Str` or
+ :class:`Name` node.
+
+ .. attribute:: type_comment
+
+ ``type_comment`` is an optional string with the type annotation as a comment
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse("""\
+ ... @decorator1
+ ... @decorator2
+ ... def f(a: 'annotation', b=1, c=2, *d, e, f=3, **g) -> 'return annotation':
+ ... pass
+ ... """), indent=4))
+ Module(
+ body=[
+ FunctionDef(
+ name='f',
+ args=arguments(
+ posonlyargs=[],
+ args=[
+ arg(
+ arg='a',
+ annotation=Constant(value='annotation')),
+ arg(arg='b'),
+ arg(arg='c')],
+ vararg=arg(arg='d'),
+ kwonlyargs=[
+ arg(arg='e'),
+ arg(arg='f')],
+ kw_defaults=[
+ None,
+ Constant(value=3)],
+ kwarg=arg(arg='g'),
+ defaults=[
+ Constant(value=1),
+ Constant(value=2)]),
+ body=[
+ Pass()],
+ decorator_list=[
+ Name(id='decorator1', ctx=Load()),
+ Name(id='decorator2', ctx=Load())],
+ returns=Constant(value='return annotation'))],
+ type_ignores=[])
+
+
+.. class:: Return(value)
+
+ A ``return`` statement.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('return 4'), indent=4))
+ Module(
+ body=[
+ Return(
+ value=Constant(value=4))],
+ type_ignores=[])
+
+
+.. class:: Yield(value)
+ YieldFrom(value)
+
+ A ``yield`` or ``yield from`` expression. Because these are expressions, they
+ must be wrapped in a :class:`Expr` node if the value sent back is not used.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('yield x'), indent=4))
+ Module(
+ body=[
+ Expr(
+ value=Yield(
+ value=Name(id='x', ctx=Load())))],
+ type_ignores=[])
+
+ >>> print(ast.dump(ast.parse('yield from x'), indent=4))
+ Module(
+ body=[
+ Expr(
+ value=YieldFrom(
+ value=Name(id='x', ctx=Load())))],
+ type_ignores=[])
+
+
+.. class:: Global(names)
+ Nonlocal(names)
+
+ ``global`` and ``nonlocal`` statements. ``names`` is a list of raw strings.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse('global x,y,z'), indent=4))
+ Module(
+ body=[
+ Global(
+ names=[
+ 'x',
+ 'y',
+ 'z'])],
+ type_ignores=[])
+
+ >>> print(ast.dump(ast.parse('nonlocal x,y,z'), indent=4))
+ Module(
+ body=[
+ Nonlocal(
+ names=[
+ 'x',
+ 'y',
+ 'z'])],
+ type_ignores=[])
+
+
+.. class:: ClassDef(name, bases, keywords, starargs, kwargs, body, decorator_list)
+
+ A class definition.
+
+ * ``name`` is a raw string for the class name
+ * ``bases`` is a list of nodes for explicitly specified base classes.
+ * ``keywords`` is a list of :class:`keyword` nodes, principally for 'metaclass'.
+   Other keywords will be passed to the metaclass, as per `PEP-3115
+   <https://www.python.org/dev/peps/pep-3115/>`_.
+ * ``starargs`` and ``kwargs`` are each a single node, as in a function call.
+ starargs will be expanded to join the list of base classes, and kwargs will
+ be passed to the metaclass.
+ * ``body`` is a list of nodes representing the code within the class
+ definition.
+ * ``decorator_list`` is a list of nodes, as in :class:`FunctionDef`.
+
+ .. doctest::
+
+ >>> print(ast.dump(ast.parse("""\
+ ... @decorator1
+ ... @decorator2
+ ... class Foo(base1, base2, metaclass=meta):
+ ... pass
+ ... """), indent=4))
+ Module(
+ body=[
+ ClassDef(
+ name='Foo',
+ bases=[
+ Name(id='base1', ctx=Load()),
+ Name(id='base2', ctx=Load())],
+ keywords=[
+ keyword(
+ arg='metaclass',
+ value=Name(id='meta', ctx=Load()))],
+ body=[
+ Pass()],
+ decorator_list=[
+ Name(id='decorator1', ctx=Load()),
+ Name(id='decorator2', ctx=Load())])],
+ type_ignores=[])
+
+Async and await
+^^^^^^^^^^^^^^^
+
+.. class:: AsyncFunctionDef(name, args, body, decorator_list, returns, type_comment)
+
+ An ``async def`` function definition. Has the same fields as
+ :class:`FunctionDef`.
+
+
+.. class:: Await(value)
+
+ An ``await`` expression. ``value`` is what it waits for.
+ Only valid in the body of an :class:`AsyncFunctionDef`.
+
+.. doctest::
+
+ >>> print(ast.dump(ast.parse("""\
+ ... async def f():
+ ... await other_func()
+ ... """), indent=4))
+ Module(
+ body=[
+ AsyncFunctionDef(
+ name='f',
+ args=arguments(
+ posonlyargs=[],
+ args=[],
+ kwonlyargs=[],
+ kw_defaults=[],
+ defaults=[]),
+ body=[
+ Expr(
+ value=Await(
+ value=Call(
+ func=Name(id='other_func', ctx=Load()),
+ args=[],
+ keywords=[])))],
+ decorator_list=[])],
+ type_ignores=[])
+
+
+.. class:: AsyncFor(target, iter, body, orelse, type_comment)
+ AsyncWith(items, body, type_comment)
+
+ ``async for`` loops and ``async with`` context managers. They have the same
+ fields as :class:`For` and :class:`With`, respectively. Only valid in the
+ body of an :class:`AsyncFunctionDef`.
:mod:`ast` Helpers
@@ -318,9 +1702,9 @@ and classes for traversing abstract syntax trees:
def visit_Name(self, node):
return Subscript(
value=Name(id='data', ctx=Load()),
- slice=Index(value=Constant(value=node.id)),
+ slice=Constant(value=node.id),
ctx=node.ctx
- ), node)
+ )
Keep in mind that if the node you're operating on has child nodes you must
either transform the child nodes yourself or call the :meth:`generic_visit`
diff --git a/Doc/library/asyncio-api-index.rst b/Doc/library/asyncio-api-index.rst
index d5b5659abc65e2..047e5bbc58ccad 100644
--- a/Doc/library/asyncio-api-index.rst
+++ b/Doc/library/asyncio-api-index.rst
@@ -48,6 +48,9 @@ await on multiple things with timeouts.
* - :class:`Task`
- Task object.
+ * - :func:`to_thread`
+   - Asynchronously run a function in a separate OS thread.
+
* - :func:`run_coroutine_threadsafe`
- Schedule a coroutine from another OS thread.
diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst
index 0029d94f0b5986..d60a6ce95cdd87 100644
--- a/Doc/library/asyncio-eventloop.rst
+++ b/Doc/library/asyncio-eventloop.rst
@@ -359,7 +359,8 @@ Opening network connections
host=None, port=None, \*, ssl=None, \
family=0, proto=0, flags=0, sock=None, \
local_addr=None, server_hostname=None, \
- ssl_handshake_timeout=None)
+ ssl_handshake_timeout=None, \
+ happy_eyeballs_delay=None, interleave=None)
Open a streaming transport connection to a given
address specified by *host* and *port*.
@@ -448,7 +449,18 @@ Opening network connections
.. versionadded:: 3.8
- The *happy_eyeballs_delay* and *interleave* parameters.
+ Added the *happy_eyeballs_delay* and *interleave* parameters.
+
+ Happy Eyeballs Algorithm: Success with Dual-Stack Hosts.
+ When a server's IPv4 path and protocol are working, but the server's
+ IPv6 path and protocol are not working, a dual-stack client
+ application experiences significant connection delay compared to an
+ IPv4-only client. This is undesirable because it causes the dual-
+ stack client to have a worse user experience. This document
+ specifies requirements for algorithms that reduce this user-visible
+ delay and provides an algorithm.
+
+ For more information: https://tools.ietf.org/html/rfc6555
.. versionadded:: 3.7
diff --git a/Doc/library/asyncio-future.rst b/Doc/library/asyncio-future.rst
index 832d58119b7b07..e1ac18eaf09882 100644
--- a/Doc/library/asyncio-future.rst
+++ b/Doc/library/asyncio-future.rst
@@ -170,7 +170,7 @@ Future Object
Returns the number of callbacks removed, which is typically 1,
unless a callback was added more than once.
- .. method:: cancel()
+ .. method:: cancel(msg=None)
Cancel the Future and schedule callbacks.
@@ -178,6 +178,9 @@ Future Object
Otherwise, change the Future's state to *cancelled*,
schedule the callbacks, and return ``True``.
+ .. versionchanged:: 3.9
+ Added the ``msg`` parameter.
+
.. method:: exception()
Return the exception that was set on this Future.
@@ -255,3 +258,6 @@ the Future has a result::
- asyncio Future is not compatible with the
:func:`concurrent.futures.wait` and
:func:`concurrent.futures.as_completed` functions.
+
+ - :meth:`asyncio.Future.cancel` accepts an optional ``msg`` argument,
+   but :meth:`concurrent.futures.Future.cancel` does not.
diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst
index 1d87d2f8005ec6..eb1312a949a10a 100644
--- a/Doc/library/asyncio-subprocess.rst
+++ b/Doc/library/asyncio-subprocess.rst
@@ -95,14 +95,14 @@ Creating Subprocesses
See the documentation of :meth:`loop.subprocess_shell` for other
parameters.
-.. important::
-
- It is the application's responsibility to ensure that all whitespace and
- special characters are quoted appropriately to avoid `shell injection
- `_
- vulnerabilities. The :func:`shlex.quote` function can be used to properly
- escape whitespace and special shell characters in strings that are going
- to be used to construct shell commands.
+ .. important::
+
+ It is the application's responsibility to ensure that all whitespace and
+     special characters are quoted appropriately to avoid `shell injection
+     <https://en.wikipedia.org/wiki/Shell_injection#Shell_injection>`_
+ vulnerabilities. The :func:`shlex.quote` function can be used to properly
+ escape whitespace and special shell characters in strings that are going
+ to be used to construct shell commands.
.. deprecated-removed:: 3.8 3.10
diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst
index d992b0011dc660..847363b134a7a0 100644
--- a/Doc/library/asyncio-task.rst
+++ b/Doc/library/asyncio-task.rst
@@ -453,7 +453,8 @@ Timeouts
wrap it in :func:`shield`.
The function will wait until the future is actually cancelled,
- so the total wait time may exceed the *timeout*.
+ so the total wait time may exceed the *timeout*. If an exception
+ happens during cancellation, it is propagated.
If the wait is cancelled, the future *aw* is also cancelled.
@@ -498,6 +499,8 @@ Waiting Primitives
set concurrently and block until the condition specified
by *return_when*.
+ The *aws* set must not be empty.
+
Returns two sets of Tasks/Futures: ``(done, pending)``.
Usage::
@@ -573,7 +576,7 @@ Waiting Primitives
if task in done:
# Everything will work as expected now.
- .. deprecated:: 3.8
+ .. deprecated-removed:: 3.8 3.11
Passing coroutine objects to ``wait()`` directly is
deprecated.
@@ -582,9 +585,9 @@ Waiting Primitives
.. function:: as_completed(aws, \*, loop=None, timeout=None)
   Run :ref:`awaitable objects <asyncio-awaitables>` in the *aws*
- set concurrently. Return an iterator of :class:`Future` objects.
- Each Future object returned represents the earliest result
- from the set of the remaining awaitables.
+ set concurrently. Return an iterator of coroutines.
+ Each coroutine returned can be awaited to get the earliest next
+ result from the set of the remaining awaitables.
Raises :exc:`asyncio.TimeoutError` if the timeout occurs before
all Futures are done.
@@ -594,11 +597,71 @@ Waiting Primitives
Example::
- for f in as_completed(aws):
- earliest_result = await f
+ for coro in as_completed(aws):
+ earliest_result = await coro
# ...
+Running in Threads
+==================
+
+.. coroutinefunction:: to_thread(func, /, \*args, \*\*kwargs)
+
+ Asynchronously run function *func* in a separate thread.
+
+ Any \*args and \*\*kwargs supplied for this function are directly passed
+   to *func*. Also, the current :class:`contextvars.Context` is propagated,
+ allowing context variables from the event loop thread to be accessed in the
+ separate thread.
+
+ Return an :class:`asyncio.Future` which represents the eventual result of
+ *func*.
+
+ This coroutine function is primarily intended to be used for executing
+ IO-bound functions/methods that would otherwise block the event loop if
+   they were run in the main thread. For example::
+
+ def blocking_io():
+ print(f"start blocking_io at {time.strftime('%X')}")
+ # Note that time.sleep() can be replaced with any blocking
+ # IO-bound operation, such as file operations.
+ time.sleep(1)
+ print(f"blocking_io complete at {time.strftime('%X')}")
+
+ async def main():
+ print(f"started main at {time.strftime('%X')}")
+
+ await asyncio.gather(
+ asyncio.to_thread(blocking_io),
+ asyncio.sleep(1))
+
+ print(f"finished main at {time.strftime('%X')}")
+
+
+ asyncio.run(main())
+
+ # Expected output:
+ #
+ # started main at 19:50:53
+ # start blocking_io at 19:50:53
+ # blocking_io complete at 19:50:54
+ # finished main at 19:50:54
+
+ Directly calling `blocking_io()` in any coroutine would block the event loop
+ for its duration, resulting in an additional 1 second of run time. Instead,
+ by using `asyncio.to_thread()`, we can run it in a separate thread without
+ blocking the event loop.
+
+ .. note::
+
+ Due to the :term:`GIL`, `asyncio.to_thread()` can typically only be used
+ to make IO-bound functions non-blocking. However, for extension modules
+ that release the GIL or alternative Python implementations that don't
+ have one, `asyncio.to_thread()` can also be used for CPU-bound functions.
+
+ .. versionadded:: 3.9
+
+
Scheduling From Other Threads
=============================
@@ -721,7 +784,7 @@ Task Object
.. deprecated-removed:: 3.8 3.10
The *loop* parameter.
- .. method:: cancel()
+ .. method:: cancel(msg=None)
Request the Task to be cancelled.
@@ -736,6 +799,9 @@ Task Object
suppressing cancellation completely is not common and is actively
discouraged.
+ .. versionchanged:: 3.9
+ Added the ``msg`` parameter.
+
.. _asyncio_example_task_cancel:
The following example illustrates how coroutines can intercept
diff --git a/Doc/library/code.rst b/Doc/library/code.rst
index 6708079f778c1a..538e5afc7822aa 100644
--- a/Doc/library/code.rst
+++ b/Doc/library/code.rst
@@ -56,8 +56,8 @@ build applications which provide an interactive interpreter prompt.
*source* is the source string; *filename* is the optional filename from which
   source was read, defaulting to ``'<input>'``; and *symbol* is the optional
- grammar start symbol, which should be either ``'single'`` (the default) or
- ``'eval'``.
+ grammar start symbol, which should be ``'single'`` (the default), ``'eval'``
+ or ``'exec'``.
Returns a code object (the same as ``compile(source, filename, symbol)``) if the
command is complete and valid; ``None`` if the command is incomplete; raises
diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst
index ec6a0533033bd1..f071057293eece 100644
--- a/Doc/library/codecs.rst
+++ b/Doc/library/codecs.rst
@@ -197,9 +197,6 @@ wider range of codecs when working with binary files:
*buffering* has the same meaning as for the built-in :func:`open` function.
It defaults to -1 which means that the default buffer size will be used.
- .. versionchanged:: 3.9
- The ``'U'`` mode has been removed.
-
.. function:: EncodedFile(file, data_encoding, file_encoding=None, errors='strict')
diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst
index a52d2c62c4fea1..c66b9d3ec0a26d 100644
--- a/Doc/library/codeop.rst
+++ b/Doc/library/codeop.rst
@@ -43,8 +43,9 @@ To do just the former:
:exc:`OverflowError` or :exc:`ValueError` if there is an invalid literal.
The *symbol* argument determines whether *source* is compiled as a statement
- (``'single'``, the default) or as an :term:`expression` (``'eval'``). Any
- other value will cause :exc:`ValueError` to be raised.
+ (``'single'``, the default), as a sequence of statements (``'exec'``) or
+ as an :term:`expression` (``'eval'``). Any other value will
+ cause :exc:`ValueError` to be raised.
.. note::
diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst
index a5e8d04099b22f..ea2b420292eb0f 100644
--- a/Doc/library/collections.rst
+++ b/Doc/library/collections.rst
@@ -33,10 +33,10 @@ Python's general purpose built-in containers, :class:`dict`, :class:`list`,
:class:`UserString` wrapper around string objects for easier string subclassing
===================== ====================================================================
-.. deprecated-removed:: 3.3 3.9
+.. deprecated-removed:: 3.3 3.10
Moved :ref:`collections-abstract-base-classes` to the :mod:`collections.abc` module.
For backwards compatibility, they continue to be visible in this module through
- Python 3.8.
+ Python 3.9.
:class:`ChainMap` objects
@@ -116,6 +116,9 @@ The class can be used to simulate nested scopes and is useful in templating.
>>> list(combined)
['music', 'art', 'opera']
+ .. versionchanged:: 3.9
+ Added support for ``|`` and ``|=`` operators, specified in :pep:`584`.
+
.. seealso::
* The `MultiContext class
@@ -287,6 +290,47 @@ For example::
>>> sorted(c.elements())
['a', 'a', 'a', 'a', 'b', 'b']
+ .. method:: isdisjoint(other)
+
+ True if none of the elements in *self* overlap with those in *other*.
+ Negative or missing counts are ignored.
+ Logically equivalent to: ``not (+self) & (+other)``
+
+ .. versionadded:: 3.10
+
+ .. method:: isequal(other)
+
+ Test whether counts agree exactly.
+ Negative or missing counts are treated as zero.
+
+ This method works differently than the inherited :meth:`__eq__` method
+ which treats negative or missing counts as distinct from zero::
+
+ >>> Counter(a=1, b=0).isequal(Counter(a=1))
+ True
+ >>> Counter(a=1, b=0) == Counter(a=1)
+ False
+
+ Logically equivalent to: ``+self == +other``
+
+ .. versionadded:: 3.10
+
+ .. method:: issubset(other)
+
+ True if the counts in *self* are less than or equal to those in *other*.
+ Negative or missing counts are treated as zero.
+ Logically equivalent to: ``not self - (+other)``
+
+ .. versionadded:: 3.10
+
+ .. method:: issuperset(other)
+
+ True if the counts in *self* are greater than or equal to those in *other*.
+ Negative or missing counts are treated as zero.
+ Logically equivalent to: ``not other - (+self)``
+
+ .. versionadded:: 3.10
+
.. method:: most_common([n])
Return a list of the *n* most common elements and their counts from the
@@ -729,6 +773,10 @@ stack manipulations such as ``dup``, ``drop``, ``swap``, ``over``, ``pick``,
initialized from the first argument to the constructor, if present, or to
``None``, if absent.
+ .. versionchanged:: 3.9
+ Added merge (``|``) and update (``|=``) operators, specified in
+ :pep:`584`.
+
:class:`defaultdict` Examples
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1119,6 +1167,10 @@ anywhere a regular dictionary is used.
passed to the :class:`OrderedDict` constructor and its :meth:`update`
method.
+.. versionchanged:: 3.9
+ Added merge (``|``) and update (``|=``) operators, specified in :pep:`584`.
+
+
:class:`OrderedDict` Examples and Recipes
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1150,6 +1202,8 @@ variants of :func:`functools.lru_cache`::
return value
def __setitem__(self, key, value):
+ if key in self:
+ self.move_to_end(key)
super().__setitem__(key, value)
if len(self) > self.maxsize:
oldest = next(iter(self))
diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst
index 394d60634f1e0d..9b914b1f0d9c6d 100644
--- a/Doc/library/compileall.rst
+++ b/Doc/library/compileall.rst
@@ -113,6 +113,11 @@ compile Python sources.
Ignore symlinks pointing outside the given directory.
+.. cmdoption:: --hardlink-dupes
+
+ If two ``.pyc`` files with different optimization level have
+ the same content, use hard links to consolidate duplicate files.
+
.. versionchanged:: 3.2
Added the ``-i``, ``-b`` and ``-h`` options.
@@ -125,7 +130,7 @@ compile Python sources.
Added the ``--invalidation-mode`` option.
.. versionchanged:: 3.9
- Added the ``-s``, ``-p``, ``-e`` options.
+ Added the ``-s``, ``-p``, ``-e`` and ``--hardlink-dupes`` options.
Raised the default recursion limit from 10 to
:py:func:`sys.getrecursionlimit()`.
Added the possibility to specify the ``-o`` option multiple times.
@@ -143,14 +148,14 @@ runtime.
Public functions
----------------
-.. function:: compile_dir(dir, maxlevels=sys.getrecursionlimit(), ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, stripdir=None, prependdir=None, limit_sl_dest=None)
+.. function:: compile_dir(dir, maxlevels=sys.getrecursionlimit(), ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None, hardlink_dupes=False)
Recursively descend the directory tree named by *dir*, compiling all :file:`.py`
files along the way. Return a true value if all the files compiled successfully,
and a false value otherwise.
The *maxlevels* parameter is used to limit the depth of the recursion; it
- defaults to ``10``.
+ defaults to ``sys.getrecursionlimit()``.
If *ddir* is given, it is prepended to the path to each file being compiled
for use in compilation time tracebacks, and is also compiled in to the
@@ -176,7 +181,8 @@ Public functions
coexist.
*optimize* specifies the optimization level for the compiler. It is passed to
- the built-in :func:`compile` function.
+ the built-in :func:`compile` function. Accepts also a sequence of optimization
+ levels which lead to multiple compilations of one :file:`.py` file in one call.
The argument *workers* specifies how many workers are used to
compile files in parallel. The default is to not use multiple workers.
@@ -193,6 +199,9 @@ Public functions
the ``-s``, ``-p`` and ``-e`` options described above.
They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`.
+ If *hardlink_dupes* is true and two ``.pyc`` files with different optimization
+ level have the same content, use hard links to consolidate duplicate files.
+
.. versionchanged:: 3.2
Added the *legacy* and *optimize* parameter.
@@ -219,9 +228,10 @@ Public functions
Setting *workers* to 0 now chooses the optimal number of cores.
.. versionchanged:: 3.9
- Added *stripdir*, *prependdir* and *limit_sl_dest* arguments.
+ Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments.
+ Default value of *maxlevels* was changed from ``10`` to ``sys.getrecursionlimit()``
-.. function:: compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None)
+.. function:: compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None, hardlink_dupes=False)
Compile the file with path *fullname*. Return a true value if the file
compiled successfully, and a false value otherwise.
@@ -247,7 +257,8 @@ Public functions
coexist.
*optimize* specifies the optimization level for the compiler. It is passed to
- the built-in :func:`compile` function.
+ the built-in :func:`compile` function. Accepts also a sequence of optimization
+ levels which lead to multiple compilations of one :file:`.py` file in one call.
*invalidation_mode* should be a member of the
:class:`py_compile.PycInvalidationMode` enum and controls how the generated
@@ -257,6 +268,9 @@ Public functions
the ``-s``, ``-p`` and ``-e`` options described above.
They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`.
+ If *hardlink_dupes* is true and two ``.pyc`` files with different optimization
+ level have the same content, use hard links to consolidate duplicate files.
+
.. versionadded:: 3.2
.. versionchanged:: 3.5
@@ -273,7 +287,7 @@ Public functions
The *invalidation_mode* parameter's default value is updated to None.
.. versionchanged:: 3.9
- Added *stripdir*, *prependdir* and *limit_sl_dest* arguments.
+ Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments.
.. function:: compile_path(skip_curdir=True, maxlevels=0, force=False, quiet=0, legacy=False, optimize=-1, invalidation_mode=None)
diff --git a/Doc/library/constants.rst b/Doc/library/constants.rst
index 501715980f9572..f17e1a37875168 100644
--- a/Doc/library/constants.rst
+++ b/Doc/library/constants.rst
@@ -31,7 +31,7 @@ A small number of constants live in the built-in namespace. They are:
etc.) to indicate that the operation is not implemented with respect to
the other type; may be returned by the in-place binary special methods
(e.g. :meth:`__imul__`, :meth:`__iand__`, etc.) for the same purpose.
- Its truth value is true.
+ It should not be evaluated in a boolean context.
.. note::
@@ -50,6 +50,11 @@ A small number of constants live in the built-in namespace. They are:
even though they have similar names and purposes.
See :exc:`NotImplementedError` for details on when to use it.
+ .. versionchanged:: 3.9
+ Evaluating ``NotImplemented`` in a boolean context is deprecated. While
+ it currently evaluates as true, it will emit a :exc:`DeprecationWarning`.
+ It will raise a :exc:`TypeError` in a future version of Python.
+
.. index:: single: ...; ellipsis literal
.. data:: Ellipsis
diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst
index 49e22fa73ed265..61d39828e0194a 100644
--- a/Doc/library/csv.rst
+++ b/Doc/library/csv.rst
@@ -161,7 +161,8 @@ The :mod:`csv` module defines the following classes:
If a row has more fields than fieldnames, the remaining data is put in a
list and stored with the fieldname specified by *restkey* (which defaults
to ``None``). If a non-blank row has fewer fields than fieldnames, the
- missing values are filled-in with ``None``.
+ missing values are filled-in with the value of *restval* (which defaults
+ to ``None``).
All other optional or keyword arguments are passed to the underlying
:class:`reader` instance.
diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst
index c125a1130a9603..fe63d20671dd74 100644
--- a/Doc/library/dataclasses.rst
+++ b/Doc/library/dataclasses.rst
@@ -19,6 +19,8 @@ in :pep:`557`.
The member variables to use in these generated methods are defined
using :pep:`526` type annotations. For example this code::
+ from dataclasses import dataclass
+
@dataclass
class InventoryItem:
'''Class for keeping track of an item in inventory.'''
diff --git a/Doc/library/datatypes.rst b/Doc/library/datatypes.rst
index 94010c0e391b0b..675bbb6fafdca4 100644
--- a/Doc/library/datatypes.rst
+++ b/Doc/library/datatypes.rst
@@ -20,6 +20,7 @@ The following modules are documented in this chapter:
.. toctree::
datetime.rst
+ zoneinfo.rst
calendar.rst
collections.rst
collections.abc.rst
diff --git a/Doc/library/datetime.rst b/Doc/library/datetime.rst
index 22ecbb551d8973..508bc88e7f4b8a 100644
--- a/Doc/library/datetime.rst
+++ b/Doc/library/datetime.rst
@@ -35,7 +35,8 @@ on efficient attribute extraction for output formatting and manipulation.
Aware and Naive Objects
-----------------------
-Date and time objects may be categorized as "aware" or "naive."
+Date and time objects may be categorized as "aware" or "naive" depending on
+whether or not they include timezone information.
With sufficient knowledge of applicable algorithmic and political time
adjustments, such as time zone and daylight saving time information,
@@ -670,7 +671,8 @@ Instance methods:
.. method:: date.isocalendar()
- Return a 3-tuple, (ISO year, ISO week number, ISO weekday).
+ Return a :term:`named tuple` object with three components: ``year``,
+ ``week`` and ``weekday``.
The ISO calendar is a widely used variant of the Gregorian calendar. [#]_
@@ -682,11 +684,14 @@ Instance methods:
For example, 2004 begins on a Thursday, so the first week of ISO year 2004
begins on Monday, 29 Dec 2003 and ends on Sunday, 4 Jan 2004::
- >>> from datetime import date
- >>> date(2003, 12, 29).isocalendar()
- (2004, 1, 1)
- >>> date(2004, 1, 4).isocalendar()
- (2004, 1, 7)
+ >>> from datetime import date
+ >>> date(2003, 12, 29).isocalendar()
+ datetime.IsoCalendarDate(year=2004, week=1, weekday=1)
+ >>> date(2004, 1, 4).isocalendar()
+ datetime.IsoCalendarDate(year=2004, week=1, weekday=7)
+
+ .. versionchanged:: 3.9
+ Result changed from a tuple to a :term:`named tuple`.
.. method:: date.isoformat()
@@ -1397,8 +1402,8 @@ Instance methods:
.. method:: datetime.isocalendar()
- Return a 3-tuple, (ISO year, ISO week number, ISO weekday). The same as
- ``self.date().isocalendar()``.
+ Return a :term:`named tuple` with three components: ``year``, ``week``
+ and ``weekday``. The same as ``self.date().isocalendar()``.
.. method:: datetime.isoformat(sep='T', timespec='auto')
diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst
index bcae55eb821784..69a20fca17898a 100644
--- a/Doc/library/decimal.rst
+++ b/Doc/library/decimal.rst
@@ -1475,9 +1475,18 @@ are also included in the pure Python version for compatibility.
.. data:: HAVE_THREADS
- The default value is ``True``. If Python is compiled without threads, the
- C version automatically disables the expensive thread local context
- machinery. In this case, the value is ``False``.
+ The value is ``True``. Deprecated, because Python now always has threads.
+
+.. deprecated:: 3.9
+
+.. data:: HAVE_CONTEXTVAR
+
+ The default value is ``True``. If Python is compiled ``--without-decimal-contextvar``,
+ the C version uses a thread-local rather than a coroutine-local context and the value
+ is ``False``. This is slightly faster in some nested context scenarios.
+
+.. versionadded:: 3.9 backported to 3.7 and 3.8.
+
Rounding modes
--------------
@@ -2121,17 +2130,67 @@ Q. Is the CPython implementation fast for large numbers?
A. Yes. In the CPython and PyPy3 implementations, the C/CFFI versions of
 the decimal module integrate the high speed `libmpdec
 <https://www.bytereef.org/mpdecimal/>`_ library for
-arbitrary precision correctly-rounded decimal floating point arithmetic.
+arbitrary precision correctly-rounded decimal floating point arithmetic [#]_.
 ``libmpdec`` uses `Karatsuba multiplication
 <https://en.wikipedia.org/wiki/Karatsuba_algorithm>`_
 for medium-sized numbers and the `Number Theoretic Transform
 <https://en.wikipedia.org/wiki/Discrete_Fourier_transform_(general)#Number-theoretic_transform>`_
-for very large numbers. However, to realize this performance gain, the
-context needs to be set for unrounded calculations.
+for very large numbers.
+
+The context must be adapted for exact arbitrary precision arithmetic. :attr:`Emin`
+and :attr:`Emax` should always be set to the maximum values, :attr:`clamp`
+should always be 0 (the default). Setting :attr:`prec` requires some care.
+
+The easiest approach for trying out bignum arithmetic is to use the maximum
+value for :attr:`prec` as well [#]_::
+
+ >>> setcontext(Context(prec=MAX_PREC, Emax=MAX_EMAX, Emin=MIN_EMIN))
+ >>> x = Decimal(2) ** 256
+ >>> x / 128
+ Decimal('904625697166532776746648320380374280103671755200316906558262375061821325312')
+
+
+For inexact results, :attr:`MAX_PREC` is far too large on 64-bit platforms and
+the available memory will be insufficient::
+
+ >>> Decimal(1) / 3
+ Traceback (most recent call last):
+      File "<stdin>", line 1, in <module>
+ MemoryError
+
+On systems with overallocation (e.g. Linux), a more sophisticated approach is to
+adjust :attr:`prec` to the amount of available RAM. Suppose that you have 8GB of
+RAM and expect 10 simultaneous operands using a maximum of 500MB each::
+
+ >>> import sys
+ >>>
+ >>> # Maximum number of digits for a single operand using 500MB in 8-byte words
+ >>> # with 19 digits per word (4-byte and 9 digits for the 32-bit build):
+ >>> maxdigits = 19 * ((500 * 1024**2) // 8)
+ >>>
+ >>> # Check that this works:
+ >>> c = Context(prec=maxdigits, Emax=MAX_EMAX, Emin=MIN_EMIN)
+ >>> c.traps[Inexact] = True
+ >>> setcontext(c)
+ >>>
+ >>> # Fill the available precision with nines:
+ >>> x = Decimal(0).logical_invert() * 9
+ >>> sys.getsizeof(x)
+ 524288112
+ >>> x + 2
+ Traceback (most recent call last):
+      File "<stdin>", line 1, in <module>
+ decimal.Inexact: []
+
+In general (and especially on systems without overallocation), it is recommended
+to estimate even tighter bounds and set the :attr:`Inexact` trap if all calculations
+are expected to be exact.
+
- >>> c = getcontext()
- >>> c.prec = MAX_PREC
- >>> c.Emax = MAX_EMAX
- >>> c.Emin = MIN_EMIN
+.. [#]
+ .. versionadded:: 3.3
-.. versionadded:: 3.3
\ No newline at end of file
+.. [#]
+ .. versionchanged:: 3.9
+ This approach now works for all exact results except for non-integer powers.
+ Also backported to 3.7 and 3.8.
diff --git a/Doc/library/difflib.rst b/Doc/library/difflib.rst
index ada311bc3a205b..7a898c21b52e03 100644
--- a/Doc/library/difflib.rst
+++ b/Doc/library/difflib.rst
@@ -421,7 +421,7 @@ The :class:`SequenceMatcher` class has this constructor:
is not changed.
- .. method:: find_longest_match(alo, ahi, blo, bhi)
+ .. method:: find_longest_match(alo=0, ahi=None, blo=0, bhi=None)
Find longest matching block in ``a[alo:ahi]`` and ``b[blo:bhi]``.
@@ -458,6 +458,9 @@ The :class:`SequenceMatcher` class has this constructor:
This method returns a :term:`named tuple` ``Match(a, b, size)``.
+ .. versionchanged:: 3.9
+ Added default arguments.
+
.. method:: get_matching_blocks()
diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst
index 61233d98a0d184..f871ec4f13def8 100644
--- a/Doc/library/dis.rst
+++ b/Doc/library/dis.rst
@@ -244,7 +244,7 @@ operation is being performed, so the intermediate analysis object isn't useful:
.. function:: findlabels(code)
- Detect all offsets in the code object *code* which are jump targets, and
+ Detect all offsets in the raw compiled bytecode string *code* which are jump targets, and
return a list of these offsets.
@@ -844,9 +844,9 @@ All of the following opcodes use their arguments.
.. opcode:: BUILD_CONST_KEY_MAP (count)
- The version of :opcode:`BUILD_MAP` specialized for constant keys. *count*
- values are consumed from the stack. The top element on the stack contains
- a tuple of keys.
+ The version of :opcode:`BUILD_MAP` specialized for constant keys. Pops the
+ top element on the stack which contains a tuple of keys, then starting from
+ ``TOS1``, pops *count* values to form values in the built dictionary.
.. versionadded:: 3.6
@@ -986,7 +986,7 @@ All of the following opcodes use their arguments.
TOS is an :term:`iterator`. Call its :meth:`~iterator.__next__` method. If
this yields a new value, push it on the stack (leaving the iterator below
- it). If the iterator indicates it is exhausted TOS is popped, and the byte
+ it). If the iterator indicates it is exhausted, TOS is popped, and the byte
code counter is incremented by *delta*.
@@ -1136,7 +1136,7 @@ All of the following opcodes use their arguments.
.. versionadded:: 3.7
-.. opcode:: MAKE_FUNCTION (argc)
+.. opcode:: MAKE_FUNCTION (flags)
Pushes a new function object on the stack. From bottom to top, the consumed
stack must consist of values if the argument carries a specified flag value
diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst
index eaf29cfde2344d..4b4f5eb1944cc5 100644
--- a/Doc/library/enum.rst
+++ b/Doc/library/enum.rst
@@ -273,6 +273,10 @@ overridden::
the next :class:`int` in sequence with the last :class:`int` provided, but
the way it does this is an implementation detail and may change.
+.. note::
+
+ The :meth:`_generate_next_value_` method must be defined before any members.
+
Iteration
---------
diff --git a/Doc/library/fcntl.rst b/Doc/library/fcntl.rst
index 5c172b836acca9..07a15d27216e92 100644
--- a/Doc/library/fcntl.rst
+++ b/Doc/library/fcntl.rst
@@ -63,6 +63,8 @@ The module defines the following functions:
If the :c:func:`fcntl` fails, an :exc:`OSError` is raised.
+ .. audit-event:: fcntl.fcntl fd,cmd,arg fcntl.fcntl
+
.. function:: ioctl(fd, request, arg=0, mutate_flag=True)
@@ -112,6 +114,8 @@ The module defines the following functions:
>>> buf
array('h', [13341])
+ .. audit-event:: fcntl.ioctl fd,request,arg fcntl.ioctl
+
.. function:: flock(fd, operation)
@@ -122,6 +126,8 @@ The module defines the following functions:
If the :c:func:`flock` fails, an :exc:`OSError` exception is raised.
+ .. audit-event:: fcntl.flock fd,operation fcntl.flock
+
.. function:: lockf(fd, cmd, len=0, start=0, whence=0)
@@ -155,6 +161,8 @@ The module defines the following functions:
The default for *len* is 0 which means to lock to the end of the file. The
default for *whence* is also 0.
+ .. audit-event:: fcntl.lockf fd,cmd,len,start,whence fcntl.lockf
+
Examples (all on a SVR4 compliant system)::
import struct, fcntl, os
diff --git a/Doc/library/fileinput.rst b/Doc/library/fileinput.rst
index 236f5d563833e4..cc4039a30e38ae 100644
--- a/Doc/library/fileinput.rst
+++ b/Doc/library/fileinput.rst
@@ -148,8 +148,8 @@ available for subclassing as well:
The sequence must be accessed in strictly sequential order; random access
and :meth:`~io.TextIOBase.readline` cannot be mixed.
- With *mode* you can specify which file mode will be passed to :func:`open`.
- It must be ``'r'`` or ``'rb'``.
+ With *mode* you can specify which file mode will be passed to :func:`open`. It
+ must be one of ``'r'``, ``'rU'``, ``'U'`` and ``'rb'``.
The *openhook*, when given, must be a function that takes two arguments,
*filename* and *mode*, and returns an accordingly opened file-like object. You
@@ -166,14 +166,15 @@ available for subclassing as well:
.. versionchanged:: 3.2
Can be used as a context manager.
+ .. deprecated:: 3.4
+ The ``'rU'`` and ``'U'`` modes.
+
.. deprecated:: 3.8
Support for :meth:`__getitem__` method is deprecated.
.. versionchanged:: 3.8
The keyword parameter *mode* and *openhook* are now keyword-only.
- .. versionchanged:: 3.9
- The ``'rU'`` and ``'U'`` modes have been removed.
**Optional in-place filtering:** if the keyword argument ``inplace=True`` is
diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst
index d3a42762e3ff8d..a4d006eb58ffeb 100644
--- a/Doc/library/fractions.rst
+++ b/Doc/library/fractions.rst
@@ -84,6 +84,10 @@ another rational number, or from a string.
The :class:`Fraction` constructor now accepts :class:`float` and
:class:`decimal.Decimal` instances.
+ .. versionchanged:: 3.9
+ The :func:`math.gcd` function is now used to normalize the *numerator*
+      and *denominator*. :func:`math.gcd` always returns an :class:`int` type.
+ Previously, the GCD type depended on *numerator* and *denominator*.
.. attribute:: numerator
diff --git a/Doc/library/ftplib.rst b/Doc/library/ftplib.rst
index a4bb695a9ab10f..f4d4cdf9ada9d9 100644
--- a/Doc/library/ftplib.rst
+++ b/Doc/library/ftplib.rst
@@ -19,6 +19,8 @@ as mirroring other FTP servers. It is also used by the module
:mod:`urllib.request` to handle URLs that use FTP. For more information on FTP
(File Transfer Protocol), see Internet :rfc:`959`.
+The default encoding is UTF-8, following :rfc:`2640`.
+
Here's a sample session using the :mod:`ftplib` module::
>>> from ftplib import FTP
@@ -41,7 +43,7 @@ Here's a sample session using the :mod:`ftplib` module::
The module defines the following items:
-.. class:: FTP(host='', user='', passwd='', acct='', timeout=None, source_address=None)
+.. class:: FTP(host='', user='', passwd='', acct='', timeout=None, source_address=None, *, encoding='utf-8')
Return a new instance of the :class:`FTP` class. When *host* is given, the
method call ``connect(host)`` is made. When *user* is given, additionally
@@ -50,7 +52,8 @@ The module defines the following items:
parameter specifies a timeout in seconds for blocking operations like the
connection attempt (if is not specified, the global default timeout setting
will be used). *source_address* is a 2-tuple ``(host, port)`` for the socket
- to bind to as its source address before connecting.
+ to bind to as its source address before connecting. The *encoding* parameter
+ specifies the encoding for directories and filenames.
The :class:`FTP` class supports the :keyword:`with` statement, e.g.:
@@ -74,9 +77,11 @@ The module defines the following items:
.. versionchanged:: 3.9
If the *timeout* parameter is set to be zero, it will raise a
- :class:`ValueError` to prevent the creation of a non-blocking socket
+ :class:`ValueError` to prevent the creation of a non-blocking socket.
+ The *encoding* parameter was added, and the default was changed from
+ Latin-1 to UTF-8 to follow :rfc:`2640`.
-.. class:: FTP_TLS(host='', user='', passwd='', acct='', keyfile=None, certfile=None, context=None, timeout=None, source_address=None)
+.. class:: FTP_TLS(host='', user='', passwd='', acct='', keyfile=None, certfile=None, context=None, timeout=None, source_address=None, *, encoding='utf-8')
A :class:`FTP` subclass which adds TLS support to FTP as described in
:rfc:`4217`.
@@ -110,7 +115,9 @@ The module defines the following items:
.. versionchanged:: 3.9
If the *timeout* parameter is set to be zero, it will raise a
- :class:`ValueError` to prevent the creation of a non-blocking socket
+ :class:`ValueError` to prevent the creation of a non-blocking socket.
+ The *encoding* parameter was added, and the default was changed from
+ Latin-1 to UTF-8 to follow :rfc:`2640`.
Here's a sample session using the :class:`FTP_TLS` class::
@@ -259,9 +266,10 @@ followed by ``lines`` for the text version or ``binary`` for the binary version.
.. method:: FTP.retrlines(cmd, callback=None)
- Retrieve a file or directory listing in ASCII transfer mode. *cmd* should be
- an appropriate ``RETR`` command (see :meth:`retrbinary`) or a command such as
- ``LIST`` or ``NLST`` (usually just the string ``'LIST'``).
+ Retrieve a file or directory listing in the encoding specified by the
+ *encoding* parameter at initialization.
+ *cmd* should be an appropriate ``RETR`` command (see :meth:`retrbinary`) or
+ a command such as ``LIST`` or ``NLST`` (usually just the string ``'LIST'``).
``LIST`` retrieves a list of files and information about those files.
``NLST`` retrieves a list of file names.
The *callback* function is called for each line with a string argument
@@ -291,7 +299,7 @@ followed by ``lines`` for the text version or ``binary`` for the binary version.
.. method:: FTP.storlines(cmd, fp, callback=None)
- Store a file in ASCII transfer mode. *cmd* should be an appropriate
+ Store a file in line mode. *cmd* should be an appropriate
``STOR`` command (see :meth:`storbinary`). Lines are read until EOF from the
:term:`file object` *fp* (opened in binary mode) using its :meth:`~io.IOBase.readline`
method to provide the data to be stored. *callback* is an optional single
@@ -309,10 +317,9 @@ followed by ``lines`` for the text version or ``binary`` for the binary version.
If optional *rest* is given, a ``REST`` command is sent to the server, passing
*rest* as an argument. *rest* is usually a byte offset into the requested file,
telling the server to restart sending the file's bytes at the requested offset,
- skipping over the initial bytes. Note however that :rfc:`959` requires only that
- *rest* be a string containing characters in the printable range from ASCII code
- 33 to ASCII code 126. The :meth:`transfercmd` method, therefore, converts
- *rest* to a string, but no check is performed on the string's contents. If the
+ skipping over the initial bytes. Note however that the :meth:`transfercmd`
+ method converts *rest* to a string with the *encoding* parameter specified
+ at initialization, but no check is performed on the string's contents. If the
server does not recognize the ``REST`` command, an :exc:`error_reply` exception
will be raised. If this happens, simply call :meth:`transfercmd` without a
*rest* argument.
diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst
index cc48597ef91d50..e9c92f7c8210d1 100644
--- a/Doc/library/functions.rst
+++ b/Doc/library/functions.rst
@@ -43,9 +43,8 @@ are always available. They are listed here in alphabetical order.
.. function:: abs(x)
Return the absolute value of a number. The argument may be an
- integer or a floating point number. If the argument is a complex number, its
- magnitude is returned. If *x* defines :meth:`__abs__`,
- ``abs(x)`` returns ``x.__abs__()``.
+ integer, a floating point number, or an object implementing :meth:`__abs__`.
+ If the argument is a complex number, its magnitude is returned.
.. function:: all(iterable)
@@ -1090,6 +1089,12 @@ are always available. They are listed here in alphabetical order.
first decoded using a platform-dependent encoding or using the specified
*encoding* if given.
+ There is an additional mode character permitted, ``'U'``, which no longer
+ has any effect, and is considered deprecated. It previously enabled
+ :term:`universal newlines` in text mode, which became the default behaviour
+ in Python 3.0. Refer to the documentation of the
+   :ref:`newline <open-newline-parameter>` parameter for further details.
+
.. note::
Python doesn't depend on the underlying operating system's notion of text
@@ -1246,6 +1251,10 @@ are always available. They are listed here in alphabetical order.
* The file is now non-inheritable.
+ .. deprecated-removed:: 3.4 3.10
+
+ The ``'U'`` mode.
+
.. versionchanged::
3.5
@@ -1261,10 +1270,6 @@ are always available. They are listed here in alphabetical order.
* On Windows, opening a console buffer may return a subclass of
:class:`io.RawIOBase` other than :class:`io.FileIO`.
- .. versionchanged:: 3.9
- The ``'U'`` mode has been removed.
-
-
.. function:: ord(c)
Given a string representing one Unicode character, return an integer
@@ -1305,7 +1310,7 @@ are always available. They are listed here in alphabetical order.
the second argument to be negative, permitting computation of modular
inverses.
- .. versionchanged:: 3.9
+ .. versionchanged:: 3.8
Allow keyword arguments. Formerly, only positional arguments were
supported.
@@ -1829,6 +1834,9 @@ are always available. They are listed here in alphabetical order.
Negative values for *level* are no longer supported (which also changes
the default value to 0).
+ .. versionchanged:: 3.9
+ When the command line options :option:`-E` or :option:`-I` are being used,
+ the environment variable :envvar:`PYTHONCASEOK` is now ignored.
.. rubric:: Footnotes
diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst
index e708a0d99cd004..a44eb85b27dbab 100644
--- a/Doc/library/functools.rst
+++ b/Doc/library/functools.rst
@@ -26,6 +26,32 @@ function for the purposes of this module.
The :mod:`functools` module defines the following functions:
+.. decorator:: cache(user_function)
+
+ Simple lightweight unbounded function cache. Sometimes called
+ `"memoize" `_.
+
+ Returns the same as ``lru_cache(maxsize=None)``, creating a thin
+ wrapper around a dictionary lookup for the function arguments. Because it
+ never needs to evict old values, this is smaller and faster than
+ :func:`lru_cache()` with a size limit.
+
+ For example::
+
+ @cache
+ def factorial(n):
+ return n * factorial(n-1) if n else 1
+
+ >>> factorial(10) # no previously cached result, makes 11 recursive calls
+ 3628800
+ >>> factorial(5) # just looks up cached value result
+ 120
+ >>> factorial(12) # makes two new recursive calls, the other 10 are cached
+ 479001600
+
+ .. versionadded:: 3.9
+
+
.. decorator:: cached_property(func)
Transform a method of a class into a property whose value is computed once
@@ -107,8 +133,7 @@ The :mod:`functools` module defines the following functions:
return sum(sentence.count(vowel) for vowel in 'aeiou')
If *maxsize* is set to ``None``, the LRU feature is disabled and the cache can
- grow without bound. The LRU feature performs best when *maxsize* is a
- power-of-two.
+ grow without bound.
If *typed* is set to true, function arguments of different types will be
cached separately. For example, ``f(3)`` and ``f(3.0)`` will be treated
@@ -133,11 +158,11 @@ The :mod:`functools` module defines the following functions:
bypassing the cache, or for rewrapping the function with a different cache.
An `LRU (least recently used) cache
- `_ works
- best when the most recent calls are the best predictors of upcoming calls (for
- example, the most popular articles on a news server tend to change each day).
- The cache's size limit assures that the cache does not grow without bound on
- long-running processes such as web servers.
+   <https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)>`_
+ works best when the most recent calls are the best predictors of upcoming
+ calls (for example, the most popular articles on a news server tend to
+ change each day). The cache's size limit assures that the cache does not
+ grow without bound on long-running processes such as web servers.
In general, the LRU cache should only be used when you want to reuse
previously computed values. Accordingly, it doesn't make sense to cache
diff --git a/Doc/library/gettext.rst b/Doc/library/gettext.rst
index 937330bb201b08..ec2c12806b4160 100644
--- a/Doc/library/gettext.rst
+++ b/Doc/library/gettext.rst
@@ -724,8 +724,8 @@ implementations, and valuable experience to the creation of this module:
.. [#] The default locale directory is system dependent; for example, on RedHat Linux
it is :file:`/usr/share/locale`, but on Solaris it is :file:`/usr/lib/locale`.
The :mod:`gettext` module does not try to support these system dependent
- defaults; instead its default is :file:`{sys.prefix}/share/locale` (see
- :data:`sys.prefix`). For this reason, it is always best to call
+ defaults; instead its default is :file:`{sys.base_prefix}/share/locale` (see
+ :data:`sys.base_prefix`). For this reason, it is always best to call
:func:`bindtextdomain` with an explicit absolute path at the start of your
application.
diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
index b737d22d4100cd..d644974e660984 100644
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -87,6 +87,8 @@ library that Python uses on your platform. On most platforms the
that the hashing algorithm is not used in a security context, e.g. as a
non-cryptographic one-way compression function.
+ Hashlib now uses SHA3 and SHAKE from OpenSSL 1.1.1 and newer.
+
For example, to obtain the digest of the byte string ``b'Nobody inspects the
spammish repetition'``::
diff --git a/Doc/library/hmac.rst b/Doc/library/hmac.rst
index 57ac8bb16120f5..6f1b59b57ce580 100644
--- a/Doc/library/hmac.rst
+++ b/Doc/library/hmac.rst
@@ -114,6 +114,12 @@ A hash object has the following attributes:
.. versionadded:: 3.4
+.. deprecated:: 3.9
+
+ The undocumented attributes ``HMAC.digest_cons``, ``HMAC.inner``, and
+ ``HMAC.outer`` are internal implementation details and will be removed in
+ Python 3.10.
+
This module also provides the following helper function:
.. function:: compare_digest(a, b)
@@ -132,6 +138,11 @@ This module also provides the following helper function:
.. versionadded:: 3.3
+ .. versionchanged:: 3.10
+
+ The function uses OpenSSL's ``CRYPTO_memcmp()`` internally when
+ available.
+
.. seealso::
diff --git a/Doc/library/http.rst b/Doc/library/http.rst
index 0e3441cbcb718c..14ee73363e62e0 100644
--- a/Doc/library/http.rst
+++ b/Doc/library/http.rst
@@ -62,6 +62,7 @@ Code Enum Name Details
``100`` ``CONTINUE`` HTTP/1.1 :rfc:`7231`, Section 6.2.1
``101`` ``SWITCHING_PROTOCOLS`` HTTP/1.1 :rfc:`7231`, Section 6.2.2
``102`` ``PROCESSING`` WebDAV :rfc:`2518`, Section 10.1
+``103`` ``EARLY_HINTS`` An HTTP Status Code for Indicating Hints :rfc:`8297`
``200`` ``OK`` HTTP/1.1 :rfc:`7231`, Section 6.3.1
``201`` ``CREATED`` HTTP/1.1 :rfc:`7231`, Section 6.3.2
``202`` ``ACCEPTED`` HTTP/1.1 :rfc:`7231`, Section 6.3.3
@@ -98,10 +99,12 @@ Code Enum Name Details
``415`` ``UNSUPPORTED_MEDIA_TYPE`` HTTP/1.1 :rfc:`7231`, Section 6.5.13
``416`` ``REQUESTED_RANGE_NOT_SATISFIABLE`` HTTP/1.1 Range Requests :rfc:`7233`, Section 4.4
``417`` ``EXPECTATION_FAILED`` HTTP/1.1 :rfc:`7231`, Section 6.5.14
+``418`` ``IM_A_TEAPOT`` HTCPCP/1.0 :rfc:`2324`, Section 2.3.2
``421`` ``MISDIRECTED_REQUEST`` HTTP/2 :rfc:`7540`, Section 9.1.2
``422`` ``UNPROCESSABLE_ENTITY`` WebDAV :rfc:`4918`, Section 11.2
``423`` ``LOCKED`` WebDAV :rfc:`4918`, Section 11.3
``424`` ``FAILED_DEPENDENCY`` WebDAV :rfc:`4918`, Section 11.4
+``425`` ``TOO_EARLY`` Using Early Data in HTTP :rfc:`8470`
``426`` ``UPGRADE_REQUIRED`` HTTP/1.1 :rfc:`7231`, Section 6.5.15
``428`` ``PRECONDITION_REQUIRED`` Additional HTTP Status Codes :rfc:`6585`
``429`` ``TOO_MANY_REQUESTS`` Additional HTTP Status Codes :rfc:`6585`
@@ -130,3 +133,6 @@ equal to the constant name (i.e. ``http.HTTPStatus.OK`` is also available as
.. versionadded:: 3.8
Added ``451 UNAVAILABLE_FOR_LEGAL_REASONS`` status code.
+
+.. versionadded:: 3.9
+ Added ``103 EARLY_HINTS``, ``418 IM_A_TEAPOT`` and ``425 TOO_EARLY`` status codes.
diff --git a/Doc/library/idle.rst b/Doc/library/idle.rst
index f15f46b788b36a..b1192e7bb46552 100644
--- a/Doc/library/idle.rst
+++ b/Doc/library/idle.rst
@@ -142,7 +142,9 @@ Replace...
Open a search-and-replace dialog.
Go to Line
- Move cursor to the line number requested and make that line visible.
+ Move the cursor to the beginning of the line requested and make that
+ line visible. A request past the end of the file goes to the end.
+ Clear any selection and update the line and column status.
Show Completions
Open a scrollable list allowing selection of keywords and attributes. See
@@ -681,19 +683,22 @@ clash, or cannot or does not want to run as admin, it might be easiest to
completely remove Python and start over.
A zombie pythonw.exe process could be a problem. On Windows, use Task
-Manager to detect and stop one. Sometimes a restart initiated by a program
-crash or Keyboard Interrupt (control-C) may fail to connect. Dismissing
-the error box or Restart Shell on the Shell menu may fix a temporary problem.
+Manager to check for one and stop it if there is. Sometimes a restart
+initiated by a program crash or Keyboard Interrupt (control-C) may fail
+to connect. Dismissing the error box or using Restart Shell on the Shell
+menu may fix a temporary problem.
When IDLE first starts, it attempts to read user configuration files in
``~/.idlerc/`` (~ is one's home directory). If there is a problem, an error
message should be displayed. Leaving aside random disk glitches, this can
-be prevented by never editing the files by hand, using the configuration
-dialog, under Options, instead Options. Once it happens, the solution may
-be to delete one or more of the configuration files.
+be prevented by never editing the files by hand. Instead, use the
+configuration dialog, under Options. Once there is an error in a user
+configuration file, the best solution may be to delete it and start over
+with the settings dialog.
If IDLE quits with no message, and it was not started from a console, try
-starting from a console (``python -m idlelib``) and see if a message appears.
+starting it from a console or terminal (``python -m idlelib``) and see if
+this results in an error message.
Running user code
^^^^^^^^^^^^^^^^^
diff --git a/Doc/library/imaplib.rst b/Doc/library/imaplib.rst
index 5b8ca7ce68fd94..7c5b0750161598 100644
--- a/Doc/library/imaplib.rst
+++ b/Doc/library/imaplib.rst
@@ -582,6 +582,15 @@ An :class:`IMAP4` instance has the following methods:
Unsubscribe from old mailbox.
+.. method:: IMAP4.unselect()
+
+   :meth:`imaplib.IMAP4.unselect` frees the server's resources associated with the
+ selected mailbox and returns the server to the authenticated
+ state. This command performs the same actions as :meth:`imaplib.IMAP4.close`, except
+ that no messages are permanently removed from the currently
+ selected mailbox.
+
+ .. versionadded:: 3.9
.. method:: IMAP4.xatom(name[, ...])
diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst
index cfde3a47f75dc7..99bfeacbbc7407 100644
--- a/Doc/library/importlib.rst
+++ b/Doc/library/importlib.rst
@@ -480,6 +480,8 @@ ABC hierarchy::
.. class:: ResourceReader
+ *Superseded by TraversableReader*
+
An :term:`abstract base class` to provide the ability to read
*resources*.
@@ -795,6 +797,28 @@ ABC hierarchy::
itself does not end in ``__init__``.
+.. class:: Traversable
+
+ An object with a subset of pathlib.Path methods suitable for
+ traversing directories and opening files.
+
+ .. versionadded:: 3.9
+
+
+.. class:: TraversableReader
+
+ An abstract base class for resource readers capable of serving
+ the ``files`` interface. Subclasses ResourceReader and provides
+ concrete implementations of the ResourceReader's abstract
+ methods. Therefore, any loader supplying TraversableReader
+ also supplies ResourceReader.
+
+ Loaders that wish to support resource reading are expected to
+ implement this interface.
+
+ .. versionadded:: 3.9
+
+
:mod:`importlib.resources` -- Resources
---------------------------------------
@@ -853,6 +877,19 @@ The following types are defined.
The following functions are available.
+
+.. function:: files(package)
+
+ Returns an :class:`importlib.resources.abc.Traversable` object
+ representing the resource container for the package (think directory)
+ and its resources (think files). A Traversable may contain other
+ containers (think subdirectories).
+
+ *package* is either a name or a module object which conforms to the
+ ``Package`` requirements.
+
+ .. versionadded:: 3.9
+
.. function:: open_binary(package, resource)
Open for binary reading the *resource* within *package*.
@@ -1029,6 +1066,10 @@ find and load modules.
Only class methods are defined by this class to alleviate the need for
instantiation.
+ .. versionchanged:: 3.4
+ Gained :meth:`~Loader.create_module` and :meth:`~Loader.exec_module`
+ methods.
+
.. class:: WindowsRegistryFinder
diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst
index 9b9bc99f43d48c..d00a30ff004063 100644
--- a/Doc/library/inspect.rst
+++ b/Doc/library/inspect.rst
@@ -624,18 +624,15 @@ function.
.. attribute:: Signature.parameters
- An dictionary of :class:`Parameter` objects. Parameters appear in strict
- definition order, including keyword-only parameters.
+ An ordered mapping of parameters' names to the corresponding
+ :class:`Parameter` objects. Parameters appear in strict definition
+ order, including keyword-only parameters.
.. versionchanged:: 3.7
Python only explicitly guaranteed that it preserved the declaration
order of keyword-only parameters as of version 3.7, although in practice
this order had always been preserved in Python 3.
- .. versionchanged:: 3.9
- :attr:`parameters` is now of type :class:`dict`. Formerly, it was of
- type :class:`collections.OrderedDict`.
-
.. attribute:: Signature.return_annotation
The "return" annotation for the callable. If the callable has no "return"
@@ -824,7 +821,7 @@ function.
.. attribute:: BoundArguments.arguments
- An ordered, mutable mapping of parameters' names to arguments' values.
+ A mutable mapping of parameters' names to arguments' values.
Contains only explicitly bound arguments. Changes in :attr:`arguments`
will reflect in :attr:`args` and :attr:`kwargs`.
diff --git a/Doc/library/io.rst b/Doc/library/io.rst
index f0987da9b6a4cb..aecbec56866d73 100644
--- a/Doc/library/io.rst
+++ b/Doc/library/io.rst
@@ -132,12 +132,13 @@ High-level Module Interface
Opens the provided file with mode ``'rb'``. This function should be used
when the intent is to treat the contents as executable code.
- ``path`` should be an absolute path.
+ ``path`` should be a :class:`str` and an absolute path.
The behavior of this function may be overridden by an earlier call to the
- :c:func:`PyFile_SetOpenCodeHook`, however, it should always be considered
- interchangeable with ``open(path, 'rb')``. Overriding the behavior is
- intended for additional validation or preprocessing of the file.
+ :c:func:`PyFile_SetOpenCodeHook`. However, assuming that ``path`` is a
+ :class:`str` and an absolute path, ``open_code(path)`` should always behave
+ the same as ``open(path, 'rb')``. Overriding the behavior is intended for
+ additional validation or preprocessing of the file.
.. versionadded:: 3.8
diff --git a/Doc/library/ipaddress.rst b/Doc/library/ipaddress.rst
index 140401d2f36739..5f5e66412da477 100644
--- a/Doc/library/ipaddress.rst
+++ b/Doc/library/ipaddress.rst
@@ -217,11 +217,20 @@ write code that handles both IP versions correctly. Address objects are
:RFC:`4291` for details. For example,
``"0000:0000:0000:0000:0000:0abc:0007:0def"`` can be compressed to
``"::abc:7:def"``.
+
+ Optionally, the string may also have a scope zone ID, expressed
+ with a suffix ``%scope_id``. If present, the scope ID must be non-empty,
+ and may not contain ``%``.
+ See :RFC:`4007` for details.
+ For example, ``fe80::1234%1`` might identify address ``fe80::1234`` on the first link of the node.
2. An integer that fits into 128 bits.
3. An integer packed into a :class:`bytes` object of length 16, big-endian.
+
>>> ipaddress.IPv6Address('2001:db8::1000')
IPv6Address('2001:db8::1000')
+ >>> ipaddress.IPv6Address('ff02::5678%1')
+ IPv6Address('ff02::5678%1')
.. attribute:: compressed
@@ -268,6 +277,12 @@ write code that handles both IP versions correctly. Address objects are
``::FFFF/96``), this property will report the embedded IPv4 address.
For any other address, this property will be ``None``.
+ .. attribute:: scope_id
+
+ For scoped addresses as defined by :RFC:`4007`, this property identifies
+ the particular zone of the address's scope that the address belongs to,
+ as a string. When no scope zone is specified, this property will be ``None``.
+
.. attribute:: sixtofour
For addresses that appear to be 6to4 addresses (starting with
@@ -299,6 +314,8 @@ the :func:`str` and :func:`int` builtin functions::
>>> int(ipaddress.IPv6Address('::1'))
1
+Note that IPv6 scoped addresses are converted to integers without scope zone ID.
+
Operators
^^^^^^^^^
@@ -311,8 +328,9 @@ IPv6).
Comparison operators
""""""""""""""""""""
-Address objects can be compared with the usual set of comparison operators. Some
-examples::
+Address objects can be compared with the usual set of comparison operators.
+Same IPv6 addresses with different scope zone IDs are not equal.
+Some examples::
>>> IPv4Address('127.0.0.2') > IPv4Address('127.0.0.1')
True
@@ -320,6 +338,10 @@ examples::
False
>>> IPv4Address('127.0.0.2') != IPv4Address('127.0.0.1')
True
+ >>> IPv6Address('fe80::1234') == IPv6Address('fe80::1234%1')
+ False
+ >>> IPv6Address('fe80::1234%1') != IPv6Address('fe80::1234%2')
+ True
Arithmetic operators
@@ -487,7 +509,8 @@ dictionaries.
hosts are all the IP addresses that belong to the network, except the
network address itself and the network broadcast address. For networks
with a mask length of 31, the network address and network broadcast
- address are also included in the result.
+ address are also included in the result. Networks with a mask of 32
+ will return a list containing the single host address.
>>> list(ip_network('192.0.2.0/29').hosts()) #doctest: +NORMALIZE_WHITESPACE
[IPv4Address('192.0.2.1'), IPv4Address('192.0.2.2'),
@@ -495,6 +518,8 @@ dictionaries.
IPv4Address('192.0.2.5'), IPv4Address('192.0.2.6')]
>>> list(ip_network('192.0.2.0/31').hosts())
[IPv4Address('192.0.2.0'), IPv4Address('192.0.2.1')]
+ >>> list(ip_network('192.0.2.1/32').hosts())
+ [IPv4Address('192.0.2.1')]
.. method:: overlaps(other)
@@ -656,6 +681,8 @@ dictionaries.
hosts are all the IP addresses that belong to the network, except the
Subnet-Router anycast address. For networks with a mask length of 127,
the Subnet-Router anycast address is also included in the result.
+ Networks with a mask of 128 will return a list containing the
+ single host address.
.. method:: overlaps(other)
.. method:: address_exclude(network)
diff --git a/Doc/library/itertools.rst b/Doc/library/itertools.rst
index 3e5a673898106c..107bc515a67785 100644
--- a/Doc/library/itertools.rst
+++ b/Doc/library/itertools.rst
@@ -199,9 +199,9 @@ loops that truncate the stream.
Return *r* length subsequences of elements from the input *iterable*.
- Combinations are emitted in lexicographic sort order. So, if the
- input *iterable* is sorted, the combination tuples will be produced
- in sorted order.
+ The combination tuples are emitted in lexicographic ordering according to
+ the order of the input *iterable*. So, if the input *iterable* is sorted,
+ the combination tuples will be produced in sorted order.
Elements are treated as unique based on their position, not on their
value. So if the input elements are unique, there will be no repeat
@@ -248,9 +248,9 @@ loops that truncate the stream.
Return *r* length subsequences of elements from the input *iterable*
allowing individual elements to be repeated more than once.
- Combinations are emitted in lexicographic sort order. So, if the
- input *iterable* is sorted, the combination tuples will be produced
- in sorted order.
+ The combination tuples are emitted in lexicographic ordering according to
+ the order of the input *iterable*. So, if the input *iterable* is sorted,
+ the combination tuples will be produced in sorted order.
Elements are treated as unique based on their position, not on their
value. So if the input elements are unique, the generated combinations
@@ -484,9 +484,9 @@ loops that truncate the stream.
of the *iterable* and all possible full-length permutations
are generated.
- Permutations are emitted in lexicographic sort order. So, if the
- input *iterable* is sorted, the permutation tuples will be produced
- in sorted order.
+ The permutation tuples are emitted in lexicographic ordering according to
+ the order of the input *iterable*. So, if the input *iterable* is sorted,
+   the permutation tuples will be produced in sorted order.
Elements are treated as unique based on their position, not on their
value. So if the input elements are unique, there will be no repeat
@@ -563,6 +563,9 @@ loops that truncate the stream.
for prod in result:
yield tuple(prod)
+ Before :func:`product` runs, it completely consumes the input iterables,
+ keeping pools of values in memory to generate the products. Accordingly,
+ it is only useful with finite inputs.
.. function:: repeat(object[, times])
diff --git a/Doc/library/keyword.rst b/Doc/library/keyword.rst
index 3768df969c55c9..acec45cdcd586e 100644
--- a/Doc/library/keyword.rst
+++ b/Doc/library/keyword.rst
@@ -8,16 +8,17 @@
--------------
-This module allows a Python program to determine if a string is a keyword.
+This module allows a Python program to determine if a string is a
+:ref:`keyword <keywords>`.
.. function:: iskeyword(s)
- Return ``True`` if *s* is a Python keyword.
+ Return ``True`` if *s* is a Python :ref:`keyword <keywords>`.
.. data:: kwlist
- Sequence containing all the keywords defined for the interpreter. If any
- keywords are defined to only be active when particular :mod:`__future__`
- statements are in effect, these will be included as well.
+ Sequence containing all the :ref:`keywords <keywords>` defined for the
+ interpreter. If any keywords are defined to only be active when particular
+ :mod:`__future__` statements are in effect, these will be included as well.
diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst
index 3b414ad30272cc..7267f812cc1925 100644
--- a/Doc/library/logging.rst
+++ b/Doc/library/logging.rst
@@ -159,6 +159,7 @@ is the module's name in the Python package namespace.
message format string, and the *args* are the arguments which are merged into
*msg* using the string formatting operator. (Note that this means that you can
use keywords in the format string, together with a single dictionary argument.)
+ No % formatting operation is performed on *msg* when no *args* are supplied.
There are four keyword arguments in *kwargs* which are inspected:
*exc_info*, *stack_info*, *stacklevel* and *extra*.
@@ -607,6 +608,9 @@ The useful mapping keys in a :class:`LogRecord` are given in the section on
attributes are ``default_time_format`` (for the strptime format string)
and ``default_msec_format`` (for appending the millisecond value).
+ .. versionchanged:: 3.9
+ The ``default_msec_format`` can be ``None``.
+
.. method:: formatException(exc_info)
Formats the specified exception information (a standard exception tuple as
diff --git a/Doc/library/mailcap.rst b/Doc/library/mailcap.rst
index 896afd1d730628..bf9639bdaca50a 100644
--- a/Doc/library/mailcap.rst
+++ b/Doc/library/mailcap.rst
@@ -18,7 +18,7 @@ belonging to a temporary file) and the :program:`xmpeg` program can be
automatically started to view the file.
The mailcap format is documented in :rfc:`1524`, "A User Agent Configuration
-Mechanism For Multimedia Mail Format Information," but is not an Internet
+Mechanism For Multimedia Mail Format Information", but is not an Internet
standard. However, mailcap files are supported on most Unix systems.
diff --git a/Doc/library/math.rst b/Doc/library/math.rst
index c4c180037f8788..6ec1feee35a6dc 100644
--- a/Doc/library/math.rst
+++ b/Doc/library/math.rst
@@ -126,15 +126,20 @@ Number-theoretic and representation functions
`_\.
-.. function:: gcd(a, b)
+.. function:: gcd(*integers)
- Return the greatest common divisor of the integers *a* and *b*. If either
- *a* or *b* is nonzero, then the value of ``gcd(a, b)`` is the largest
- positive integer that divides both *a* and *b*. ``gcd(0, 0)`` returns
- ``0``.
+ Return the greatest common divisor of the specified integer arguments.
+ If any of the arguments is nonzero, then the returned value is the largest
+ positive integer that is a divisor of all arguments. If all arguments
+ are zero, then the returned value is ``0``. ``gcd()`` without arguments
+ returns ``0``.
.. versionadded:: 3.5
+ .. versionchanged:: 3.9
+ Added support for an arbitrary number of arguments. Formerly, only two
+ arguments were supported.
+
.. function:: isclose(a, b, *, rel_tol=1e-09, abs_tol=0.0)
@@ -201,6 +206,17 @@ Number-theoretic and representation functions
.. versionadded:: 3.8
+.. function:: lcm(*integers)
+
+ Return the least common multiple of the specified integer arguments.
+ If all arguments are nonzero, then the returned value is the smallest
+ positive integer that is a multiple of all arguments. If any of the arguments
+ is zero, then the returned value is ``0``. ``lcm()`` without arguments
+ returns ``1``.
+
+ .. versionadded:: 3.9
+
+
.. function:: ldexp(x, i)
Return ``x * (2**i)``. This is essentially the inverse of function
diff --git a/Doc/library/mmap.rst b/Doc/library/mmap.rst
index 12b14d69332d72..698c17653786b3 100644
--- a/Doc/library/mmap.rst
+++ b/Doc/library/mmap.rst
@@ -81,7 +81,9 @@ To map anonymous memory, -1 should be passed as the fileno along with the length
private copy-on-write mapping, so changes to the contents of the mmap
object will be private to this process, and :const:`MAP_SHARED` creates a
mapping that's shared with all other processes mapping the same areas of
- the file. The default value is :const:`MAP_SHARED`.
+ the file. The default value is :const:`MAP_SHARED`. Some systems have
+ additional possible flags with the full list specified in
+ :ref:`MAP_* constants <map-constants>`.
*prot*, if specified, gives the desired memory protection; the two most
useful values are :const:`PROT_READ` and :const:`PROT_WRITE`, to specify
@@ -244,7 +246,8 @@ To map anonymous memory, -1 should be passed as the fileno along with the length
.. method:: readline()
Returns a single line, starting at the current file position and up to the
- next newline.
+ next newline. The file position is updated to point after the bytes that were
+ returned.
.. method:: resize(newsize)
@@ -341,3 +344,21 @@ MADV_* Constants
Availability: Systems with the madvise() system call.
.. versionadded:: 3.8
+
+.. _map-constants:
+
+MAP_* Constants
++++++++++++++++
+
+.. data:: MAP_SHARED
+ MAP_PRIVATE
+ MAP_DENYWRITE
+ MAP_EXECUTABLE
+ MAP_ANON
+ MAP_ANONYMOUS
+ MAP_POPULATE
+
+ These are the various flags that can be passed to :meth:`mmap.mmap`. Note that some options might not be present on some systems.
+
+ .. versionchanged:: 3.10
+ Added MAP_POPULATE constant.
diff --git a/Doc/library/msvcrt.rst b/Doc/library/msvcrt.rst
index 14ad2cd4373afe..42fffee6a0f449 100644
--- a/Doc/library/msvcrt.rst
+++ b/Doc/library/msvcrt.rst
@@ -42,6 +42,8 @@ File Operations
regions in a file may be locked at the same time, but may not overlap. Adjacent
regions are not merged; they must be unlocked individually.
+ .. audit-event:: msvcrt.locking fd,mode,nbytes msvcrt.locking
+
.. data:: LK_LOCK
LK_RLCK
@@ -77,12 +79,16 @@ File Operations
and :const:`os.O_TEXT`. The returned file descriptor may be used as a parameter
to :func:`os.fdopen` to create a file object.
+ .. audit-event:: msvcrt.open_osfhandle handle,flags msvcrt.open_osfhandle
+
.. function:: get_osfhandle(fd)
Return the file handle for the file descriptor *fd*. Raises :exc:`OSError` if
*fd* is not recognized.
+ .. audit-event:: msvcrt.get_osfhandle fd msvcrt.get_osfhandle
+
.. _msvcrt-console:
diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst
index 492f94c30017f0..08258a65a89dc3 100644
--- a/Doc/library/multiprocessing.rst
+++ b/Doc/library/multiprocessing.rst
@@ -439,7 +439,8 @@ process which created it.
>>> def f(x):
... return x*x
...
- >>> p.map(f, [1,2,3])
+ >>> with p:
+ ... p.map(f, [1,2,3])
Process PoolWorker-1:
Process PoolWorker-2:
Process PoolWorker-3:
@@ -877,6 +878,16 @@ For an example of the usage of queues for interprocess communication see
It is a simplified :class:`Queue` type, very close to a locked :class:`Pipe`.
+ .. method:: close()
+
+ Close the queue: release internal resources.
+
+ A queue must not be used anymore after it is closed. For example,
+ :meth:`get`, :meth:`put` and :meth:`empty` methods must no longer be
+ called.
+
+ .. versionadded:: 3.9
+
.. method:: empty()
Return ``True`` if the queue is empty, ``False`` otherwise.
@@ -2127,6 +2138,16 @@ with the :class:`Pool` class.
Note that the methods of the pool object should only be called by
the process which created the pool.
+ .. warning::
+ :class:`multiprocessing.pool.Pool` objects have internal resources that need to be
+ properly managed (like any other resource) by using the pool as a context manager
+ or by calling :meth:`close` and :meth:`terminate` manually. Failure to do this
+ can lead to the process hanging on finalization.
+
+ Note that it is **not correct** to rely on the garbage collector to destroy the pool
+ as CPython does not assure that the finalizer of the pool will be called
+ (see :meth:`object.__del__` for more information).
+
.. versionadded:: 3.2
*maxtasksperchild*
diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst
index fa02bde84650e1..36c53556c2685e 100644
--- a/Doc/library/operator.rst
+++ b/Doc/library/operator.rst
@@ -112,6 +112,10 @@ The mathematical and bitwise operations are the most numerous:
Return *a* converted to an integer. Equivalent to ``a.__index__()``.
+ .. versionchanged:: 3.10
+ The result always has exact type :class:`int`. Previously, the result
+ could have been an instance of a subclass of ``int``.
+
.. function:: inv(obj)
invert(obj)
diff --git a/Doc/library/os.rst b/Doc/library/os.rst
index bfc03227e4ed29..275b2d390e7cf5 100644
--- a/Doc/library/os.rst
+++ b/Doc/library/os.rst
@@ -135,6 +135,9 @@ process and user.
``os.environ``, and when one of the :meth:`pop` or :meth:`clear` methods is
called.
+ .. versionchanged:: 3.9
+ Updated to support :pep:`584`'s merge (``|``) and update (``|=``) operators.
+
.. data:: environb
@@ -148,6 +151,9 @@ process and user.
.. versionadded:: 3.2
+ .. versionchanged:: 3.9
+ Updated to support :pep:`584`'s merge (``|``) and update (``|=``) operators.
+
.. function:: chdir(path)
fchdir(fd)
@@ -445,6 +451,8 @@ process and user.
On some platforms, including FreeBSD and Mac OS X, setting ``environ`` may
cause memory leaks. Refer to the system documentation for :c:func:`putenv`.
+ .. audit-event:: os.putenv key,value os.putenv
+
.. versionchanged:: 3.9
The function is now always available.
@@ -640,6 +648,8 @@ process and user.
don't update ``os.environ``, so it is actually preferable to delete items of
``os.environ``.
+ .. audit-event:: os.unsetenv key os.unsetenv
+
.. versionchanged:: 3.9
The function is now always available and is also available on Windows.
@@ -766,6 +776,8 @@ as internal buffering of data.
docs for :func:`chmod` for possible values of *mode*. As of Python 3.3, this
is equivalent to ``os.chmod(fd, mode)``.
+ .. audit-event:: os.chmod path,mode,dir_fd os.fchmod
+
.. availability:: Unix.
@@ -776,6 +788,8 @@ as internal buffering of data.
:func:`chown`. As of Python 3.3, this is equivalent to ``os.chown(fd, uid,
gid)``.
+ .. audit-event:: os.chown path,uid,gid,dir_fd os.fchown
+
.. availability:: Unix.
@@ -883,6 +897,8 @@ as internal buffering of data.
:data:`F_ULOCK` or :data:`F_TEST`.
*len* specifies the section of the file to lock.
+ .. audit-event:: os.lockf fd,cmd,len os.lockf
+
.. availability:: Unix.
.. versionadded:: 3.3
@@ -1135,7 +1151,8 @@ or `the MSDN `_ on Windo
Combine the functionality of :func:`os.readv` and :func:`os.pread`.
.. availability:: Linux 2.6.30 and newer, FreeBSD 6.0 and newer,
- OpenBSD 2.7 and newer. Using flags requires Linux 4.6 or newer.
+ OpenBSD 2.7 and newer, AIX 7.1 and newer. Using flags requires
+ Linux 4.6 or newer.
.. versionadded:: 3.7
@@ -1194,6 +1211,7 @@ or `the MSDN `_ on Windo
- :data:`RWF_DSYNC`
- :data:`RWF_SYNC`
+ - :data:`RWF_APPEND`
Return the total number of bytes actually written.
@@ -1203,15 +1221,16 @@ or `the MSDN `_ on Windo
Combine the functionality of :func:`os.writev` and :func:`os.pwrite`.
.. availability:: Linux 2.6.30 and newer, FreeBSD 6.0 and newer,
- OpenBSD 2.7 and newer. Using flags requires Linux 4.7 or newer.
+ OpenBSD 2.7 and newer, AIX 7.1 and newer. Using flags requires
+ Linux 4.7 or newer.
.. versionadded:: 3.7
.. data:: RWF_DSYNC
- Provide a per-write equivalent of the :data:`O_DSYNC` ``open(2)`` flag. This
- flag effect applies only to the data range written by the system call.
+ Provide a per-write equivalent of the :data:`O_DSYNC` :func:`os.open` flag.
+ This flag effect applies only to the data range written by the system call.
.. availability:: Linux 4.7 and newer.
@@ -1220,14 +1239,28 @@ or `the MSDN `_ on Windo
.. data:: RWF_SYNC
- Provide a per-write equivalent of the :data:`O_SYNC` ``open(2)`` flag. This
- flag effect applies only to the data range written by the system call.
+ Provide a per-write equivalent of the :data:`O_SYNC` :func:`os.open` flag.
+ This flag effect applies only to the data range written by the system call.
.. availability:: Linux 4.7 and newer.
.. versionadded:: 3.7
+.. data:: RWF_APPEND
+
+ Provide a per-write equivalent of the :data:`O_APPEND` :func:`os.open`
+ flag. This flag is meaningful only for :func:`os.pwritev`, and its
+ effect applies only to the data range written by the system call. The
+ *offset* argument does not affect the write operation; the data is always
+ appended to the end of the file. However, if the *offset* argument is
+ ``-1``, the current file *offset* is updated.
+
+ .. availability:: Linux 4.16 and newer.
+
+ .. versionadded:: 3.10
+
+
.. function:: read(fd, n)
Read at most *n* bytes from file descriptor *fd*.
@@ -1250,7 +1283,7 @@ or `the MSDN `_ on Windo
.. function:: sendfile(out_fd, in_fd, offset, count)
- sendfile(out_fd, in_fd, offset, count, [headers], [trailers], flags=0)
+ sendfile(out_fd, in_fd, offset, count, headers=(), trailers=(), flags=0)
Copy *count* bytes from file descriptor *in_fd* to file descriptor *out_fd*
starting at *offset*.
@@ -1603,6 +1636,8 @@ features:
This function can raise :exc:`OSError` and subclasses such as
:exc:`FileNotFoundError`, :exc:`PermissionError`, and :exc:`NotADirectoryError`.
+ .. audit-event:: os.chdir path os.chdir
+
.. versionadded:: 3.3
Added support for specifying *path* as a file descriptor
on some platforms.
@@ -1631,6 +1666,8 @@ features:
This function can support :ref:`not following symlinks `.
+ .. audit-event:: os.chflags path,flags os.chflags
+
.. availability:: Unix.
.. versionadded:: 3.3
@@ -1676,6 +1713,8 @@ features:
read-only flag with it (via the ``stat.S_IWRITE`` and ``stat.S_IREAD``
constants or a corresponding integer value). All other bits are ignored.
+ .. audit-event:: os.chmod path,mode,dir_fd os.chmod
+
.. versionadded:: 3.3
Added support for specifying *path* as an open file descriptor,
and the *dir_fd* and *follow_symlinks* arguments.
@@ -1696,6 +1735,8 @@ features:
See :func:`shutil.chown` for a higher-level function that accepts names in
addition to numeric ids.
+ .. audit-event:: os.chown path,uid,gid,dir_fd os.chown
+
.. availability:: Unix.
.. versionadded:: 3.3
@@ -1722,6 +1763,8 @@ features:
descriptor *fd*. The descriptor must refer to an opened directory, not an
open file. As of Python 3.3, this is equivalent to ``os.chdir(fd)``.
+ .. audit-event:: os.chdir path os.fchdir
+
.. availability:: Unix.
@@ -1746,6 +1789,8 @@ features:
not follow symbolic links. As of Python 3.3, this is equivalent to
``os.chflags(path, flags, follow_symlinks=False)``.
+ .. audit-event:: os.chflags path,flags os.lchflags
+
.. availability:: Unix.
.. versionchanged:: 3.6
@@ -1759,6 +1804,8 @@ features:
for possible values of *mode*. As of Python 3.3, this is equivalent to
``os.chmod(path, mode, follow_symlinks=False)``.
+ .. audit-event:: os.chmod path,mode,dir_fd os.lchmod
+
.. availability:: Unix.
.. versionchanged:: 3.6
@@ -1770,6 +1817,8 @@ features:
function will not follow symbolic links. As of Python 3.3, this is equivalent
to ``os.chown(path, uid, gid, follow_symlinks=False)``.
+ .. audit-event:: os.chown path,uid,gid,dir_fd os.lchown
+
.. availability:: Unix.
.. versionchanged:: 3.6
@@ -1784,6 +1833,8 @@ features:
supply :ref:`paths relative to directory descriptors `, and :ref:`not
following symlinks `.
+ .. audit-event:: os.link src,dst,src_dir_fd,dst_dir_fd os.link
+
.. availability:: Unix, Windows.
.. versionchanged:: 3.2
@@ -1886,6 +1937,8 @@ features:
It is also possible to create temporary directories; see the
:mod:`tempfile` module's :func:`tempfile.mkdtemp` function.
+ .. audit-event:: os.mkdir path,mode,dir_fd os.mkdir
+
.. versionadded:: 3.3
The *dir_fd* argument.
@@ -1918,6 +1971,8 @@ features:
This function handles UNC paths correctly.
+ .. audit-event:: os.mkdir path,mode,dir_fd os.makedirs
+
.. versionadded:: 3.2
The *exist_ok* parameter.
@@ -2083,6 +2138,8 @@ features:
This function is semantically identical to :func:`unlink`.
+ .. audit-event:: os.remove path,dir_fd os.remove
+
.. versionadded:: 3.3
The *dir_fd* argument.
@@ -2103,6 +2160,8 @@ features:
they are empty. Raises :exc:`OSError` if the leaf directory could not be
successfully removed.
+ .. audit-event:: os.remove path,dir_fd os.removedirs
+
.. versionchanged:: 3.6
Accepts a :term:`path-like object`.
@@ -2128,6 +2187,8 @@ features:
If you want cross-platform overwriting of the destination, use :func:`replace`.
+ .. audit-event:: os.rename src,dst,src_dir_fd,dst_dir_fd os.rename
+
.. versionadded:: 3.3
The *src_dir_fd* and *dst_dir_fd* arguments.
@@ -2147,6 +2208,8 @@ features:
This function can fail with the new directory structure made if you lack
permissions needed to remove the leaf directory or file.
+ .. audit-event:: os.rename src,dst,src_dir_fd,dst_dir_fd os.renames
+
.. versionchanged:: 3.6
Accepts a :term:`path-like object` for *old* and *new*.
@@ -2162,6 +2225,8 @@ features:
This function can support specifying *src_dir_fd* and/or *dst_dir_fd* to
supply :ref:`paths relative to directory descriptors `.
+ .. audit-event:: os.rename src,dst,src_dir_fd,dst_dir_fd os.replace
+
.. versionadded:: 3.3
.. versionchanged:: 3.6
@@ -2178,6 +2243,8 @@ features:
This function can support :ref:`paths relative to directory descriptors
`.
+ .. audit-event:: os.rmdir path,dir_fd os.rmdir
+
.. versionadded:: 3.3
The *dir_fd* parameter.
@@ -2821,6 +2888,8 @@ features:
:exc:`OSError` is raised when the function is called by an unprivileged
user.
+ .. audit-event:: os.symlink src,dst,dir_fd os.symlink
+
.. availability:: Unix, Windows.
.. versionchanged:: 3.2
@@ -2873,6 +2942,8 @@ features:
traditional Unix name. Please see the documentation for
:func:`remove` for further information.
+ .. audit-event:: os.remove path,dir_fd os.unlink
+
.. versionadded:: 3.3
The *dir_fd* parameter.
@@ -2910,6 +2981,8 @@ features:
:ref:`paths relative to directory descriptors ` and :ref:`not
following symlinks `.
+ .. audit-event:: os.utime path,times,ns,dir_fd os.utime
+
.. versionadded:: 3.3
Added support for specifying *path* as an open file descriptor,
and the *dir_fd*, *follow_symlinks*, and *ns* parameters.
@@ -3003,6 +3076,8 @@ features:
for name in dirs:
os.rmdir(os.path.join(root, name))
+ .. audit-event:: os.walk top,topdown,onerror,followlinks os.walk
+
.. versionchanged:: 3.5
This function now calls :func:`os.scandir` instead of :func:`os.listdir`,
making it faster by reducing the number of calls to :func:`os.stat`.
@@ -3062,6 +3137,8 @@ features:
for name in dirs:
os.rmdir(name, dir_fd=rootfd)
+ .. audit-event:: os.fwalk top,topdown,onerror,follow_symlinks,dir_fd os.fwalk
+
.. availability:: Unix.
.. versionadded:: 3.3
@@ -3135,6 +3212,8 @@ These functions are all available on Linux only.
This function can support :ref:`specifying a file descriptor ` and
:ref:`not following symlinks `.
+ .. audit-event:: os.getxattr path,attribute os.getxattr
+
.. versionchanged:: 3.6
Accepts a :term:`path-like object` for *path* and *attribute*.
@@ -3149,6 +3228,8 @@ These functions are all available on Linux only.
This function can support :ref:`specifying a file descriptor ` and
:ref:`not following symlinks `.
+ .. audit-event:: os.listxattr path os.listxattr
+
.. versionchanged:: 3.6
Accepts a :term:`path-like object`.
@@ -3163,6 +3244,8 @@ These functions are all available on Linux only.
This function can support :ref:`specifying a file descriptor ` and
:ref:`not following symlinks `.
+ .. audit-event:: os.removexattr path,attribute os.removexattr
+
.. versionchanged:: 3.6
Accepts a :term:`path-like object` for *path* and *attribute*.
@@ -3186,6 +3269,8 @@ These functions are all available on Linux only.
A bug in Linux kernel versions less than 2.6.39 caused the flags argument
to be ignored on some filesystems.
+ .. audit-event:: os.setxattr path,attribute,value,flags os.setxattr
+
.. versionchanged:: 3.6
Accepts a :term:`path-like object` for *path* and *attribute*.
@@ -3248,6 +3333,8 @@ to be ignored.
`_
for more information about how DLLs are loaded.
+ .. audit-event:: os.add_dll_directory path os.add_dll_directory
+
.. availability:: Windows.
.. versionadded:: 3.8
@@ -3480,6 +3567,8 @@ written in Python, such as a mail server's external command delivery program.
Note that some platforms including FreeBSD <= 6.3 and Cygwin have
known issues when using ``fork()`` from a thread.
+ .. audit-event:: os.fork "" os.fork
+
.. versionchanged:: 3.8
Calling ``fork()`` in a subinterpreter is no longer supported
(:exc:`RuntimeError` is raised).
@@ -3499,6 +3588,8 @@ written in Python, such as a mail server's external command delivery program.
master end of the pseudo-terminal. For a more portable approach, use the
:mod:`pty` module. If an error occurs :exc:`OSError` is raised.
+ .. audit-event:: os.forkpty "" os.forkpty
+
.. versionchanged:: 3.8
Calling ``forkpty()`` in a subinterpreter is no longer supported
(:exc:`RuntimeError` is raised).
@@ -3525,6 +3616,8 @@ written in Python, such as a mail server's external command delivery program.
See also :func:`signal.pthread_kill`.
+ .. audit-event:: os.kill pid,sig os.kill
+
.. versionadded:: 3.2
Windows support.
@@ -3537,6 +3630,8 @@ written in Python, such as a mail server's external command delivery program.
Send the signal *sig* to the process group *pgid*.
+ .. audit-event:: os.killpg pgid,sig os.killpg
+
.. availability:: Unix.
@@ -3587,6 +3682,11 @@ written in Python, such as a mail server's external command delivery program.
subprocess was killed.) On Windows systems, the return value
contains the signed integer return code from the child process.
+ On Unix, :func:`waitstatus_to_exitcode` can be used to convert the ``close``
+ method result (exit status) into an exit code if it is not ``None``. On
+ Windows, the ``close`` method result is directly the exit code
+ (or ``None``).
+
This is implemented using :class:`subprocess.Popen`; see that class's
documentation for more powerful ways to manage and communicate with
subprocesses.
@@ -3890,6 +3990,10 @@ written in Python, such as a mail server's external command delivery program.
to using this function. See the :ref:`subprocess-replacements` section in
the :mod:`subprocess` documentation for some helpful recipes.
+ On Unix, :func:`waitstatus_to_exitcode` can be used to convert the result
+ (exit status) into an exit code. On Windows, the result is directly the exit
+ code.
+
.. audit-event:: os.system command os.system
.. availability:: Unix, Windows.
@@ -3912,10 +4016,8 @@ written in Python, such as a mail server's external command delivery program.
See the Unix manual page
:manpage:`times(2)` and :manpage:`times(3)` manual page on Unix or `the GetProcessTimes MSDN
- `
- _ on Windows.
- On Windows, only :attr:`user` and :attr:`system` are known; the other
- attributes are zero.
+ `_
+ on Windows. On Windows, only :attr:`user` and :attr:`system` are known; the other attributes are zero.
.. availability:: Unix, Windows.
@@ -3932,8 +4034,16 @@ written in Python, such as a mail server's external command delivery program.
number is zero); the high bit of the low byte is set if a core file was
produced.
+ :func:`waitstatus_to_exitcode` can be used to convert the exit status into an
+ exit code.
+
.. availability:: Unix.
+ .. seealso::
+
+ :func:`waitpid` can be used to wait for the completion of a specific
+ child process and has more options.
+
.. function:: waitid(idtype, id, options)
Wait for the completion of one or more child processes.
@@ -4029,6 +4139,9 @@ written in Python, such as a mail server's external command delivery program.
id is known, not necessarily a child process. The :func:`spawn\* `
functions called with :const:`P_NOWAIT` return suitable process handles.
+ :func:`waitstatus_to_exitcode` can be used to convert the exit status into an
+ exit code.
+
.. versionchanged:: 3.5
If the system call is interrupted and the signal handler does not raise an
exception, the function now retries the system call instead of raising an
@@ -4044,6 +4157,9 @@ written in Python, such as a mail server's external command delivery program.
information. The option argument is the same as that provided to
:func:`waitpid` and :func:`wait4`.
+ :func:`waitstatus_to_exitcode` can be used to convert the exit status into an
+ exit code.
+
.. availability:: Unix.
@@ -4055,9 +4171,42 @@ written in Python, such as a mail server's external command delivery program.
resource usage information. The arguments to :func:`wait4` are the same
as those provided to :func:`waitpid`.
+ :func:`waitstatus_to_exitcode` can be used to convert the exit status into an
+ exit code.
+
.. availability:: Unix.
+.. function:: waitstatus_to_exitcode(status)
+
+ Convert a wait status to an exit code.
+
+ On Unix:
+
+ * If the process exited normally (if ``WIFEXITED(status)`` is true),
+ return the process exit status (return ``WEXITSTATUS(status)``):
+ result greater than or equal to 0.
+ * If the process was terminated by a signal (if ``WIFSIGNALED(status)`` is
+ true), return ``-signum`` where *signum* is the number of the signal that
+ caused the process to terminate (return ``-WTERMSIG(status)``):
+ result less than 0.
+ * Otherwise, raise a :exc:`ValueError`.
+
+ On Windows, return *status* shifted right by 8 bits.
+
+ On Unix, if the process is being traced or if :func:`waitpid` was called
+ with :data:`WUNTRACED` option, the caller must first check if
+ ``WIFSTOPPED(status)`` is true. This function must not be called if
+ ``WIFSTOPPED(status)`` is true.
+
+ .. seealso::
+
+ :func:`WIFEXITED`, :func:`WEXITSTATUS`, :func:`WIFSIGNALED`,
+ :func:`WTERMSIG`, :func:`WIFSTOPPED`, :func:`WSTOPSIG` functions.
+
+ .. versionadded:: 3.9
+
+
.. data:: WNOHANG
The option for :func:`waitpid` to return immediately if no child process status
@@ -4091,28 +4240,36 @@ used to determine the disposition of a process.
Return ``True`` if a core dump was generated for the process, otherwise
return ``False``.
+ This function should be employed only if :func:`WIFSIGNALED` is true.
+
.. availability:: Unix.
.. function:: WIFCONTINUED(status)
- Return ``True`` if the process has been continued from a job control stop,
- otherwise return ``False``.
+ Return ``True`` if a stopped child has been resumed by delivery of
+ :data:`~signal.SIGCONT` (if the process has been continued from a job
+ control stop), otherwise return ``False``.
+
+ See :data:`WCONTINUED` option.
.. availability:: Unix.
.. function:: WIFSTOPPED(status)
- Return ``True`` if the process has been stopped, otherwise return
- ``False``.
+ Return ``True`` if the process was stopped by delivery of a signal,
+ otherwise return ``False``.
- .. availability:: Unix.
+ :func:`WIFSTOPPED` only returns ``True`` if the :func:`waitpid` call was
+ done using :data:`WUNTRACED` option or when the process is being traced (see
+ :manpage:`ptrace(2)`).
+ .. availability:: Unix.
.. function:: WIFSIGNALED(status)
- Return ``True`` if the process exited due to a signal, otherwise return
+ Return ``True`` if the process was terminated by a signal, otherwise return
``False``.
.. availability:: Unix.
@@ -4120,7 +4277,8 @@ used to determine the disposition of a process.
.. function:: WIFEXITED(status)
- Return ``True`` if the process exited using the :manpage:`exit(2)` system call,
+ Return ``True`` if the process terminated normally, that is,
+ by calling ``exit()`` or ``_exit()``, or by returning from ``main()``;
otherwise return ``False``.
.. availability:: Unix.
@@ -4128,8 +4286,9 @@ used to determine the disposition of a process.
.. function:: WEXITSTATUS(status)
- If ``WIFEXITED(status)`` is true, return the integer parameter to the
- :manpage:`exit(2)` system call. Otherwise, the return value is meaningless.
+ Return the process exit status.
+
+ This function should be employed only if :func:`WIFEXITED` is true.
.. availability:: Unix.
@@ -4138,12 +4297,16 @@ used to determine the disposition of a process.
Return the signal which caused the process to stop.
+ This function should be employed only if :func:`WIFSTOPPED` is true.
+
.. availability:: Unix.
.. function:: WTERMSIG(status)
- Return the signal which caused the process to exit.
+ Return the number of the signal that caused the process to terminate.
+
+ This function should be employed only if :func:`WIFSIGNALED` is true.
.. availability:: Unix.
diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst
index 5906a33bdea118..bf6fee44df2c88 100644
--- a/Doc/library/pathlib.rst
+++ b/Doc/library/pathlib.rst
@@ -528,8 +528,10 @@ Pure paths provide the following methods and properties:
>>> PurePath('a/b.py').match('/*.py')
False
- As with other methods, case-sensitivity is observed::
+ As with other methods, case-sensitivity follows platform defaults::
+ >>> PurePosixPath('b.py').match('*.PY')
+ False
>>> PureWindowsPath('b.py').match('*.PY')
True
@@ -549,7 +551,9 @@ Pure paths provide the following methods and properties:
File "", line 1, in
File "pathlib.py", line 694, in relative_to
.format(str(self), str(formatted)))
- ValueError: '/etc/passwd' does not start with '/usr'
+ ValueError: '/etc/passwd' is not in the subpath of '/usr' OR one path is relative and the other absolute.
+
+ NOTE: This function is part of :class:`PurePath` and works with strings. It does not check or access the underlying file structure.
.. method:: PurePath.with_name(name)
@@ -569,6 +573,30 @@ Pure paths provide the following methods and properties:
ValueError: PureWindowsPath('c:/') has an empty name
+.. method:: PurePath.with_stem(stem)
+
+ Return a new path with the :attr:`stem` changed. If the original path
+ doesn't have a name, ValueError is raised::
+
+ >>> p = PureWindowsPath('c:/Downloads/draft.txt')
+ >>> p.with_stem('final')
+ PureWindowsPath('c:/Downloads/final.txt')
+ >>> p = PureWindowsPath('c:/Downloads/pathlib.tar.gz')
+ >>> p.with_stem('lib')
+ PureWindowsPath('c:/Downloads/lib.gz')
+ >>> p = PureWindowsPath('c:/')
+ >>> p.with_stem('')
+ Traceback (most recent call last):
+ File "", line 1, in
+ File "/home/antoine/cpython/default/Lib/pathlib.py", line 861, in with_stem
+ return self.with_name(stem + self.suffix)
+ File "/home/antoine/cpython/default/Lib/pathlib.py", line 851, in with_name
+ raise ValueError("%r has an empty name" % (self,))
+ ValueError: PureWindowsPath('c:/') has an empty name
+
+ .. versionadded:: 3.9
+
+
.. method:: PurePath.with_suffix(suffix)
Return a new path with the :attr:`suffix` changed. If the original path
@@ -685,7 +713,7 @@ call fails (for example because the path doesn't exist).
.. method:: Path.stat()
- Return information about this path (similarly to :func:`os.stat`).
+ Return a :class:`os.stat_result` object containing information about this path, like :func:`os.stat`.
The result is looked up at each call to this method.
::
@@ -763,6 +791,8 @@ call fails (for example because the path doesn't exist).
Using the "``**``" pattern in large directory trees may consume
an inordinate amount of time.
+ .. audit-event:: pathlib.Path.glob self,pattern pathlib.Path.glob
+
.. method:: Path.group()
@@ -1025,6 +1055,8 @@ call fails (for example because the path doesn't exist).
PosixPath('setup.py'),
PosixPath('test_pathlib.py')]
+ .. audit-event:: pathlib.Path.rglob self,pattern pathlib.Path.rglob
+
.. method:: Path.rmdir()
@@ -1155,6 +1187,7 @@ os and os.path pathlib
:func:`os.path.abspath` :meth:`Path.resolve`
:func:`os.chmod` :meth:`Path.chmod`
:func:`os.mkdir` :meth:`Path.mkdir`
+:func:`os.makedirs` :meth:`Path.mkdir`
:func:`os.rename` :meth:`Path.rename`
:func:`os.replace` :meth:`Path.replace`
:func:`os.rmdir` :meth:`Path.rmdir`
diff --git a/Doc/library/pickle.rst b/Doc/library/pickle.rst
index 779b60ed4da00e..b7c34527719486 100644
--- a/Doc/library/pickle.rst
+++ b/Doc/library/pickle.rst
@@ -252,10 +252,10 @@ process more convenient:
.. versionchanged:: 3.8
The *buffers* argument was added.
-.. function:: loads(bytes_object, \*, fix_imports=True, encoding="ASCII", errors="strict", buffers=None)
+.. function:: loads(data, /, \*, fix_imports=True, encoding="ASCII", errors="strict", buffers=None)
Return the reconstituted object hierarchy of the pickled representation
- *bytes_object* of an object.
+ *data* of an object. *data* must be a :term:`bytes-like object`.
The protocol version of the pickle is detected automatically, so no
protocol argument is needed. Bytes past the pickled representation
@@ -639,9 +639,9 @@ the methods :meth:`__getstate__` and :meth:`__setstate__`.
At unpickling time, some methods like :meth:`__getattr__`,
:meth:`__getattribute__`, or :meth:`__setattr__` may be called upon the
instance. In case those methods rely on some internal invariant being
- true, the type should implement :meth:`__getnewargs__` or
- :meth:`__getnewargs_ex__` to establish such an invariant; otherwise,
- neither :meth:`__new__` nor :meth:`__init__` will be called.
+ true, the type should implement :meth:`__new__` to establish such an
+ invariant, as :meth:`__init__` is not called when unpickling an
+ instance.
.. index:: pair: copy; protocol
diff --git a/Doc/library/pkgutil.rst b/Doc/library/pkgutil.rst
index 78a51573458583..2066cbb9fc57ce 100644
--- a/Doc/library/pkgutil.rst
+++ b/Doc/library/pkgutil.rst
@@ -227,3 +227,44 @@ support.
then ``None`` is returned. In particular, the :term:`loader` for
:term:`namespace packages <namespace package>` does not support
:meth:`get_data <importlib.abc.ResourceLoader.get_data>`.
+
+
+.. function:: resolve_name(name)
+
+ Resolve a name to an object.
+
+ This functionality is used in numerous places in the standard library (see
+ :issue:`12915`) - and equivalent functionality is also in widely used
+ third-party packages such as setuptools, Django and Pyramid.
+
+ It is expected that *name* will be a string in one of the following
+ formats, where W is shorthand for a valid Python identifier and dot stands
+ for a literal period in these pseudo-regexes:
+
+ * ``W(.W)*``
+ * ``W(.W)*:(W(.W)*)?``
+
+ The first form is intended for backward compatibility only. It assumes that
+ some part of the dotted name is a package, and the rest is an object
+ somewhere within that package, possibly nested inside other objects.
+ Because the place where the package stops and the object hierarchy starts
+ can't be inferred by inspection, repeated attempts to import must be done
+ with this form.
+
+ In the second form, the caller makes the division point clear through the
+ provision of a single colon: the dotted name to the left of the colon is a
+ package to be imported, and the dotted name to the right is the object
+ hierarchy within that package. Only one import is needed in this form. If
+ it ends with the colon, then a module object is returned.
+
+ The function will return an object (which might be a module), or raise one
+ of the following exceptions:
+
+ :exc:`ValueError` -- if *name* isn't in a recognised format.
+
+ :exc:`ImportError` -- if an import failed when it shouldn't have.
+
+ :exc:`AttributeError` -- If a failure occurred when traversing the object
+ hierarchy within the imported package to get to the desired object.
+
+ .. versionadded:: 3.9
diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst
index 207c3f8bca928b..16256c549208fc 100644
--- a/Doc/library/pprint.rst
+++ b/Doc/library/pprint.rst
@@ -147,7 +147,7 @@ The :mod:`pprint` module also provides several shortcut functions:
.. index:: builtin: eval
- Determine if the formatted representation of *object* is "readable," or can be
+ Determine if the formatted representation of *object* is "readable", or can be
used to reconstruct the value using :func:`eval`. This always returns ``False``
for recursive objects.
diff --git a/Doc/library/pty.rst b/Doc/library/pty.rst
index e85d2e239fdbdb..73d4f102fd4d8e 100644
--- a/Doc/library/pty.rst
+++ b/Doc/library/pty.rst
@@ -69,6 +69,11 @@ The :mod:`pty` module defines the following functions:
*select* throws an error on your platform when passed three empty lists. This
is a bug, documented in `issue 26228 <https://bugs.python.org/issue26228>`_.
+ Return the exit status value from :func:`os.waitpid` on the child process.
+
+ :func:`waitstatus_to_exitcode` can be used to convert the exit status into
+ an exit code.
+
.. audit-event:: pty.spawn argv pty.spawn
.. versionchanged:: 3.4
diff --git a/Doc/library/pyclbr.rst b/Doc/library/pyclbr.rst
index b80a2faed9b424..36e83e85c23141 100644
--- a/Doc/library/pyclbr.rst
+++ b/Doc/library/pyclbr.rst
@@ -1,8 +1,8 @@
-:mod:`pyclbr` --- Python class browser support
-==============================================
+:mod:`pyclbr` --- Python module browser support
+===============================================
.. module:: pyclbr
- :synopsis: Supports information extraction for a Python class browser.
+ :synopsis: Supports information extraction for a Python module browser.
.. sectionauthor:: Fred L. Drake, Jr.
@@ -29,6 +29,9 @@ modules.
*path* is a sequence of directory paths prepended to ``sys.path``,
which is used to locate the module source code.
+ This function is the original interface and is only kept for backward
+ compatibility. It returns a filtered version of the following.
+
.. function:: readmodule_ex(module, path=None)
diff --git a/Doc/library/queue.rst b/Doc/library/queue.rst
index 2eeab5e2626647..0ec5900bef5bbf 100644
--- a/Doc/library/queue.rst
+++ b/Doc/library/queue.rst
@@ -190,32 +190,28 @@ fully processed by daemon consumer threads.
Example of how to wait for enqueued tasks to be completed::
+ import threading, queue
+
+ q = queue.Queue()
+
def worker():
while True:
item = q.get()
- if item is None:
- break
- do_work(item)
+ print(f'Working on {item}')
+ print(f'Finished {item}')
q.task_done()
- q = queue.Queue()
- threads = []
- for i in range(num_worker_threads):
- t = threading.Thread(target=worker)
- t.start()
- threads.append(t)
+ # turn-on the worker thread
+ threading.Thread(target=worker, daemon=True).start()
- for item in source():
+ # send thirty task requests to the worker
+ for item in range(30):
q.put(item)
+ print('All task requests sent\n', end='')
# block until all tasks are done
q.join()
-
- # stop workers
- for i in range(num_worker_threads):
- q.put(None)
- for t in threads:
- t.join()
+ print('All work completed')
SimpleQueue Objects
diff --git a/Doc/library/random.rst b/Doc/library/random.rst
index 1eb39bbda42e81..0cdf0a6ac4a477 100644
--- a/Doc/library/random.rst
+++ b/Doc/library/random.rst
@@ -104,12 +104,17 @@ Bookkeeping functions
the time :func:`getstate` was called.
-.. function:: getrandbits(k)
+Functions for bytes
+-------------------
- Returns a Python integer with *k* random bits. This method is supplied with
- the MersenneTwister generator and some other generators may also provide it
- as an optional part of the API. When available, :meth:`getrandbits` enables
- :meth:`randrange` to handle arbitrarily large ranges.
+.. function:: randbytes(n)
+
+ Generate *n* random bytes.
+
+ This method should not be used for generating security tokens.
+ Use :func:`secrets.token_bytes` instead.
+
+ .. versionadded:: 3.9
Functions for integers
@@ -135,6 +140,16 @@ Functions for integers
Return a random integer *N* such that ``a <= N <= b``. Alias for
``randrange(a, b+1)``.
+.. function:: getrandbits(k)
+
+ Returns a Python integer with *k* random bits. This method is supplied with
+ the MersenneTwister generator and some other generators may also provide it
+ as an optional part of the API. When available, :meth:`getrandbits` enables
+ :meth:`randrange` to handle arbitrarily large ranges.
+
+ .. versionchanged:: 3.9
+ This method now accepts zero for *k*.
+
Functions for sequences
-----------------------
@@ -198,8 +213,11 @@ Functions for sequences
generated. For example, a sequence of length 2080 is the largest that
can fit within the period of the Mersenne Twister random number generator.
+ .. deprecated-removed:: 3.9 3.11
+ The optional parameter *random*.
+
-.. function:: sample(population, k)
+.. function:: sample(population, k, *, counts=None)
Return a *k* length list of unique elements chosen from the population sequence
or set. Used for random sampling without replacement.
@@ -213,6 +231,11 @@ Functions for sequences
Members of the population need not be :term:`hashable` or unique. If the population
contains repeats, then each occurrence is a possible selection in the sample.
+ Repeated elements can be specified one at a time or with the optional
+ keyword-only *counts* parameter. For example, ``sample(['red', 'blue'],
+ counts=[4, 2], k=5)`` is equivalent to ``sample(['red', 'red', 'red', 'red',
+ 'blue', 'blue'], k=5)``.
+
To choose a sample from a range of integers, use a :func:`range` object as an
argument. This is especially fast and space efficient for sampling from a large
population: ``sample(range(10000000), k=60)``.
@@ -220,6 +243,16 @@ Functions for sequences
If the sample size is larger than the population size, a :exc:`ValueError`
is raised.
+ .. versionchanged:: 3.9
+ Added the *counts* parameter.
+
+ .. deprecated:: 3.9
+ In the future, the *population* must be a sequence. Instances of
+ :class:`set` are no longer supported. The set must first be converted
+ to a :class:`list` or :class:`tuple`, preferably in a deterministic
+ order so that the sample is reproducible.
+
+
Real-valued distributions
-------------------------
@@ -395,12 +428,11 @@ Simulations::
>>> choices(['red', 'black', 'green'], [18, 18, 2], k=6)
['red', 'green', 'black', 'black', 'red', 'black']
- >>> # Deal 20 cards without replacement from a deck of 52 playing cards
- >>> # and determine the proportion of cards with a ten-value
- >>> # (a ten, jack, queen, or king).
- >>> deck = collections.Counter(tens=16, low_cards=36)
- >>> seen = sample(list(deck.elements()), k=20)
- >>> seen.count('tens') / 20
+ >>> # Deal 20 cards without replacement from a deck
+ >>> # of 52 playing cards, and determine the proportion of cards
+ >>> # with a ten-value: ten, jack, queen, or king.
+ >>> dealt = sample(['tens', 'low cards'], counts=[16, 36], k=20)
+ >>> dealt.count('tens') / 20
0.15
>>> # Estimate the probability of getting 5 or more heads from 7 spins
@@ -408,29 +440,28 @@ Simulations::
>>> def trial():
... return choices('HT', cum_weights=(0.60, 1.00), k=7).count('H') >= 5
...
- >>> sum(trial() for i in range(10000)) / 10000
+ >>> sum(trial() for i in range(10_000)) / 10_000
0.4169
>>> # Probability of the median of 5 samples being in middle two quartiles
>>> def trial():
- ... return 2500 <= sorted(choices(range(10000), k=5))[2] < 7500
+ ... return 2_500 <= sorted(choices(range(10_000), k=5))[2] < 7_500
...
- >>> sum(trial() for i in range(10000)) / 10000
+ >>> sum(trial() for i in range(10_000)) / 10_000
0.7958
Example of `statistical bootstrapping
<https://en.wikipedia.org/wiki/Bootstrapping_(statistics)>`_ using resampling
-with replacement to estimate a confidence interval for the mean of a sample of
-size five::
+with replacement to estimate a confidence interval for the mean of a sample::
# http://statistics.about.com/od/Applications/a/Example-Of-Bootstrapping.htm
from statistics import fmean as mean
from random import choices
- data = 1, 2, 4, 4, 10
- means = sorted(mean(choices(data, k=5)) for i in range(20))
+ data = [41, 50, 29, 37, 81, 30, 73, 63, 20, 35, 68, 22, 60, 31, 95]
+ means = sorted(mean(choices(data, k=len(data))) for i in range(100))
print(f'The sample mean of {mean(data):.1f} has a 90% confidence '
- f'interval from {means[1]:.1f} to {means[-2]:.1f}')
+ f'interval from {means[5]:.1f} to {means[94]:.1f}')
Example of a `resampling permutation test
<https://en.wikipedia.org/wiki/Resampling_(statistics)#Permutation_tests>`_
@@ -446,7 +477,7 @@ between the effects of a drug versus a placebo::
placebo = [54, 51, 58, 44, 55, 52, 42, 47, 58, 46]
observed_diff = mean(drug) - mean(placebo)
- n = 10000
+ n = 10_000
count = 0
combined = drug + placebo
for i in range(n):
@@ -459,34 +490,31 @@ between the effects of a drug versus a placebo::
print(f'The one-sided p-value of {count / n:.4f} leads us to reject the null')
print(f'hypothesis that there is no difference between the drug and the placebo.')
-Simulation of arrival times and service deliveries in a single server queue::
+Simulation of arrival times and service deliveries for a multiserver queue::
+ from heapq import heappush, heappop
from random import expovariate, gauss
- from statistics import mean, median, stdev
+ from statistics import mean, quantiles
average_arrival_interval = 5.6
- average_service_time = 5.0
- stdev_service_time = 0.5
-
- num_waiting = 0
- arrivals = []
- starts = []
- arrival = service_end = 0.0
- for i in range(20000):
- if arrival <= service_end:
- num_waiting += 1
- arrival += expovariate(1.0 / average_arrival_interval)
- arrivals.append(arrival)
- else:
- num_waiting -= 1
- service_start = service_end if num_waiting else arrival
- service_time = gauss(average_service_time, stdev_service_time)
- service_end = service_start + service_time
- starts.append(service_start)
-
- waits = [start - arrival for arrival, start in zip(arrivals, starts)]
- print(f'Mean wait: {mean(waits):.1f}. Stdev wait: {stdev(waits):.1f}.')
- print(f'Median wait: {median(waits):.1f}. Max wait: {max(waits):.1f}.')
+ average_service_time = 15.0
+ stdev_service_time = 3.5
+ num_servers = 3
+
+ waits = []
+ arrival_time = 0.0
+ servers = [0.0] * num_servers # time when each server becomes available
+ for i in range(100_000):
+ arrival_time += expovariate(1.0 / average_arrival_interval)
+ next_server_available = heappop(servers)
+ wait = max(0.0, next_server_available - arrival_time)
+ waits.append(wait)
+ service_duration = gauss(average_service_time, stdev_service_time)
+ service_completed = arrival_time + wait + service_duration
+ heappush(servers, service_completed)
+
+ print(f'Mean wait: {mean(waits):.1f} Max wait: {max(waits):.1f}')
+ print('Quartiles:', [round(q, 1) for q in quantiles(waits)])
.. seealso::
diff --git a/Doc/library/re.rst b/Doc/library/re.rst
index 7c950bfd5b1fd5..9abbd8ba73616e 100644
--- a/Doc/library/re.rst
+++ b/Doc/library/re.rst
@@ -1617,10 +1617,14 @@ The text categories are specified with regular expressions. The technique is
to combine those into a single master regular expression and to loop over
successive matches::
- import collections
+ from typing import NamedTuple
import re
- Token = collections.namedtuple('Token', ['type', 'value', 'line', 'column'])
+ class Token(NamedTuple):
+ type: str
+ value: str
+ line: int
+ column: int
def tokenize(code):
keywords = {'IF', 'THEN', 'ENDIF', 'FOR', 'NEXT', 'GOSUB', 'RETURN'}
diff --git a/Doc/library/resource.rst b/Doc/library/resource.rst
index 3573da7ea2d716..e4eac43642d14d 100644
--- a/Doc/library/resource.rst
+++ b/Doc/library/resource.rst
@@ -78,6 +78,9 @@ this module for those platforms.
VxWorks only supports setting :data:`RLIMIT_NOFILE`.
+ .. audit-event:: resource.setrlimit resource,limits resource.setrlimit
+
+
.. function:: prlimit(pid, resource[, limits])
Combines :func:`setrlimit` and :func:`getrlimit` in one function and
@@ -94,6 +97,8 @@ this module for those platforms.
:exc:`PermissionError` when the user doesn't have ``CAP_SYS_RESOURCE`` for
the process.
+ .. audit-event:: resource.prlimit pid,resource,limits resource.prlimit
+
.. availability:: Linux 2.6.36 or later with glibc 2.13 or later.
.. versionadded:: 3.4
diff --git a/Doc/library/select.rst b/Doc/library/select.rst
index bb2809580d0401..a354187c266c7f 100644
--- a/Doc/library/select.rst
+++ b/Doc/library/select.rst
@@ -117,7 +117,7 @@ The module defines the following:
.. function:: select(rlist, wlist, xlist[, timeout])
This is a straightforward interface to the Unix :c:func:`select` system call.
- The first three arguments are sequences of 'waitable objects': either
+ The first three arguments are iterables of 'waitable objects': either
integers representing file descriptors or objects with a parameterless method
named :meth:`~io.IOBase.fileno` returning such an integer:
@@ -126,7 +126,7 @@ The module defines the following:
* *xlist*: wait for an "exceptional condition" (see the manual page for what
your system considers such a condition)
- Empty sequences are allowed, but acceptance of three empty sequences is
+ Empty iterables are allowed, but acceptance of three empty iterables is
platform-dependent. (It is known to work on Unix but not on Windows.) The
optional *timeout* argument specifies a time-out as a floating point number
in seconds. When the *timeout* argument is omitted the function blocks until
@@ -141,7 +141,7 @@ The module defines the following:
single: socket() (in module socket)
single: popen() (in module os)
- Among the acceptable object types in the sequences are Python :term:`file
+ Among the acceptable object types in the iterables are Python :term:`file
objects ` (e.g. ``sys.stdin``, or objects returned by
:func:`open` or :func:`os.popen`), socket objects returned by
:func:`socket.socket`. You may also define a :dfn:`wrapper` class yourself,
diff --git a/Doc/library/shlex.rst b/Doc/library/shlex.rst
index adc23da6d491b7..7f7f0c7f124ac5 100644
--- a/Doc/library/shlex.rst
+++ b/Doc/library/shlex.rst
@@ -36,6 +36,9 @@ The :mod:`shlex` module defines the following functions:
instance, passing ``None`` for *s* will read the string to split from
standard input.
+ .. deprecated:: 3.9
+ Passing ``None`` for *s* will raise an exception in future Python
+ versions.
.. function:: join(split_command)
diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst
index 59390d0e907eb9..c7c63e6f80844a 100644
--- a/Doc/library/shutil.rst
+++ b/Doc/library/shutil.rst
@@ -67,6 +67,8 @@ Directory and files operations
a new symbolic link will be created instead of copying the
file *src* points to.
+ .. audit-event:: shutil.copyfile src,dst shutil.copyfile
+
.. versionchanged:: 3.3
:exc:`IOError` used to be raised instead of :exc:`OSError`.
Added *follow_symlinks* argument.
@@ -101,6 +103,8 @@ Directory and files operations
:func:`copymode` cannot modify symbolic links on the local platform, and it
is asked to do so, it will do nothing and return.
+ .. audit-event:: shutil.copymode src,dst shutil.copymode
+
.. versionchanged:: 3.3
Added *follow_symlinks* argument.
@@ -146,6 +150,8 @@ Directory and files operations
Please see :data:`os.supports_follow_symlinks`
for more information.
+ .. audit-event:: shutil.copystat src,dst shutil.copystat
+
.. versionchanged:: 3.3
Added *follow_symlinks* argument and support for Linux extended attributes.
@@ -167,6 +173,10 @@ Directory and files operations
To preserve all file metadata from the original, use
:func:`~shutil.copy2` instead.
+ .. audit-event:: shutil.copyfile src,dst shutil.copy
+
+ .. audit-event:: shutil.copymode src,dst shutil.copy
+
.. versionchanged:: 3.3
Added *follow_symlinks* argument.
Now returns path to the newly created file.
@@ -194,6 +204,10 @@ Directory and files operations
Please see :func:`copystat` for more information
about platform support for modifying symbolic link metadata.
+ .. audit-event:: shutil.copyfile src,dst shutil.copy2
+
+ .. audit-event:: shutil.copystat src,dst shutil.copy2
+
.. versionchanged:: 3.3
Added *follow_symlinks* argument, try to copy extended
file system attributes too (currently Linux only).
@@ -342,6 +356,8 @@ Directory and files operations
*copy_function* allows the move to succeed when it is not possible to also
copy the metadata, at the expense of not copying any of the metadata.
+ .. audit-event:: shutil.move src,dst shutil.move
+
.. versionchanged:: 3.3
Added explicit symlink handling for foreign filesystems, thus adapting
it to the behavior of GNU's :program:`mv`.
@@ -381,6 +397,8 @@ Directory and files operations
See also :func:`os.chown`, the underlying function.
+ .. audit-event:: shutil.chown path,user,group shutil.chown
+
.. availability:: Unix.
.. versionadded:: 3.3
@@ -632,6 +650,8 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
registered for that extension. In case none is found,
a :exc:`ValueError` is raised.
+ .. audit-event:: shutil.unpack_archive filename,extract_dir,format shutil.unpack_archive
+
.. versionchanged:: 3.7
Accepts a :term:`path-like object` for *filename* and *extract_dir*.
diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst
index a79fc501352d4c..05b285ed110ec4 100644
--- a/Doc/library/signal.rst
+++ b/Doc/library/signal.rst
@@ -53,12 +53,12 @@ This has consequences:
Signals and threads
^^^^^^^^^^^^^^^^^^^
-Python signal handlers are always executed in the main Python thread,
+Python signal handlers are always executed in the main Python thread of the main interpreter,
even if the signal was received in another thread. This means that signals
can't be used as a means of inter-thread communication. You can use
the synchronization primitives from the :mod:`threading` module instead.
-Besides, only the main thread is allowed to set a new signal handler.
+Besides, only the main thread of the main interpreter is allowed to set a new signal handler.
Module contents
@@ -91,6 +91,110 @@ The variables defined in the :mod:`signal` module are:
signal.
+.. data:: SIGABRT
+
+ Abort signal from :manpage:`abort(3)`.
+
+.. data:: SIGALRM
+
+ Timer signal from :manpage:`alarm(2)`.
+
+ .. availability:: Unix.
+
+.. data:: SIGBREAK
+
+ Interrupt from keyboard (CTRL + BREAK).
+
+ .. availability:: Windows.
+
+.. data:: SIGBUS
+
+ Bus error (bad memory access).
+
+ .. availability:: Unix.
+
+.. data:: SIGCHLD
+
+ Child process stopped or terminated.
+
+ .. availability:: Unix.
+
+.. data:: SIGCLD
+
+ Alias to :data:`SIGCHLD`.
+
+.. data:: SIGCONT
+
+ Continue the process if it is currently stopped.
+
+ .. availability:: Unix.
+
+.. data:: SIGFPE
+
+ Floating-point exception. For example, division by zero.
+
+ .. seealso::
+ :exc:`ZeroDivisionError` is raised when the second argument of a division
+ or modulo operation is zero.
+
+.. data:: SIGHUP
+
+ Hangup detected on controlling terminal or death of controlling process.
+
+ .. availability:: Unix.
+
+.. data:: SIGILL
+
+ Illegal instruction.
+
+.. data:: SIGINT
+
+ Interrupt from keyboard (CTRL + C).
+
+ Default action is to raise :exc:`KeyboardInterrupt`.
+
+.. data:: SIGKILL
+
+ Kill signal.
+
+ It cannot be caught, blocked, or ignored.
+
+ .. availability:: Unix.
+
+.. data:: SIGPIPE
+
+ Broken pipe: write to pipe with no readers.
+
+ Default action is to ignore the signal.
+
+ .. availability:: Unix.
+
+.. data:: SIGSEGV
+
+ Segmentation fault: invalid memory reference.
+
+.. data:: SIGTERM
+
+ Termination signal.
+
+.. data:: SIGUSR1
+
+ User-defined signal 1.
+
+ .. availability:: Unix.
+
+.. data:: SIGUSR2
+
+ User-defined signal 2.
+
+ .. availability:: Unix.
+
+.. data:: SIGWINCH
+
+ Window resize signal.
+
+ .. availability:: Unix.
+
.. data:: SIG*
All the signal numbers are defined symbolically. For example, the hangup signal
@@ -266,7 +370,7 @@ The :mod:`signal` module defines the following functions:
same process as the caller. The target thread can be executing any code
(Python or not). However, if the target thread is executing the Python
interpreter, the Python signal handlers will be :ref:`executed by the main
- thread `. Therefore, the only point of sending a
+ thread of the main interpreter <signals-and-threads>`. Therefore, the only point of sending a
signal to a particular Python thread would be to force a running system call
to fail with :exc:`InterruptedError`.
@@ -277,6 +381,8 @@ The :mod:`signal` module defines the following functions:
If *signalnum* is 0, then no signal is sent, but error checking is still
performed; this can be used to check if the target thread is still running.
+ .. audit-event:: signal.pthread_kill thread_id,signalnum signal.pthread_kill
+
.. availability:: Unix. See the man page :manpage:`pthread_kill(3)` for further
information.
@@ -308,6 +414,8 @@ The :mod:`signal` module defines the following functions:
For example, ``signal.pthread_sigmask(signal.SIG_BLOCK, [])`` reads the
signal mask of the calling thread.
+ :data:`SIGKILL` and :data:`SIGSTOP` cannot be blocked.
+
.. availability:: Unix. See the man page :manpage:`sigprocmask(3)` and
:manpage:`pthread_sigmask(3)` for further information.
@@ -358,7 +466,8 @@ The :mod:`signal` module defines the following functions:
If not -1, *fd* must be non-blocking. It is up to the library to remove
any bytes from *fd* before calling poll or select again.
- When threads are enabled, this function can only be called from the main thread;
+ When threads are enabled, this function can only be called
+ from :ref:`the main thread of the main interpreter <signals-and-threads>`;
attempting to call it from other threads will cause a :exc:`ValueError`
exception to be raised.
@@ -411,7 +520,8 @@ The :mod:`signal` module defines the following functions:
signal handler will be returned (see the description of :func:`getsignal`
above). (See the Unix man page :manpage:`signal(2)` for further information.)
- When threads are enabled, this function can only be called from the main thread;
+ When threads are enabled, this function can only be called
+ from :ref:`the main thread of the main interpreter <signals-and-threads>`;
attempting to call it from other threads will cause a :exc:`ValueError`
exception to be raised.
diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst
index a88e358eae5fde..b3cc60357f554a 100644
--- a/Doc/library/smtplib.rst
+++ b/Doc/library/smtplib.rst
@@ -279,9 +279,10 @@ An :class:`SMTP` instance has the following methods:
response for ESMTP option and store them for use by :meth:`has_extn`.
Also sets several informational attributes: the message returned by
the server is stored as the :attr:`ehlo_resp` attribute, :attr:`does_esmtp`
- is set to true or false depending on whether the server supports ESMTP, and
- :attr:`esmtp_features` will be a dictionary containing the names of the
- SMTP service extensions this server supports, and their parameters (if any).
+ is set to ``True`` or ``False`` depending on whether the server supports
+ ESMTP, and :attr:`esmtp_features` will be a dictionary containing the names
+ of the SMTP service extensions this server supports, and their parameters
+ (if any).
Unless you wish to use :meth:`has_extn` before sending mail, it should not be
necessary to call this method explicitly. It will be implicitly called by
diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst
index 2cc946c519d405..d798c1a9d10a05 100755
--- a/Doc/library/socket.rst
+++ b/Doc/library/socket.rst
@@ -78,15 +78,15 @@ created. Socket addresses are represented as follows:
Python programs.
- For :const:`AF_INET6` address family, a four-tuple ``(host, port, flowinfo,
- scopeid)`` is used, where *flowinfo* and *scopeid* represent the ``sin6_flowinfo``
+ scope_id)`` is used, where *flowinfo* and *scope_id* represent the ``sin6_flowinfo``
and ``sin6_scope_id`` members in :const:`struct sockaddr_in6` in C. For
- :mod:`socket` module methods, *flowinfo* and *scopeid* can be omitted just for
- backward compatibility. Note, however, omission of *scopeid* can cause problems
+ :mod:`socket` module methods, *flowinfo* and *scope_id* can be omitted just for
+ backward compatibility. Note, however, omission of *scope_id* can cause problems
in manipulating scoped IPv6 addresses.
.. versionchanged:: 3.7
- For multicast addresses (with *scopeid* meaningful) *address* may not contain
- ``%scope`` (or ``zone id``) part. This information is superfluous and may
+ For multicast addresses (with *scope_id* meaningful) *address* may not contain
+ ``%scope_id`` (or ``zone id``) part. This information is superfluous and may
be safely omitted (recommended).
- :const:`AF_NETLINK` sockets are represented as pairs ``(pid, groups)``.
@@ -118,6 +118,10 @@ created. Socket addresses are represented as follows:
- :const:`CAN_ISOTP` protocol require a tuple ``(interface, rx_addr, tx_addr)``
where both additional parameters are unsigned long integer that represent a
CAN identifier (standard or extended).
+ - :const:`CAN_J1939` protocol require a tuple ``(interface, name, pgn, addr)``
+ where additional parameters are 64-bit unsigned integer representing the
+ ECU name, a 32-bit unsigned integer representing the Parameter Group Number
+ (PGN), and an 8-bit integer representing the address.
- A string or a tuple ``(id, unit)`` is used for the :const:`SYSPROTO_CONTROL`
protocol of the :const:`PF_SYSTEM` family. The string is the name of a
@@ -408,6 +412,17 @@ Constants
.. versionadded:: 3.5
+.. data:: CAN_RAW_JOIN_FILTERS
+
+ Joins the applied CAN filters such that only CAN frames that match all
+ given CAN filters are passed to user space.
+
+ This constant is documented in the Linux documentation.
+
+ .. availability:: Linux >= 4.1.
+
+ .. versionadded:: 3.9
+
.. data:: CAN_ISOTP
CAN_ISOTP, in the CAN protocol family, is the ISO-TP (ISO 15765-2) protocol.
@@ -417,6 +432,15 @@ Constants
.. versionadded:: 3.7
+.. data:: CAN_J1939
+
+ CAN_J1939, in the CAN protocol family, is the SAE J1939 protocol.
+ J1939 constants, documented in the Linux documentation.
+
+ .. availability:: Linux >= 5.4.
+
+ .. versionadded:: 3.9
+
.. data:: AF_PACKET
PF_PACKET
@@ -533,7 +557,8 @@ The following functions all create :ref:`socket objects <socket-objects>`.
default), :const:`SOCK_DGRAM`, :const:`SOCK_RAW` or perhaps one of the other
``SOCK_`` constants. The protocol number is usually zero and may be omitted
or in the case where the address family is :const:`AF_CAN` the protocol
- should be one of :const:`CAN_RAW`, :const:`CAN_BCM` or :const:`CAN_ISOTP`.
+ should be one of :const:`CAN_RAW`, :const:`CAN_BCM`, :const:`CAN_ISOTP` or
+ :const:`CAN_J1939`.
If *fileno* is specified, the values for *family*, *type*, and *proto* are
auto-detected from the specified file descriptor. Auto-detection can be
@@ -577,6 +602,9 @@ The following functions all create :ref:`socket objects `.
``SOCK_NONBLOCK``, but ``sock.type`` will be set to
``socket.SOCK_STREAM``.
+ .. versionchanged:: 3.9
+ The CAN_J1939 protocol was added.
+
.. function:: socketpair([family[, type[, proto]]])
Build a pair of connected socket objects using the given address family, socket
@@ -738,7 +766,7 @@ The :mod:`socket` module also offers various network-related services:
:const:`AI_CANONNAME` is part of the *flags* argument; else *canonname*
will be empty. *sockaddr* is a tuple describing a socket address, whose
format depends on the returned *family* (a ``(address, port)`` 2-tuple for
- :const:`AF_INET`, a ``(address, port, flow info, scope id)`` 4-tuple for
+ :const:`AF_INET`, a ``(address, port, flowinfo, scope_id)`` 4-tuple for
:const:`AF_INET6`), and is meant to be passed to the :meth:`socket.connect`
method.
@@ -759,7 +787,7 @@ The :mod:`socket` module also offers various network-related services:
.. versionchanged:: 3.7
for IPv6 multicast addresses, string representing an address will not
- contain ``%scope`` part.
+ contain ``%scope_id`` part.
.. function:: getfqdn([name])
@@ -827,8 +855,8 @@ The :mod:`socket` module also offers various network-related services:
or numeric address representation in *host*. Similarly, *port* can contain a
string port name or a numeric port number.
- For IPv6 addresses, ``%scope`` is appended to the host part if *sockaddr*
- contains meaningful *scopeid*. Usually this happens for multicast addresses.
+ For IPv6 addresses, ``%scope_id`` is appended to the host part if *sockaddr*
+ contains meaningful *scope_id*. Usually this happens for multicast addresses.
For more information about *flags* you can consult :manpage:`getnameinfo(3)`.
@@ -1354,7 +1382,7 @@ to sockets.
.. versionchanged:: 3.7
For multicast IPv6 address, first item of *address* does not contain
- ``%scope`` part anymore. In order to get full IPv6 address use
+ ``%scope_id`` part anymore. In order to get full IPv6 address use
:func:`getnameinfo`.
.. method:: socket.recvmsg(bufsize[, ancbufsize[, flags]])
diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst
index 7c8c8d52e03d95..232c0616d925dc 100644
--- a/Doc/library/socketserver.rst
+++ b/Doc/library/socketserver.rst
@@ -237,6 +237,8 @@ Server Objects
.. method:: shutdown()
Tell the :meth:`serve_forever` loop to stop and wait until it does.
+ :meth:`shutdown` must be called while :meth:`serve_forever` is running in a
+ different thread; otherwise it will deadlock.
.. method:: server_close()
diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst
index 67ea2b1d776638..ccb82278bdaa13 100644
--- a/Doc/library/sqlite3.rst
+++ b/Doc/library/sqlite3.rst
@@ -165,9 +165,10 @@ Module functions and constants
that 'mytype' is the type of the column. It will try to find an entry of
'mytype' in the converters dictionary and then use the converter function found
there to return the value. The column name found in :attr:`Cursor.description`
- is only the first word of the column name, i. e. if you use something like
- ``'as "x [datetime]"'`` in your SQL, then we will parse out everything until the
- first blank for the column name: the column name would simply be "x".
+ does not include the type, i.e. if you use something like
+ ``'as "Expiration date [datetime]"'`` in your SQL, then we will parse out
+ everything until the first ``'['`` for the column name and strip
+ the preceding space: the column name would simply be "Expiration date".
.. function:: connect(database[, timeout, detect_types, isolation_level, check_same_thread, factory, cached_statements, uri])
@@ -927,7 +928,7 @@ a class like this::
self.x, self.y = x, y
Now you want to store the point in a single SQLite column. First you'll have to
-choose one of the supported types first to be used for representing the point.
+choose one of the supported types to be used for representing the point.
Let's just use str and separate the coordinates using a semicolon. Then you need
to give your class a method ``__conform__(self, protocol)`` which must return
the converted value. The parameter *protocol* will be :class:`PrepareProtocol`.
diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst
index 3e5fcab22d0280..852091c02ec9a4 100644
--- a/Doc/library/ssl.rst
+++ b/Doc/library/ssl.rst
@@ -42,7 +42,7 @@ This module provides a class, :class:`ssl.SSLSocket`, which is derived from the
:class:`socket.socket` type, and provides a socket-like wrapper that also
encrypts and decrypts the data going over the socket with SSL. It supports
additional methods such as :meth:`getpeercert`, which retrieves the
-certificate of the other side of the connection, and :meth:`cipher`,which
+certificate of the other side of the connection, and :meth:`cipher`, which
retrieves the cipher being used for the secure connection.
For more sophisticated applications, the :class:`ssl.SSLContext` class
@@ -615,7 +615,7 @@ Constants
Possible value for :attr:`SSLContext.verify_flags`. In this mode, only the
peer cert is checked but none of the intermediate CA certificates. The mode
requires a valid CRL that is signed by the peer cert's issuer (its direct
- ancestor CA). If no proper CRL has has been loaded with
+ ancestor CA). If no proper CRL has been loaded with
:attr:`SSLContext.load_verify_locations`, validation will fail.
.. versionadded:: 3.4
@@ -2271,7 +2271,7 @@ Visual inspection shows that the certificate does identify the desired service
(('postalCode', '03894-4801'),),
(('countryName', 'US'),),
(('stateOrProvinceName', 'NH'),),
- (('localityName', 'Wolfeboro,'),),
+ (('localityName', 'Wolfeboro'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'www.python.org'),)),
'subjectAltName': (('DNS', 'www.python.org'),
@@ -2486,14 +2486,17 @@ provided.
- :meth:`~SSLSocket.read`
- :meth:`~SSLSocket.write`
- :meth:`~SSLSocket.getpeercert`
+ - :meth:`~SSLSocket.selected_alpn_protocol`
- :meth:`~SSLSocket.selected_npn_protocol`
- :meth:`~SSLSocket.cipher`
- :meth:`~SSLSocket.shared_ciphers`
- :meth:`~SSLSocket.compression`
- :meth:`~SSLSocket.pending`
- :meth:`~SSLSocket.do_handshake`
+ - :meth:`~SSLSocket.verify_client_post_handshake`
- :meth:`~SSLSocket.unwrap`
- :meth:`~SSLSocket.get_channel_binding`
+ - :meth:`~SSLSocket.version`
When compared to :class:`SSLSocket`, this object lacks the following
features:
diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst
index 026f4aa462d3d7..38a499ab37e890 100644
--- a/Doc/library/statistics.rst
+++ b/Doc/library/statistics.rst
@@ -696,6 +696,16 @@ of applications in statistics.
Set *n* to 100 for percentiles which gives the 99 cuts points that
separate the normal distribution into 100 equal sized groups.
+ .. method:: NormalDist.zscore(x)
+
+ Compute the
+ `Standard Score <https://www.statisticshowto.com/probability-and-statistics/z-score/>`_
+ describing *x* in terms of the number of standard deviations
+ above or below the mean of the normal distribution:
+ ``(x - mean) / stdev``.
+
+ .. versionadded:: 3.9
+
Instances of :class:`NormalDist` support addition, subtraction,
multiplication and division by a constant. These operations
are used for translation and scaling. For example:
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index fd3401fd18a099..6a9fdcb38d24b7 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -261,8 +261,10 @@ and imaginary parts.
Python fully supports mixed arithmetic: when a binary arithmetic operator has
operands of different numeric types, the operand with the "narrower" type is
widened to that of the other, where integer is narrower than floating point,
-which is narrower than complex. Comparisons between numbers of mixed type use
-the same rule. [2]_ The constructors :func:`int`, :func:`float`, and
+which is narrower than complex. A comparison between numbers of different types
+behaves as though the exact values of those numbers were being compared. [2]_
+
+The constructors :func:`int`, :func:`float`, and
:func:`complex` can be used to produce numbers of a specific type.
All numeric types (except complex) support the following operations (for priorities of
@@ -350,7 +352,7 @@ Notes:
The numeric literals accepted include the digits ``0`` to ``9`` or any
Unicode equivalent (code points with the ``Nd`` property).
- See http://www.unicode.org/Public/12.1.0/ucd/extracted/DerivedNumericType.txt
+ See https://www.unicode.org/Public/13.0.0/ucd/extracted/DerivedNumericType.txt
for a complete list of code points with the ``Nd`` property.
@@ -432,12 +434,10 @@ Notes:
Negative shift counts are illegal and cause a :exc:`ValueError` to be raised.
(2)
- A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)``
- without overflow check.
+ A left shift by *n* bits is equivalent to multiplication by ``pow(2, n)``.
(3)
- A right shift by *n* bits is equivalent to division by ``pow(2, n)`` without
- overflow check.
+ A right shift by *n* bits is equivalent to floor division by ``pow(2, n)``.
(4)
Performing these calculations with at least one extra sign extension bit in
@@ -1547,6 +1547,33 @@ expression support in the :mod:`re` module).
interpreted as in slice notation.
+.. method:: str.removeprefix(prefix, /)
+
+ If the string starts with the *prefix* string, return
+ ``string[len(prefix):]``. Otherwise, return a copy of the original
+ string::
+
+ >>> 'TestHook'.removeprefix('Test')
+ 'Hook'
+ >>> 'BaseTestCase'.removeprefix('Test')
+ 'BaseTestCase'
+
+ .. versionadded:: 3.9
+
+.. method:: str.removesuffix(suffix, /)
+
+ If the string ends with the *suffix* string and that *suffix* is not empty,
+ return ``string[:-len(suffix)]``. Otherwise, return a copy of the
+ original string::
+
+ >>> 'MiscTests'.removesuffix('Tests')
+ 'Misc'
+ >>> 'TmpDirMixin'.removesuffix('Tests')
+ 'TmpDirMixin'
+
+ .. versionadded:: 3.9
+
+
.. method:: str.encode(encoding="utf-8", errors="strict")
Return an encoded version of the string as a bytes object. Default encoding
@@ -1829,6 +1856,14 @@ expression support in the :mod:`re` module).
>>> 'www.example.com'.lstrip('cmowz.')
'example.com'
+ See :meth:`str.removeprefix` for a method that will remove a single prefix
+ string rather than all of a set of characters. For example::
+
+ >>> 'Arthur: three!'.lstrip('Arthur: ')
+ 'ee!'
+ >>> 'Arthur: three!'.removeprefix('Arthur: ')
+ 'three!'
+
.. staticmethod:: str.maketrans(x[, y[, z]])
@@ -1909,6 +1944,13 @@ expression support in the :mod:`re` module).
>>> 'mississippi'.rstrip('ipz')
'mississ'
+ See :meth:`str.removesuffix` for a method that will remove a single suffix
+ string rather than all of a set of characters. For example::
+
+ >>> 'Monty Python'.rstrip(' Python')
+ 'M'
+ >>> 'Monty Python'.removesuffix(' Python')
+ 'Monty'
.. method:: str.split(sep=None, maxsplit=-1)
@@ -2416,7 +2458,7 @@ data and are closely related to string objects in a variety of other ways.
A reverse conversion function exists to transform a bytes object into its
hexadecimal representation.
- .. method:: hex()
+ .. method:: hex([sep[, bytes_per_sep]])
Return a string object containing two hexadecimal digits for each
byte in the instance.
@@ -2510,7 +2552,7 @@ objects.
A reverse conversion function exists to transform a bytearray object into its
hexadecimal representation.
- .. method:: hex()
+ .. method:: hex([sep[, bytes_per_sep]])
Return a string object containing two hexadecimal digits for each
byte in the instance.
@@ -2520,6 +2562,11 @@ objects.
.. versionadded:: 3.5
+ .. versionchanged:: 3.8
+ Similar to :meth:`bytes.hex`, :meth:`bytearray.hex` now supports
+ optional *sep* and *bytes_per_sep* parameters to insert separators
+ between bytes in the hex output.
+
Since bytearray objects are sequences of integers (akin to a list), for a
bytearray object *b*, ``b[0]`` will be an integer, while ``b[0:1]`` will be
a bytearray object of length 1. (This contrasts with text strings, where
@@ -2584,6 +2631,50 @@ arbitrary binary data.
Also accept an integer in the range 0 to 255 as the subsequence.
+.. method:: bytes.removeprefix(prefix, /)
+ bytearray.removeprefix(prefix, /)
+
+ If the binary data starts with the *prefix* string, return
+ ``bytes[len(prefix):]``. Otherwise, return a copy of the original
+ binary data::
+
+ >>> b'TestHook'.removeprefix(b'Test')
+ b'Hook'
+ >>> b'BaseTestCase'.removeprefix(b'Test')
+ b'BaseTestCase'
+
+ The *prefix* may be any :term:`bytes-like object`.
+
+ .. note::
+
+ The bytearray version of this method does *not* operate in place -
+ it always produces a new object, even if no changes were made.
+
+ .. versionadded:: 3.9
+
+
+.. method:: bytes.removesuffix(suffix, /)
+ bytearray.removesuffix(suffix, /)
+
+ If the binary data ends with the *suffix* string and that *suffix* is
+ not empty, return ``bytes[:-len(suffix)]``. Otherwise, return a copy of
+ the original binary data::
+
+ >>> b'MiscTests'.removesuffix(b'Tests')
+ b'Misc'
+ >>> b'TmpDirMixin'.removesuffix(b'Tests')
+ b'TmpDirMixin'
+
+ The *suffix* may be any :term:`bytes-like object`.
+
+ .. note::
+
+ The bytearray version of this method does *not* operate in place -
+ it always produces a new object, even if no changes were made.
+
+ .. versionadded:: 3.9
+
+
.. method:: bytes.decode(encoding="utf-8", errors="strict")
bytearray.decode(encoding="utf-8", errors="strict")
@@ -2834,7 +2925,14 @@ produce new objects.
b'example.com'
The binary sequence of byte values to remove may be any
- :term:`bytes-like object`.
+ :term:`bytes-like object`. See :meth:`~bytes.removeprefix` for a method
+ that will remove a single prefix string rather than all of a set of
+ characters. For example::
+
+ >>> b'Arthur: three!'.lstrip(b'Arthur: ')
+ b'ee!'
+ >>> b'Arthur: three!'.removeprefix(b'Arthur: ')
+ b'three!'
.. note::
@@ -2883,7 +2981,14 @@ produce new objects.
b'mississ'
The binary sequence of byte values to remove may be any
- :term:`bytes-like object`.
+ :term:`bytes-like object`. See :meth:`~bytes.removesuffix` for a method
+ that will remove a single suffix string rather than all of a set of
+ characters. For example::
+
+ >>> b'Monty Python'.rstrip(b' Python')
+ b'M'
+ >>> b'Monty Python'.removesuffix(b' Python')
+ b'Monty'
.. note::
@@ -3673,7 +3778,7 @@ copying.
in-memory Fortran order is preserved. For non-contiguous views, the
data is converted to C first. *order=None* is the same as *order='C'*.
- .. method:: hex()
+ .. method:: hex([sep[, bytes_per_sep]])
Return a string object containing two hexadecimal digits for each
byte in the buffer. ::
@@ -3684,6 +3789,11 @@ copying.
.. versionadded:: 3.5
+ .. versionchanged:: 3.8
+ Similar to :meth:`bytes.hex`, :meth:`memoryview.hex` now supports
+ optional *sep* and *bytes_per_sep* parameters to insert separators
+ between bytes in the hex output.
+
.. method:: tolist()
Return the data in the buffer as a list of elements. ::
@@ -4382,6 +4492,22 @@ pairs within braces, for example: ``{'jack': 4098, 'sjoerd': 4127}`` or ``{4098:
>>> d.values() == d.values()
False
+ .. describe:: d | other
+
+ Create a new dictionary with the merged keys and values of *d* and
+ *other*, which must both be dictionaries. The values of *other* take
+ priority when *d* and *other* share keys.
+
+ .. versionadded:: 3.9
+
+ .. describe:: d |= other
+
+ Update the dictionary *d* with keys and values from *other*, which may be
+ either a :term:`mapping` or an :term:`iterable` of key/value pairs. The
+ values of *other* take priority when *d* and *other* share keys.
+
+ .. versionadded:: 3.9
+
Dictionaries compare equal if and only if they have the same ``(key,
value)`` pairs (regardless of ordering). Order comparisons ('<', '<=', '>=', '>') raise
:exc:`TypeError`.
diff --git a/Doc/library/string.rst b/Doc/library/string.rst
index e2983db1ac8321..fa906f799c1082 100644
--- a/Doc/library/string.rst
+++ b/Doc/library/string.rst
@@ -302,9 +302,9 @@ specification is to be interpreted.
Most built-in types implement the following options for format specifications,
although some of the formatting options are only supported by the numeric types.
-A general convention is that an empty format string (``""``) produces
+A general convention is that an empty format specification produces
the same result as if you had called :func:`str` on the value. A
-non-empty format string typically modifies the result.
+non-empty format specification typically modifies the result.
The general form of a *standard format specifier* is:
@@ -415,8 +415,9 @@ error.
.. versionchanged:: 3.6
Added the ``'_'`` option (see also :pep:`515`).
-*width* is a decimal integer defining the minimum field width. If not
-specified, then the field width will be determined by the content.
+*width* is a decimal integer defining the minimum total field width,
+including any prefixes, separators, and other formatting characters.
+If not specified, then the field width will be determined by the content.
When no explicit alignment is given, preceding the *width* field by a zero
(``'0'``) character enables
diff --git a/Doc/library/struct.rst b/Doc/library/struct.rst
index 1f90e3d1ba8557..856b6da8bb255d 100644
--- a/Doc/library/struct.rst
+++ b/Doc/library/struct.rst
@@ -259,7 +259,7 @@ Notes:
called to convert the argument to an integer before packing.
.. versionchanged:: 3.2
- Use of the :meth:`__index__` method for non-integers is new in 3.2.
+ Added use of the :meth:`__index__` method for non-integers.
(3)
The ``'n'`` and ``'N'`` conversion codes are only available for the native
@@ -312,7 +312,7 @@ When packing a value ``x`` using one of the integer formats (``'b'``,
then :exc:`struct.error` is raised.
.. versionchanged:: 3.1
- In 3.0, some of the integer formats wrapped out-of-range values and
+ Previously, some of the integer formats wrapped out-of-range values and
raised :exc:`DeprecationWarning` instead of :exc:`struct.error`.
The ``'p'`` format character encodes a "Pascal string", meaning a short
diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst
index 74857480360dc9..5988bd35e72b12 100644
--- a/Doc/library/subprocess.rst
+++ b/Doc/library/subprocess.rst
@@ -40,7 +40,7 @@ compatibility with older versions, see the :ref:`call-function-trio` section.
.. function:: run(args, *, stdin=None, input=None, stdout=None, stderr=None,\
capture_output=False, shell=False, cwd=None, timeout=None, \
check=False, encoding=None, errors=None, text=None, env=None, \
- universal_newlines=None)
+ universal_newlines=None, **other_popen_kwargs)
Run the command described by *args*. Wait for command to complete, then
return a :class:`CompletedProcess` instance.
@@ -356,14 +356,20 @@ functions.
arguments for additional differences from the default behavior. Unless
otherwise stated, it is recommended to pass *args* as a sequence.
+ An example of passing some arguments to an external program
+ as a sequence is::
+
+ Popen(["/usr/bin/git", "commit", "-m", "Fixes a bug."])
+
On POSIX, if *args* is a string, the string is interpreted as the name or
path of the program to execute. However, this can only be done if not
passing arguments to the program.
.. note::
- :meth:`shlex.split` can be useful when determining the correct
- tokenization for *args*, especially in complex cases::
+ It may not be obvious how to break a shell command into a sequence of arguments,
+ especially in complex cases. :meth:`shlex.split` can illustrate how to
+ determine the correct tokenization for *args*::
>>> import shlex, subprocess
>>> command_line = input()
@@ -785,14 +791,14 @@ Instances of the :class:`Popen` class have the following methods:
.. method:: Popen.terminate()
- Stop the child. On Posix OSs the method sends SIGTERM to the
+ Stop the child. On POSIX OSs the method sends SIGTERM to the
child. On Windows the Win32 API function :c:func:`TerminateProcess` is called
to stop the child.
.. method:: Popen.kill()
- Kills the child. On Posix OSs the function sends SIGKILL to the child.
+ Kills the child. On POSIX OSs the function sends SIGKILL to the child.
On Windows :meth:`kill` is an alias for :meth:`terminate`.
@@ -1079,7 +1085,8 @@ Prior to Python 3.5, these three functions comprised the high level API to
subprocess. You can now use :func:`run` in many cases, but lots of existing code
calls these functions.
-.. function:: call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None)
+.. function:: call(args, *, stdin=None, stdout=None, stderr=None, \
+ shell=False, cwd=None, timeout=None, **other_popen_kwargs)
Run the command described by *args*. Wait for command to complete, then
return the :attr:`~Popen.returncode` attribute.
@@ -1105,7 +1112,9 @@ calls these functions.
.. versionchanged:: 3.3
*timeout* was added.
-.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, shell=False, cwd=None, timeout=None)
+.. function:: check_call(args, *, stdin=None, stdout=None, stderr=None, \
+ shell=False, cwd=None, timeout=None, \
+ **other_popen_kwargs)
Run command with arguments. Wait for command to complete. If the return
code was zero then return, otherwise raise :exc:`CalledProcessError`. The
@@ -1136,7 +1145,8 @@ calls these functions.
.. function:: check_output(args, *, stdin=None, stderr=None, shell=False, \
cwd=None, encoding=None, errors=None, \
- universal_newlines=None, timeout=None, text=None)
+ universal_newlines=None, timeout=None, text=None, \
+ **other_popen_kwargs)
Run command with arguments and return its output.
diff --git a/Doc/library/symtable.rst b/Doc/library/symtable.rst
index 7c6ac4dccf8b76..3efdecb5af7102 100644
--- a/Doc/library/symtable.rst
+++ b/Doc/library/symtable.rst
@@ -67,10 +67,6 @@ Examining Symbol Tables
Return ``True`` if the block has nested namespaces within it. These can
be obtained with :meth:`get_children`.
- .. method:: has_exec()
-
- Return ``True`` if the block uses ``exec``.
-
.. method:: get_identifiers()
Return a list of names of symbols in this table.
diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
index d28b3565c1c633..880f252f84aa0b 100644
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -343,6 +343,8 @@ always available.
.. versionadded:: 3.7
__breakpointhook__
+ .. versionadded:: 3.8
+ __unraisablehook__
.. function:: exc_info()
@@ -1136,6 +1138,28 @@ always available.
system's identity.
+.. data:: platlibdir
+
+ Name of the platform-specific library directory. It is used to build the
+ path of standard library and the paths of installed extension modules.
+
+ It is equal to ``"lib"`` on most platforms. On Fedora and SuSE, it is equal
+ to ``"lib64"`` on 64-bit platforms which gives the following ``sys.path``
+ paths (where ``X.Y`` is the Python ``major.minor`` version):
+
+ * ``/usr/lib64/pythonX.Y/``:
+ Standard library (like ``os.py`` of the :mod:`os` module)
+ * ``/usr/lib64/pythonX.Y/lib-dynload/``:
+ C extension modules of the standard library (like the :mod:`errno` module,
+ the exact filename is platform specific)
+ * ``/usr/lib/pythonX.Y/site-packages/`` (always use ``lib``, not
+ :data:`sys.platlibdir`): Third-party modules
+ * ``/usr/lib64/pythonX.Y/site-packages/``:
+ C extension modules of third-party packages
+
+ .. versionadded:: 3.9
+
+
.. data:: prefix
A string giving the site-specific directory prefix where the platform
diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst
index b5a1da80c686d4..78a1dfce9ae05c 100644
--- a/Doc/library/sysconfig.rst
+++ b/Doc/library/sysconfig.rst
@@ -74,12 +74,12 @@ places.
Python currently supports seven schemes:
-- *posix_prefix*: scheme for Posix platforms like Linux or Mac OS X. This is
+- *posix_prefix*: scheme for POSIX platforms like Linux or Mac OS X. This is
the default scheme used when Python or a component is installed.
-- *posix_home*: scheme for Posix platforms used when a *home* option is used
+- *posix_home*: scheme for POSIX platforms used when a *home* option is used
upon installation. This scheme is used when a component is installed through
Distutils with a specific home prefix.
-- *posix_user*: scheme for Posix platforms used when a component is installed
+- *posix_user*: scheme for POSIX platforms used when a component is installed
through Distutils and the *user* option is used. This scheme defines paths
located under the user home directory.
- *nt*: scheme for NT platforms like Windows.
diff --git a/Doc/library/syslog.rst b/Doc/library/syslog.rst
index 7151527ce57a54..d264a3340c98b0 100644
--- a/Doc/library/syslog.rst
+++ b/Doc/library/syslog.rst
@@ -31,6 +31,8 @@ The module defines the following functions:
If :func:`openlog` has not been called prior to the call to :func:`syslog`,
``openlog()`` will be called with no arguments.
+ .. audit-event:: syslog.syslog priority,message syslog.syslog
+
.. function:: openlog([ident[, logoption[, facility]]])
@@ -45,6 +47,8 @@ The module defines the following functions:
keyword argument (default is :const:`LOG_USER`) sets the default facility for
messages which do not have a facility explicitly encoded.
+ .. audit-event:: syslog.openlog ident,logoption,facility syslog.openlog
+
.. versionchanged:: 3.2
In previous versions, keyword arguments were not allowed, and *ident* was
required. The default for *ident* was dependent on the system libraries,
@@ -60,6 +64,8 @@ The module defines the following functions:
:func:`openlog` hasn't already been called), and *ident* and other
:func:`openlog` parameters are reset to defaults.
+ .. audit-event:: syslog.closelog "" syslog.closelog
+
.. function:: setlogmask(maskpri)
@@ -70,6 +76,8 @@ The module defines the following functions:
``LOG_UPTO(pri)`` calculates the mask for all priorities up to and including
*pri*.
+ .. audit-event:: syslog.setlogmask maskpri syslog.setlogmask
+
The module defines the following constants:
Priority levels (high to low):
diff --git a/Doc/library/test.rst b/Doc/library/test.rst
index 54ad620d7dae9d..7580fb5e9b174f 100644
--- a/Doc/library/test.rst
+++ b/Doc/library/test.rst
@@ -263,11 +263,6 @@ The :mod:`test.support` module defines the following constants:
Set to a non-ASCII name for a temporary file.
-.. data:: TESTFN_ENCODING
-
- Set to :func:`sys.getfilesystemencoding`.
-
-
.. data:: TESTFN_UNENCODABLE
Set to a filename (str type) that should not be able to be encoded by file
@@ -314,7 +309,7 @@ The :mod:`test.support` module defines the following constants:
Usually, a timeout using :data:`INTERNET_TIMEOUT` should not mark a test as
failed, but skip the test instead: see
- :func:`~test.support.transient_internet`.
+ :func:`~test.support.socket_helper.transient_internet`.
Its default value is 1 minute.
@@ -348,11 +343,6 @@ The :mod:`test.support` module defines the following constants:
:data:`SHORT_TIMEOUT`.
-.. data:: IPV6_ENABLED
-
- Set to ``True`` if IPV6 is enabled on this host, ``False`` otherwise.
-
-
.. data:: SAVEDCWD
Set to :func:`os.getcwd`.
@@ -764,12 +754,6 @@ The :mod:`test.support` module defines the following functions:
A context manager that temporarily sets the process umask.
-.. function:: transient_internet(resource_name, *, timeout=30.0, errnos=())
-
- A context manager that raises :exc:`ResourceDenied` when various issues
- with the internet connection manifest themselves as exceptions.
-
-
.. function:: disable_faulthandler()
A context manager that replaces ``sys.stderr`` with ``sys.__stderr__``.
@@ -825,16 +809,28 @@ The :mod:`test.support` module defines the following functions:
target of the "as" clause, if there is one.
-.. function:: wait_threads_exit(timeout=60.0)
+.. function:: print_warning(msg)
- Context manager to wait until all threads created in the ``with`` statement
- exit.
+ Print a warning into :data:`sys.__stderr__`. Format the message as:
+ ``f"Warning -- {msg}"``. If *msg* is made of multiple lines, add
+ ``"Warning -- "`` prefix to each line.
+ .. versionadded:: 3.9
-.. function:: start_threads(threads, unlock=None)
- Context manager to start *threads*. It attempts to join the threads upon
- exit.
+.. function:: wait_process(pid, *, exitcode, timeout=None)
+
+ Wait until process *pid* completes and check that the process exit code is
+ *exitcode*.
+
+ Raise an :exc:`AssertionError` if the process exit code is not equal to
+ *exitcode*.
+
+ If the process runs longer than *timeout* seconds (:data:`SHORT_TIMEOUT` by
+ default), kill the process and raise an :exc:`AssertionError`. The timeout
+ feature is not available on Windows.
+
+ .. versionadded:: 3.9
.. function:: calcobjsize(fmt)
@@ -877,12 +873,6 @@ The :mod:`test.support` module defines the following functions:
A decorator for running tests that require support for xattr.
-.. decorator:: skip_unless_bind_unix_socket
-
- A decorator for running tests that require a functional bind() for Unix
- sockets.
-
-
.. decorator:: anticipate_failure(condition)
A decorator to conditionally mark tests with
@@ -981,11 +971,6 @@ The :mod:`test.support` module defines the following functions:
the trace function.
-.. decorator:: reap_threads(func)
-
- Decorator to ensure the threads are cleaned up even if the test fails.
-
-
.. decorator:: bigmemtest(size, memuse, dry_run=True)
Decorator for bigmem tests.
@@ -1103,23 +1088,6 @@ The :mod:`test.support` module defines the following functions:
preserve internal cache.
-.. function:: threading_setup()
-
- Return current thread count and copy of dangling threads.
-
-
-.. function:: threading_cleanup(*original_values)
-
- Cleanup up threads not specified in *original_values*. Designed to emit
- a warning if a test leaves running threads in the background.
-
-
-.. function:: join_thread(thread, timeout=30.0)
-
- Join a *thread* within *timeout*. Raise an :exc:`AssertionError` if thread
- is still alive after *timeout* seconds.
-
-
.. function:: reap_children()
Use this at the end of ``test_main`` whenever sub-processes are started.
@@ -1133,64 +1101,6 @@ The :mod:`test.support` module defines the following functions:
is raised.
-.. function:: bind_port(sock, host=HOST)
-
- Bind the socket to a free port and return the port number. Relies on
- ephemeral ports in order to ensure we are using an unbound port. This is
- important as many tests may be running simultaneously, especially in a
- buildbot environment. This method raises an exception if the
- ``sock.family`` is :const:`~socket.AF_INET` and ``sock.type`` is
- :const:`~socket.SOCK_STREAM`, and the socket has
- :const:`~socket.SO_REUSEADDR` or :const:`~socket.SO_REUSEPORT` set on it.
- Tests should never set these socket options for TCP/IP sockets.
- The only case for setting these options is testing multicasting via
- multiple UDP sockets.
-
- Additionally, if the :const:`~socket.SO_EXCLUSIVEADDRUSE` socket option is
- available (i.e. on Windows), it will be set on the socket. This will
- prevent anyone else from binding to our host/port for the duration of the
- test.
-
-
-.. function:: bind_unix_socket(sock, addr)
-
- Bind a unix socket, raising :exc:`unittest.SkipTest` if
- :exc:`PermissionError` is raised.
-
-
-.. function:: catch_threading_exception()
-
- Context manager catching :class:`threading.Thread` exception using
- :func:`threading.excepthook`.
-
- Attributes set when an exception is catched:
-
- * ``exc_type``
- * ``exc_value``
- * ``exc_traceback``
- * ``thread``
-
- See :func:`threading.excepthook` documentation.
-
- These attributes are deleted at the context manager exit.
-
- Usage::
-
- with support.catch_threading_exception() as cm:
- # code spawning a thread which raises an exception
- ...
-
- # check the thread exception, use cm attributes:
- # exc_type, exc_value, exc_traceback, thread
- ...
-
- # exc_type, exc_value, exc_traceback, thread attributes of cm no longer
- # exists at this point
- # (to avoid reference cycles)
-
- .. versionadded:: 3.8
-
-
.. function:: catch_unraisable_exception()
Context manager catching unraisable exception using
@@ -1219,29 +1129,6 @@ The :mod:`test.support` module defines the following functions:
.. versionadded:: 3.8
-.. function:: find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM)
-
- Returns an unused port that should be suitable for binding. This is
- achieved by creating a temporary socket with the same family and type as
- the ``sock`` parameter (default is :const:`~socket.AF_INET`,
- :const:`~socket.SOCK_STREAM`),
- and binding it to the specified host address (defaults to ``0.0.0.0``)
- with the port set to 0, eliciting an unused ephemeral port from the OS.
- The temporary socket is then closed and deleted, and the ephemeral port is
- returned.
-
- Either this method or :func:`bind_port` should be used for any tests
- where a server socket needs to be bound to a particular port for the
- duration of the test.
- Which one to use depends on whether the calling code is creating a Python
- socket, or if an unused port needs to be provided in a constructor
- or passed to an external program (i.e. the ``-accept`` argument to
- openssl's s_server mode). Always prefer :func:`bind_port` over
- :func:`find_unused_port` where possible. Using a hard coded port is
- discouraged since it can make multiple instances of the test impossible to
- run simultaneously, which is a problem for buildbots.
-
-
.. function:: load_package_tests(pkg_dir, loader, standard_tests, pattern)
Generic implementation of the :mod:`unittest` ``load_tests`` protocol for
@@ -1445,11 +1332,6 @@ The :mod:`test.support` module defines the following classes:
Run *test* and return the result.
-.. class:: TestHandler(logging.handlers.BufferingHandler)
-
- Class for logging support.
-
-
.. class:: FakePath(path)
Simple :term:`path-like object`. It implements the :meth:`__fspath__`
@@ -1457,6 +1339,84 @@ The :mod:`test.support` module defines the following classes:
it will be raised in :meth:`!__fspath__`.
+:mod:`test.support.socket_helper` --- Utilities for socket tests
+================================================================
+
+.. module:: test.support.socket_helper
+ :synopsis: Support for socket tests.
+
+
+The :mod:`test.support.socket_helper` module provides support for socket tests.
+
+.. versionadded:: 3.9
+
+
+.. data:: IPV6_ENABLED
+
+ Set to ``True`` if IPv6 is enabled on this host, ``False`` otherwise.
+
+
+.. function:: find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM)
+
+ Returns an unused port that should be suitable for binding. This is
+ achieved by creating a temporary socket with the same family and type as
+ the ``sock`` parameter (default is :const:`~socket.AF_INET`,
+ :const:`~socket.SOCK_STREAM`),
+ and binding it to the specified host address (defaults to ``0.0.0.0``)
+ with the port set to 0, eliciting an unused ephemeral port from the OS.
+ The temporary socket is then closed and deleted, and the ephemeral port is
+ returned.
+
+ Either this method or :func:`bind_port` should be used for any tests
+ where a server socket needs to be bound to a particular port for the
+ duration of the test.
+ Which one to use depends on whether the calling code is creating a Python
+ socket, or if an unused port needs to be provided in a constructor
+ or passed to an external program (i.e. the ``-accept`` argument to
+ openssl's s_server mode). Always prefer :func:`bind_port` over
+ :func:`find_unused_port` where possible. Using a hard coded port is
+ discouraged since it can make multiple instances of the test impossible to
+ run simultaneously, which is a problem for buildbots.
+
+
+.. function:: bind_port(sock, host=HOST)
+
+ Bind the socket to a free port and return the port number. Relies on
+ ephemeral ports in order to ensure we are using an unbound port. This is
+ important as many tests may be running simultaneously, especially in a
+ buildbot environment. This method raises an exception if the
+ ``sock.family`` is :const:`~socket.AF_INET` and ``sock.type`` is
+ :const:`~socket.SOCK_STREAM`, and the socket has
+ :const:`~socket.SO_REUSEADDR` or :const:`~socket.SO_REUSEPORT` set on it.
+ Tests should never set these socket options for TCP/IP sockets.
+ The only case for setting these options is testing multicasting via
+ multiple UDP sockets.
+
+ Additionally, if the :const:`~socket.SO_EXCLUSIVEADDRUSE` socket option is
+ available (i.e. on Windows), it will be set on the socket. This will
+ prevent anyone else from binding to our host/port for the duration of the
+ test.
+
+
+.. function:: bind_unix_socket(sock, addr)
+
+ Bind a unix socket, raising :exc:`unittest.SkipTest` if
+ :exc:`PermissionError` is raised.
+
+
+.. decorator:: skip_unless_bind_unix_socket
+
+ A decorator for running tests that require a functional ``bind()`` for Unix
+ sockets.
+
+
+.. function:: transient_internet(resource_name, *, timeout=30.0, errnos=())
+
+ A context manager that raises :exc:`~test.support.ResourceDenied` when
+ various issues with the internet connection manifest themselves as
+ exceptions.
+
+
:mod:`test.support.script_helper` --- Utilities for the Python execution tests
==============================================================================
@@ -1596,3 +1556,81 @@ The module defines the following class:
.. method:: BytecodeTestCase.assertNotInBytecode(x, opname, argval=_UNSPECIFIED)
Throws :exc:`AssertionError` if *opname* is found.
+
+
+:mod:`test.support.threading_helper` --- Utilities for threading tests
+======================================================================
+
+.. module:: test.support.threading_helper
+ :synopsis: Support for threading tests.
+
+The :mod:`test.support.threading_helper` module provides support for threading tests.
+
+.. versionadded:: 3.10
+
+
+.. function:: join_thread(thread, timeout=None)
+
+ Join a *thread* within *timeout*. Raise an :exc:`AssertionError` if thread
+ is still alive after *timeout* seconds.
+
+
+.. decorator:: reap_threads(func)
+
+ Decorator to ensure the threads are cleaned up even if the test fails.
+
+
+.. function:: start_threads(threads, unlock=None)
+
+ Context manager to start *threads*. It attempts to join the threads upon
+ exit.
+
+
+.. function:: threading_cleanup(*original_values)
+
+ Clean up threads not specified in *original_values*. Designed to emit
+ a warning if a test leaves running threads in the background.
+
+
+.. function:: threading_setup()
+
+ Return current thread count and copy of dangling threads.
+
+
+.. function:: wait_threads_exit(timeout=None)
+
+ Context manager to wait until all threads created in the ``with`` statement
+ exit.
+
+
+.. function:: catch_threading_exception()
+
+ Context manager catching :class:`threading.Thread` exception using
+ :func:`threading.excepthook`.
+
+ Attributes set when an exception is caught:
+
+ * ``exc_type``
+ * ``exc_value``
+ * ``exc_traceback``
+ * ``thread``
+
+ See :func:`threading.excepthook` documentation.
+
+ These attributes are deleted at the context manager exit.
+
+ Usage::
+
+ with threading_helper.catch_threading_exception() as cm:
+ # code spawning a thread which raises an exception
+ ...
+
+ # check the thread exception, use cm attributes:
+ # exc_type, exc_value, exc_traceback, thread
+ ...
+
+ # exc_type, exc_value, exc_traceback, thread attributes of cm no longer
+ # exist at this point
+ # (to avoid reference cycles)
+
+ .. versionadded:: 3.8
diff --git a/Doc/library/textwrap.rst b/Doc/library/textwrap.rst
index 0f11ef401569de..16837104b6cebc 100644
--- a/Doc/library/textwrap.rst
+++ b/Doc/library/textwrap.rst
@@ -239,7 +239,7 @@ hyphenated words; only then will long words be broken if necessary, unless
:attr:`fix_sentence_endings` is false by default.
Since the sentence detection algorithm relies on ``string.lowercase`` for
- the definition of "lowercase letter," and a convention of using two spaces
+ the definition of "lowercase letter", and a convention of using two spaces
after a period to separate sentences on the same line, it is specific to
English-language texts.
diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst
index 1e902941427960..3a446adfac8c5a 100644
--- a/Doc/library/threading.rst
+++ b/Doc/library/threading.rst
@@ -280,8 +280,6 @@ since it is impossible to detect the termination of alien threads.
base class constructor (``Thread.__init__()``) before doing anything else to
the thread.
- Daemon threads must not be used in subinterpreters.
-
.. versionchanged:: 3.3
Added the *daemon* argument.
@@ -296,12 +294,6 @@ since it is impossible to detect the termination of alien threads.
This method will raise a :exc:`RuntimeError` if called more than once
on the same thread object.
- Raise a :exc:`RuntimeError` if the thread is a daemon thread and the
- method is called from a subinterpreter.
-
- .. versionchanged:: 3.9
- In a subinterpreter, spawning a daemon thread now raises an exception.
-
.. method:: run()
Method representing the thread's activity.
diff --git a/Doc/library/time.rst b/Doc/library/time.rst
index 6842e9075e1a68..cff6320b526db5 100644
--- a/Doc/library/time.rst
+++ b/Doc/library/time.rst
@@ -774,6 +774,16 @@ These constants are used as parameters for :func:`clock_getres` and
.. versionadded:: 3.7
+.. data:: CLOCK_TAI
+
+ `International Atomic Time <https://www.nist.gov/pml/time-and-frequency-division/nist-time-frequently-asked-questions-faq#tai>`_
+
+ The system must have a current leap second table in order for this to give
+ the correct answer. PTP or NTP software can maintain a leap second table.
+
+ .. availability:: Linux.
+
+ .. versionadded:: 3.9
.. data:: CLOCK_THREAD_CPUTIME_ID
@@ -805,7 +815,6 @@ These constants are used as parameters for :func:`clock_getres` and
.. versionadded:: 3.8
-
The following constant is the only parameter that can be sent to
:func:`clock_settime`.
diff --git a/Doc/library/timeit.rst b/Doc/library/timeit.rst
index ef7a4e40be6590..46fa62c15fc2ef 100644
--- a/Doc/library/timeit.rst
+++ b/Doc/library/timeit.rst
@@ -251,7 +251,8 @@ quotes and using leading spaces. Multiple :option:`-s` options are treated
similarly.
If :option:`-n` is not given, a suitable number of loops is calculated by trying
-successive powers of 10 until the total time is at least 0.2 seconds.
+increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the total
+time is at least 0.2 seconds.
:func:`default_timer` measurements can be affected by other programs running on
the same machine, so the best thing to do when accurate timing is necessary is
diff --git a/Doc/library/token.rst b/Doc/library/token.rst
index 1777929be739d1..dab8f0fa9b64fc 100644
--- a/Doc/library/token.rst
+++ b/Doc/library/token.rst
@@ -87,7 +87,7 @@ the :mod:`tokenize` module.
now tokenized as :data:`NAME` tokens.
.. versionchanged:: 3.8
- Added :data:`TYPE_COMMENT`.
+ Added :data:`TYPE_COMMENT`, :data:`TYPE_IGNORE`, :data:`COLONEQUAL`.
Added :data:`AWAIT` and :data:`ASYNC` tokens back (they're needed
to support parsing older Python versions for :func:`ast.parse` with
``feature_version`` set to 6 or lower).
diff --git a/Doc/library/tokenize.rst b/Doc/library/tokenize.rst
index 96778f23f8f062..11f569df2e7cde 100644
--- a/Doc/library/tokenize.rst
+++ b/Doc/library/tokenize.rst
@@ -13,7 +13,7 @@
The :mod:`tokenize` module provides a lexical scanner for Python source code,
implemented in Python. The scanner in this module returns comments as tokens
-as well, making it useful for implementing "pretty-printers," including
+as well, making it useful for implementing "pretty-printers", including
colorizers for on-screen displays.
To simplify token stream handling, all :ref:`operator ` and
diff --git a/Doc/library/tracemalloc.rst b/Doc/library/tracemalloc.rst
index e423470036558f..fba1caab455d73 100644
--- a/Doc/library/tracemalloc.rst
+++ b/Doc/library/tracemalloc.rst
@@ -202,10 +202,8 @@ ignoring ```` and ```` files::
print("Top %s lines" % limit)
for index, stat in enumerate(top_stats[:limit], 1):
frame = stat.traceback[0]
- # replace "/path/to/module/file.py" with "module/file.py"
- filename = os.sep.join(frame.filename.split(os.sep)[-2:])
print("#%s: %s:%s: %.1f KiB"
- % (index, filename, frame.lineno, stat.size / 1024))
+ % (index, frame.filename, frame.lineno, stat.size / 1024))
line = linecache.getline(frame.filename, frame.lineno).strip()
if line:
print(' %s' % line)
@@ -251,6 +249,47 @@ Example of output of the Python test suite::
See :meth:`Snapshot.statistics` for more options.
+Record the current and peak size of all traced memory blocks
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following code computes two sums like ``0 + 1 + 2 + ...`` inefficiently, by
+creating a list of those numbers. This list consumes a lot of memory
+temporarily. We can use :func:`get_traced_memory` and :func:`reset_peak` to
+observe the small memory usage after the sum is computed as well as the peak
+memory usage during the computations::
+
+ import tracemalloc
+
+ tracemalloc.start()
+
+ # Example code: compute a sum with a large temporary list
+ large_sum = sum(list(range(100000)))
+
+ first_size, first_peak = tracemalloc.get_traced_memory()
+
+ tracemalloc.reset_peak()
+
+ # Example code: compute a sum with a small temporary list
+ small_sum = sum(list(range(1000)))
+
+ second_size, second_peak = tracemalloc.get_traced_memory()
+
+ print(f"{first_size=}, {first_peak=}")
+ print(f"{second_size=}, {second_peak=}")
+
+Output::
+
+ first_size=664, first_peak=3592984
+ second_size=804, second_peak=29704
+
+Using :func:`reset_peak` ensured we could accurately record the peak during the
+computation of ``small_sum``, even though it is much smaller than the overall
+peak size of memory blocks since the :func:`start` call. Without the call to
+:func:`reset_peak`, ``second_peak`` would still be the peak from the
+computation ``large_sum`` (that is, equal to ``first_peak``). In this case,
+both peaks are much higher than the final memory usage, which suggests we
+could optimise (by removing the unnecessary call to :class:`list`, and writing
+``sum(range(...))``).
API
---
@@ -291,6 +330,24 @@ Functions
:mod:`tracemalloc` module as a tuple: ``(current: int, peak: int)``.
+.. function:: reset_peak()
+
+ Set the peak size of memory blocks traced by the :mod:`tracemalloc` module
+ to the current size.
+
+ Do nothing if the :mod:`tracemalloc` module is not tracing memory
+ allocations.
+
+ This function only modifies the recorded peak size, and does not modify or
+ clear any traces, unlike :func:`clear_traces`. Snapshots taken with
+ :func:`take_snapshot` before a call to :func:`reset_peak` can be
+ meaningfully compared to snapshots taken after the call.
+
+ See also :func:`get_traced_memory`.
+
+ .. versionadded:: 3.10
+
+
.. function:: get_tracemalloc_memory()
Get the memory usage in bytes of the :mod:`tracemalloc` module used to store
diff --git a/Doc/library/types.rst b/Doc/library/types.rst
index 3529c2b0edb896..79acdf4499afd2 100644
--- a/Doc/library/types.rst
+++ b/Doc/library/types.rst
@@ -282,6 +282,11 @@ Standard names are defined for the following types:
.. versionadded:: 3.3
+ .. versionchanged:: 3.9
+
+ Updated to support the new union (``|``) operator from :pep:`584`, which
+ simply delegates to the underlying mapping.
+
.. describe:: key in proxy
Return ``True`` if the underlying mapping has a key *key*, else
@@ -324,6 +329,12 @@ Standard names are defined for the following types:
Return a new view of the underlying mapping's values.
+ .. describe:: reversed(proxy)
+
+ Return a reverse iterator over the keys of the underlying mapping.
+
+ .. versionadded:: 3.9
+
Additional Utility Classes and Functions
----------------------------------------
@@ -344,8 +355,7 @@ Additional Utility Classes and Functions
self.__dict__.update(kwargs)
def __repr__(self):
- keys = sorted(self.__dict__)
- items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys)
+ items = (f"{k}={v!r}" for k, v in self.__dict__.items())
return "{}({})".format(type(self).__name__, ", ".join(items))
def __eq__(self, other):
@@ -357,6 +367,9 @@ Additional Utility Classes and Functions
.. versionadded:: 3.3
+ .. versionchanged:: 3.9
+ Attribute order in the repr changed from alphabetical to insertion (like
+ ``dict``).
.. function:: DynamicClassAttribute(fget=None, fset=None, fdel=None, doc=None)
@@ -368,7 +381,7 @@ Additional Utility Classes and Functions
class's __getattr__ method; this is done by raising AttributeError.
This allows one to have properties active on an instance, and have virtual
- attributes on the class with the same name (see Enum for an example).
+ attributes on the class with the same name (see :class:`enum.Enum` for an example).
.. versionadded:: 3.4
diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst
index d3bab94c6dd409..e85b6d697f79d4 100644
--- a/Doc/library/typing.rst
+++ b/Doc/library/typing.rst
@@ -959,14 +959,15 @@ The module defines the following classes, functions and decorators:
.. versionchanged:: 3.6.1
Added support for default values, methods, and docstrings.
- .. versionchanged:: 3.8
- Deprecated the ``_field_types`` attribute in favor of the more
- standard ``__annotations__`` attribute which has the same information.
-
.. versionchanged:: 3.8
The ``_field_types`` and ``__annotations__`` attributes are
now regular dictionaries instead of instances of ``OrderedDict``.
+ .. versionchanged:: 3.9
+ Removed the ``_field_types`` attribute in favor of the more
+ standard ``__annotations__`` attribute which has the same information.
+
+
.. class:: TypedDict(dict)
A simple typed namespace. At runtime it is equivalent to
@@ -996,8 +997,20 @@ The module defines the following classes, functions and decorators:
Point2D = TypedDict('Point2D', x=int, y=int, label=str)
Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
- See :pep:`589` for more examples and detailed rules of using ``TypedDict``
- with type checkers.
+ By default, all keys must be present in a TypedDict. It is possible
+ to override this by specifying totality.
+ Usage::
+
+ class Point2D(TypedDict, total=False):
+ x: int
+ y: int
+
+ This means that a ``Point2D`` TypedDict can have any of the keys omitted. A type
+ checker is only expected to support a literal ``False`` or ``True`` as the value of
+ the ``total`` argument. ``True`` is the default, and makes all items defined in the
+ class body be required.
+
+ See :pep:`589` for more examples and detailed rules of using ``TypedDict``.
.. versionadded:: 3.8
@@ -1008,9 +1021,9 @@ The module defines the following classes, functions and decorators:
``List[ForwardRef("SomeClass")]``. This class should not be instantiated by
a user, but may be used by introspection tools.
-.. function:: NewType(typ)
+.. function:: NewType(name, tp)
- A helper function to indicate a distinct types to a typechecker,
+ A helper function to indicate a distinct type to a typechecker,
see :ref:`distinct`. At runtime it returns a function that returns
its argument. Usage::
diff --git a/Doc/library/unicodedata.rst b/Doc/library/unicodedata.rst
index 225384cf391e99..dade3f265b56a7 100644
--- a/Doc/library/unicodedata.rst
+++ b/Doc/library/unicodedata.rst
@@ -17,8 +17,8 @@
This module provides access to the Unicode Character Database (UCD) which
defines character properties for all Unicode characters. The data contained in
-this database is compiled from the `UCD version 12.1.0
-<http://www.unicode.org/Public/12.1.0/ucd>`_.
+this database is compiled from the `UCD version 13.0.0
+<https://www.unicode.org/Public/13.0.0/ucd>`_.
The module uses the same names and symbols as defined by Unicode
Standard Annex #44, `"Unicode Character Database"
@@ -175,6 +175,6 @@ Examples:
.. rubric:: Footnotes
-.. [#] http://www.unicode.org/Public/12.1.0/ucd/NameAliases.txt
+.. [#] https://www.unicode.org/Public/13.0.0/ucd/NameAliases.txt
-.. [#] http://www.unicode.org/Public/12.1.0/ucd/NamedSequences.txt
+.. [#] https://www.unicode.org/Public/13.0.0/ucd/NamedSequences.txt
diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst
index 515bdd060a198c..3643d1ad96ed11 100644
--- a/Doc/library/unittest.mock.rst
+++ b/Doc/library/unittest.mock.rst
@@ -1327,8 +1327,7 @@ patch
.. note::
- :func:`patch` is straightforward to use. The key is to do the patching in the
- right namespace. See the section `where to patch`_.
+ The key is to do the patching in the right namespace. See the section `where to patch`_.
.. function:: patch(target, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs)
diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst
index e2e4f2cdc220aa..b2e16cf331e036 100644
--- a/Doc/library/unittest.rst
+++ b/Doc/library/unittest.rst
@@ -910,10 +910,10 @@ Test cases
.. versionadded:: 3.1
- .. method:: assertIn(first, second, msg=None)
- assertNotIn(first, second, msg=None)
+ .. method:: assertIn(member, container, msg=None)
+ assertNotIn(member, container, msg=None)
- Test that *first* is (or is not) in *second*.
+ Test that *member* is (or is not) in *container*.
.. versionadded:: 3.1
diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst
index 03712c1f4a6eea..288ce14d36f016 100644
--- a/Doc/library/urllib.request.rst
+++ b/Doc/library/urllib.request.rst
@@ -1235,7 +1235,7 @@ Here is an example of doing a ``PUT`` request using :class:`Request`::
import urllib.request
DATA = b'some data'
- req = urllib.request.Request(url='http://localhost:8080', data=DATA,method='PUT')
+ req = urllib.request.Request(url='http://localhost:8080', data=DATA, method='PUT')
with urllib.request.urlopen(req) as f:
pass
print(f.status)
diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst
index d778486b0a5d94..5d4a36481f1dcc 100644
--- a/Doc/library/venv.rst
+++ b/Doc/library/venv.rst
@@ -27,7 +27,7 @@ See :pep:`405` for more information about Python virtual environments.
.. seealso::
`Python Packaging User Guide: Creating and using virtual environments
- `__
+ <https://packaging.python.org/guides/installing-using-pip-and-virtual-environments/#creating-a-virtual-environment>`__
Creating virtual environments
@@ -249,7 +249,8 @@ creation according to their needs, the :class:`EnvBuilder` class.
There is also a module-level convenience function:
.. function:: create(env_dir, system_site_packages=False, clear=False, \
- symlinks=False, with_pip=False, prompt=None)
+ symlinks=False, with_pip=False, prompt=None, \
+ upgrade_deps=False)
Create an :class:`EnvBuilder` with the given keyword arguments, and call its
:meth:`~EnvBuilder.create` method with the *env_dir* argument.
@@ -262,6 +263,9 @@ There is also a module-level convenience function:
.. versionchanged:: 3.6
Added the ``prompt`` parameter
+ .. versionchanged:: 3.9
+ Added the ``upgrade_deps`` parameter
+
An example of extending ``EnvBuilder``
--------------------------------------
diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst
index c3519e45beb6b1..12eb985c344359 100644
--- a/Doc/library/weakref.rst
+++ b/Doc/library/weakref.rst
@@ -171,6 +171,9 @@ Extension types can easily be made to support weak references; see
performed by the program during iteration may cause items in the
dictionary to vanish "by magic" (as a side effect of garbage collection).
+ .. versionchanged:: 3.9
+ Added support for ``|`` and ``|=`` operators, specified in :pep:`584`.
+
:class:`WeakKeyDictionary` objects have an additional method that
exposes the internal references directly. The references are not guaranteed to
be "live" at the time they are used, so the result of calling the references
@@ -197,6 +200,9 @@ than needed.
by the program during iteration may cause items in the dictionary to vanish "by
magic" (as a side effect of garbage collection).
+ .. versionchanged:: 3.9
+ Added support for ``|`` and ``|=`` operators, as specified in :pep:`584`.
+
:class:`WeakValueDictionary` objects have an additional method that has the
same issues as the :meth:`keyrefs` method of :class:`WeakKeyDictionary`
objects.
@@ -327,12 +333,6 @@ objects.
types.
-.. exception:: ReferenceError
-
- Exception raised when a proxy object is used but the underlying object has been
- collected. This is the same as the standard :exc:`ReferenceError` exception.
-
-
.. seealso::
:pep:`205` - Weak References
diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst
index 8711242d95d741..2c78cd939243a8 100644
--- a/Doc/library/xml.dom.minidom.rst
+++ b/Doc/library/xml.dom.minidom.rst
@@ -132,7 +132,8 @@ module documentation. This section lists the differences between the API and
... # Work with dom.
-.. method:: Node.writexml(writer, indent="", addindent="", newl="")
+.. method:: Node.writexml(writer, indent="", addindent="", newl="",
+ encoding=None, standalone=None)
Write XML to the writer object. The writer receives texts but not bytes as input,
it should have a :meth:`write` method which matches that of the file object
@@ -144,11 +145,18 @@ module documentation. This section lists the differences between the API and
For the :class:`Document` node, an additional keyword argument *encoding* can
be used to specify the encoding field of the XML header.
+ Similarly, explicitly stating the *standalone* argument causes the
+ standalone document declarations to be added to the prologue of the XML
+ document.
+ If the value is set to ``True``, ``standalone="yes"`` is added,
+ otherwise it is set to ``"no"``.
+ Not stating the argument will omit the declaration from the document.
+
.. versionchanged:: 3.8
The :meth:`writexml` method now preserves the attribute order specified
by the user.
-.. method:: Node.toxml(encoding=None)
+.. method:: Node.toxml(encoding=None, standalone=None)
Return a string or byte string containing the XML represented by
the DOM node.
@@ -160,11 +168,14 @@ module documentation. This section lists the differences between the API and
encoding. Encoding this string in an encoding other than UTF-8 is
likely incorrect, since UTF-8 is the default encoding of XML.
+ The *standalone* argument behaves exactly as in :meth:`writexml`.
+
.. versionchanged:: 3.8
The :meth:`toxml` method now preserves the attribute order specified
by the user.
-.. method:: Node.toprettyxml(indent="\\t", newl="\\n", encoding=None)
+.. method:: Node.toprettyxml(indent="\\t", newl="\\n", encoding=None,
+ standalone=None)
Return a pretty-printed version of the document. *indent* specifies the
indentation string and defaults to a tabulator; *newl* specifies the string
@@ -173,6 +184,8 @@ module documentation. This section lists the differences between the API and
The *encoding* argument behaves like the corresponding argument of
:meth:`toxml`.
+ The *standalone* argument behaves exactly as in :meth:`writexml`.
+
.. versionchanged:: 3.8
The :meth:`toprettyxml` method now preserves the attribute order specified
by the user.
diff --git a/Doc/library/zipapp.rst b/Doc/library/zipapp.rst
index 728315251e0824..fb40a2b3e964e4 100644
--- a/Doc/library/zipapp.rst
+++ b/Doc/library/zipapp.rst
@@ -198,7 +198,7 @@ Pack up a directory into an archive, and run it.
The same can be done using the :func:`create_archive` function::
>>> import zipapp
- >>> zipapp.create_archive('myapp.pyz', 'myapp')
+ >>> zipapp.create_archive('myapp', 'myapp.pyz')
To make the application directly executable on POSIX, specify an interpreter
to use.
diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst
index e8a2530fb8c171..7126d8bd703f62 100644
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -489,10 +489,20 @@ Path objects are traversable using the ``/`` operator.
The final path component.
-.. method:: Path.open(*, **)
-
- Invoke :meth:`ZipFile.open` on the current path. Accepts
- the same arguments as :meth:`ZipFile.open`.
+.. method:: Path.open(mode='r', *, pwd, **)
+
+ Invoke :meth:`ZipFile.open` on the current path.
+ Allows opening for read or write, text or binary
+ through supported modes: 'r', 'w', 'rb', 'wb'.
+ Positional and keyword arguments are passed through to
+ :class:`io.TextIOWrapper` when opened as text and
+ ignored otherwise.
+ ``pwd`` is the ``pwd`` parameter to
+ :meth:`ZipFile.open`.
+
+ .. versionchanged:: 3.9
+ Added support for text and binary modes for open. Default
+ mode is now text.
.. method:: Path.iterdir()
diff --git a/Doc/library/zoneinfo.rst b/Doc/library/zoneinfo.rst
new file mode 100644
index 00000000000000..3a4c12a73acd72
--- /dev/null
+++ b/Doc/library/zoneinfo.rst
@@ -0,0 +1,413 @@
+:mod:`zoneinfo` --- IANA time zone support
+==========================================
+
+.. module:: zoneinfo
+ :synopsis: IANA time zone support
+
+.. versionadded:: 3.9
+
+.. moduleauthor:: Paul Ganssle
+.. sectionauthor:: Paul Ganssle
+
+--------------
+
+The :mod:`zoneinfo` module provides a concrete time zone implementation to
+support the IANA time zone database as originally specified in :pep:`615`. By
+default, :mod:`zoneinfo` uses the system's time zone data if available; if no
+system time zone data is available, the library will fall back to using the
+first-party `tzdata`_ package available on PyPI.
+
+.. seealso::
+
+ Module: :mod:`datetime`
+ Provides the :class:`~datetime.time` and :class:`~datetime.datetime`
+ types with which the :class:`ZoneInfo` class is designed to be used.
+
+ Package `tzdata`_
+ First-party package maintained by the CPython core developers to supply
+ time zone data via PyPI.
+
+
+Using ``ZoneInfo``
+------------------
+
+:class:`ZoneInfo` is a concrete implementation of the :class:`datetime.tzinfo`
+abstract base class, and is intended to be attached to ``tzinfo``, either via
+the constructor, the :meth:`datetime.replace <datetime.datetime.replace>`
+method or :meth:`datetime.astimezone <datetime.datetime.astimezone>`::
+
+ >>> from zoneinfo import ZoneInfo
+ >>> from datetime import datetime, timedelta
+
+ >>> dt = datetime(2020, 10, 31, 12, tzinfo=ZoneInfo("America/Los_Angeles"))
+ >>> print(dt)
+ 2020-10-31 12:00:00-07:00
+
+ >>> dt.tzname()
+ 'PDT'
+
+Datetimes constructed in this way are compatible with datetime arithmetic and
+handle daylight saving time transitions with no further intervention::
+
+ >>> dt_add = dt + timedelta(days=1)
+
+ >>> print(dt_add)
+ 2020-11-01 12:00:00-08:00
+
+ >>> dt_add.tzname()
+ 'PST'
+
+These time zones also support the :attr:`~datetime.datetime.fold` attribute
+introduced in :pep:`495`. During offset transitions which induce ambiguous
+times (such as a daylight saving time to standard time transition), the offset
+from *before* the transition is used when ``fold=0``, and the offset *after*
+the transition is used when ``fold=1``, for example::
+
+ >>> dt = datetime(2020, 11, 1, 1, tzinfo=ZoneInfo("America/Los_Angeles"))
+ >>> print(dt)
+ 2020-11-01 01:00:00-07:00
+
+ >>> print(dt.replace(fold=1))
+ 2020-11-01 01:00:00-08:00
+
+When converting from another time zone, the fold will be set to the correct
+value::
+
+ >>> from datetime import timezone
+ >>> LOS_ANGELES = ZoneInfo("America/Los_Angeles")
+ >>> dt_utc = datetime(2020, 11, 1, 8, tzinfo=timezone.utc)
+
+ >>> # Before the PDT -> PST transition
+ >>> print(dt_utc.astimezone(LOS_ANGELES))
+ 2020-11-01 01:00:00-07:00
+
+ >>> # After the PDT -> PST transition
+ >>> print((dt_utc + timedelta(hours=1)).astimezone(LOS_ANGELES))
+ 2020-11-01 01:00:00-08:00
+
+Data sources
+------------
+
+The ``zoneinfo`` module does not directly provide time zone data, and instead
+pulls time zone information from the system time zone database or the
+first-party PyPI package `tzdata`_, if available. Some systems, including
+notably Windows systems, do not have an IANA database available, and so for
+projects targeting cross-platform compatibility that require time zone data, it
+is recommended to declare a dependency on tzdata. If neither system data nor
+tzdata are available, all calls to :class:`ZoneInfo` will raise
+:exc:`ZoneInfoNotFoundError`.
+
+.. _zoneinfo_data_configuration:
+
+Configuring the data sources
+****************************
+
+When ``ZoneInfo(key)`` is called, the constructor first searches the
+directories specified in :data:`TZPATH` for a file matching ``key``, and on
+failure looks for a match in the tzdata package. This behavior can be
+configured in three ways:
+
+1. The default :data:`TZPATH` when not otherwise specified can be configured at
+   :ref:`compile time <zoneinfo_data_compile_time_config>`.
+2. :data:`TZPATH` can be configured using :ref:`an environment variable
+   <zoneinfo_data_environment_var>`.
+3. At :ref:`runtime <zoneinfo_data_runtime_config>`, the search path can be
+ manipulated using the :func:`reset_tzpath` function.
+
+.. _zoneinfo_data_compile_time_config:
+
+Compile-time configuration
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The default :data:`TZPATH` includes several common deployment locations for the
+time zone database (except on Windows, where there are no "well-known"
+locations for time zone data). On POSIX systems, downstream distributors and
+those building Python from source who know where their system
+time zone data is deployed may change the default time zone path by specifying
+the compile-time option ``TZPATH`` (or, more likely, the ``configure`` flag
+``--with-tzpath``), which should be a string delimited by :data:`os.pathsep`.
+
+On all platforms, the configured value is available as the ``TZPATH`` key in
+:func:`sysconfig.get_config_var`.
+
+.. _zoneinfo_data_environment_var:
+
+Environment configuration
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+When initializing :data:`TZPATH` (either at import time or whenever
+:func:`reset_tzpath` is called with no arguments), the ``zoneinfo`` module will
+use the environment variable ``PYTHONTZPATH``, if it exists, to set the search
+path.
+
+.. envvar:: PYTHONTZPATH
+
+ This is an :data:`os.pathsep`-separated string containing the time zone
+ search path to use. It must consist of only absolute rather than relative
+ paths. Relative components specified in ``PYTHONTZPATH`` will not be used,
+ but otherwise the behavior when a relative path is specified is
+ implementation-defined; CPython will raise :exc:`InvalidTZPathWarning`, but
+ other implementations are free to silently ignore the erroneous component
+ or raise an exception.
+
+To set the system to ignore the system data and use the tzdata package
+instead, set ``PYTHONTZPATH=""``.
+
+.. _zoneinfo_data_runtime_config:
+
+Runtime configuration
+^^^^^^^^^^^^^^^^^^^^^
+
+The TZ search path can also be configured at runtime using the
+:func:`reset_tzpath` function. This is generally not an advisable operation,
+though it is reasonable to use it in test functions that require the use of a
+specific time zone path (or require disabling access to the system time zones).
+
+
+The ``ZoneInfo`` class
+----------------------
+
+.. class:: ZoneInfo(key)
+
+ A concrete :class:`datetime.tzinfo` subclass that represents an IANA time
+ zone specified by the string ``key``. Calls to the primary constructor will
+ always return objects that compare identically; put another way, barring
+ cache invalidation via :meth:`ZoneInfo.clear_cache`, for all values of
+ ``key``, the following assertion will always be true:
+
+ .. code-block:: python
+
+ a = ZoneInfo(key)
+ b = ZoneInfo(key)
+ assert a is b
+
+ ``key`` must be in the form of a relative, normalized POSIX path, with no
+ up-level references. The constructor will raise :exc:`ValueError` if a
+ non-conforming key is passed.
+
+ If no file matching ``key`` is found, the constructor will raise
+ :exc:`ZoneInfoNotFoundError`.
+
+
+The ``ZoneInfo`` class has two alternate constructors:
+
+.. classmethod:: ZoneInfo.from_file(fobj, /, key=None)
+
+ Constructs a ``ZoneInfo`` object from a file-like object returning bytes
+ (e.g. a file opened in binary mode or an :class:`io.BytesIO` object).
+ Unlike the primary constructor, this always constructs a new object.
+
+ The ``key`` parameter sets the name of the zone for the purposes of
+ :py:meth:`~object.__str__` and :py:meth:`~object.__repr__`.
+
+ Objects created via this constructor cannot be pickled (see `pickling`_).
+
+.. classmethod:: ZoneInfo.no_cache(key)
+
+ An alternate constructor that bypasses the constructor's cache. It is
+ identical to the primary constructor, but returns a new object on each
+ call. This is most likely to be useful for testing or demonstration
+ purposes, but it can also be used to create a system with a different cache
+ invalidation strategy.
+
+ Objects created via this constructor will also bypass the cache of a
+ deserializing process when unpickled.
+
+ .. TODO: Add "See `cache_behavior`_" reference when that section is ready.
+
+ .. caution::
+
+ Using this constructor may change the semantics of your datetimes in
+ surprising ways, only use it if you know that you need to.
+
+The following class methods are also available:
+
+.. classmethod:: ZoneInfo.clear_cache(*, only_keys=None)
+
+ A method for invalidating the cache on the ``ZoneInfo`` class. If no
+ arguments are passed, all caches are invalidated and the next call to
+ the primary constructor for each key will return a new instance.
+
+ If an iterable of key names is passed to the ``only_keys`` parameter, only
+ the specified keys will be removed from the cache. Keys passed to
+ ``only_keys`` but not found in the cache are ignored.
+
+ .. TODO: Add "See `cache_behavior`_" reference when that section is ready.
+
+ .. warning::
+
+ Invoking this function may change the semantics of datetimes using
+ ``ZoneInfo`` in surprising ways; this modifies process-wide global state
+ and thus may have wide-ranging effects. Only use it if you know that you
+ need to.
+
+The class has one attribute:
+
+.. attribute:: ZoneInfo.key
+
+ This is a read-only :term:`attribute` that returns the value of ``key``
+ passed to the constructor, which should be a lookup key in the IANA time
+ zone database (e.g. ``America/New_York``, ``Europe/Paris`` or
+ ``Asia/Tokyo``).
+
+ For zones constructed from file without specifying a ``key`` parameter,
+ this will be set to ``None``.
+
+ .. note::
+
+ Although it is a somewhat common practice to expose these to end users,
+ these values are designed to be primary keys for representing the
+ relevant zones and not necessarily user-facing elements. Projects like
+ CLDR (the Unicode Common Locale Data Repository) can be used to get
+ more user-friendly strings from these keys.
+
+String representations
+**********************
+
+The string representation returned when calling :py:class:`str` on a
+:class:`ZoneInfo` object defaults to using the :attr:`ZoneInfo.key` attribute (see
+the note on usage in the attribute documentation)::
+
+ >>> zone = ZoneInfo("Pacific/Kwajalein")
+ >>> str(zone)
+ 'Pacific/Kwajalein'
+
+ >>> dt = datetime(2020, 4, 1, 3, 15, tzinfo=zone)
+ >>> f"{dt.isoformat()} [{dt.tzinfo}]"
+ '2020-04-01T03:15:00+12:00 [Pacific/Kwajalein]'
+
+For objects constructed from a file without specifying a ``key`` parameter,
+``str`` falls back to calling :func:`repr`. ``ZoneInfo``'s ``repr`` is
+implementation-defined and not necessarily stable between versions, but it is
+guaranteed not to be a valid ``ZoneInfo`` key.
+
+.. _pickling:
+
+Pickle serialization
+********************
+
+Rather than serializing all transition data, ``ZoneInfo`` objects are
+serialized by key, and ``ZoneInfo`` objects constructed from files (even those
+with a value for ``key`` specified) cannot be pickled.
+
+The behavior of a ``ZoneInfo`` file depends on how it was constructed:
+
+1. ``ZoneInfo(key)``: When constructed with the primary constructor, a
+ ``ZoneInfo`` object is serialized by key, and when deserialized, the
+   deserializing process uses the primary constructor, and thus these are
+   expected to be the same object as other references to the same time
+ zone. For example, if ``europe_berlin_pkl`` is a string containing a pickle
+ constructed from ``ZoneInfo("Europe/Berlin")``, one would expect the
+ following behavior:
+
+ .. code-block:: pycon
+
+ >>> a = ZoneInfo("Europe/Berlin")
+ >>> b = pickle.loads(europe_berlin_pkl)
+ >>> a is b
+ True
+
+2. ``ZoneInfo.no_cache(key)``: When constructed from the cache-bypassing
+ constructor, the ``ZoneInfo`` object is also serialized by key, but when
+ deserialized, the deserializing process uses the cache bypassing
+ constructor. If ``europe_berlin_pkl_nc`` is a string containing a pickle
+ constructed from ``ZoneInfo.no_cache("Europe/Berlin")``, one would expect
+ the following behavior:
+
+ .. code-block:: pycon
+
+ >>> a = ZoneInfo("Europe/Berlin")
+ >>> b = pickle.loads(europe_berlin_pkl_nc)
+ >>> a is b
+ False
+
+3. ``ZoneInfo.from_file(fobj, /, key=None)``: When constructed from a file, the
+ ``ZoneInfo`` object raises an exception on pickling. If an end user wants to
+ pickle a ``ZoneInfo`` constructed from a file, it is recommended that they
+ use a wrapper type or a custom serialization function: either serializing by
+ key or storing the contents of the file object and serializing that.
+
+This method of serialization requires that the time zone data for the required
+key be available on both the serializing and deserializing side, similar to the
+way that references to classes and functions are expected to exist in both the
+serializing and deserializing environments. It also means that no guarantees
+are made about the consistency of results when unpickling a ``ZoneInfo``
+pickled in an environment with a different version of the time zone data.
+
+Functions
+---------
+
+.. function:: available_timezones()
+
+ Get a set containing all the valid keys for IANA time zones available
+ anywhere on the time zone path. This is recalculated on every call to the
+ function.
+
+ This function only includes canonical zone names and does not include
+ "special" zones such as those under the ``posix/`` and ``right/``
+ directories, or the ``posixrules`` zone.
+
+ .. caution::
+
+ This function may open a large number of files, as the best way to
+ determine if a file on the time zone path is a valid time zone is to
+ read the "magic string" at the beginning.
+
+ .. note::
+
+ These values are not designed to be exposed to end-users; for user
+ facing elements, applications should use something like CLDR (the
+ Unicode Common Locale Data Repository) to get more user-friendly
+ strings. See also the cautionary note on :attr:`ZoneInfo.key`.
+
+.. function:: reset_tzpath(to=None)
+
+ Sets or resets the time zone search path (:data:`TZPATH`) for the module.
+ When called with no arguments, :data:`TZPATH` is set to the default value.
+
+ Calling ``reset_tzpath`` will not invalidate the :class:`ZoneInfo` cache,
+ and so calls to the primary ``ZoneInfo`` constructor will only use the new
+ ``TZPATH`` in the case of a cache miss.
+
+ The ``to`` parameter must be a :term:`sequence` of strings or
+ :class:`os.PathLike` and not a string, all of which must be absolute paths.
+ :exc:`ValueError` will be raised if something other than an absolute path
+ is passed.
+
+Globals
+-------
+
+.. data:: TZPATH
+
+ A read-only sequence representing the time zone search path -- when
+ constructing a ``ZoneInfo`` from a key, the key is joined to each entry in
+ the ``TZPATH``, and the first file found is used.
+
+ ``TZPATH`` may contain only absolute paths, never relative paths,
+ regardless of how it is configured.
+
+ The object that ``zoneinfo.TZPATH`` points to may change in response to a
+ call to :func:`reset_tzpath`, so it is recommended to use
+ ``zoneinfo.TZPATH`` rather than importing ``TZPATH`` from ``zoneinfo`` or
+ assigning a long-lived variable to ``zoneinfo.TZPATH``.
+
+ For more information on configuring the time zone search path, see
+ :ref:`zoneinfo_data_configuration`.
+
+Exceptions and warnings
+-----------------------
+
+.. exception:: ZoneInfoNotFoundError
+
+ Raised when construction of a :class:`ZoneInfo` object fails because the
+ specified key could not be found on the system. This is a subclass of
+ :exc:`KeyError`.
+
+.. exception:: InvalidTZPathWarning
+
+ Raised when :envvar:`PYTHONTZPATH` contains an invalid component that will
+ be filtered out, such as a relative path.
+
+.. Links and references:
+
+.. _tzdata: https://pypi.org/project/tzdata/
diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst
index e2f44a55b180b1..c14e7c79fe14cf 100644
--- a/Doc/reference/compound_stmts.rst
+++ b/Doc/reference/compound_stmts.rst
@@ -90,8 +90,8 @@ The :keyword:`!if` statement
The :keyword:`if` statement is used for conditional execution:
.. productionlist::
- if_stmt: "if" `expression` ":" `suite`
- : ("elif" `expression` ":" `suite`)*
+ if_stmt: "if" `assignment_expression` ":" `suite`
+ : ("elif" `assignment_expression` ":" `suite`)*
: ["else" ":" `suite`]
It selects exactly one of the suites by evaluating the expressions one by one
@@ -116,7 +116,7 @@ The :keyword:`while` statement is used for repeated execution as long as an
expression is true:
.. productionlist::
- while_stmt: "while" `expression` ":" `suite`
+ while_stmt: "while" `assignment_expression` ":" `suite`
: ["else" ":" `suite`]
This repeatedly tests the expression and, if it is true, executes the first
@@ -507,7 +507,7 @@ A function definition defines a user-defined function object (see section
funcdef: [`decorators`] "def" `funcname` "(" [`parameter_list`] ")"
: ["->" `expression`] ":" `suite`
decorators: `decorator`+
- decorator: "@" `dotted_name` ["(" [`argument_list` [","]] ")"] NEWLINE
+ decorator: "@" `assignment_expression` NEWLINE
dotted_name: `identifier` ("." `identifier`)*
parameter_list: `defparameter` ("," `defparameter`)* "," "/" ["," [`parameter_list_no_posonly`]]
: | `parameter_list_no_posonly`
@@ -550,6 +550,11 @@ is roughly equivalent to ::
except that the original function is not temporarily bound to the name ``func``.
+.. versionchanged:: 3.9
+ Functions may be decorated with any valid :token:`assignment_expression`.
+ Previously, the grammar was much more restrictive; see :pep:`614` for
+ details.
+
.. index::
triple: default; parameter; value
single: argument; function definition
@@ -717,6 +722,11 @@ is roughly equivalent to ::
The evaluation rules for the decorator expressions are the same as for function
decorators. The result is then bound to the class name.
+.. versionchanged:: 3.9
+ Classes may be decorated with any valid :token:`assignment_expression`.
+ Previously, the grammar was much more restrictive; see :pep:`614` for
+ details.
+
**Programmer's note:** Variables defined in the class definition are class
attributes; they are shared by instances. Instance attributes can be set in a
method with ``self.name = value``. Both class and instance attributes are
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index 9520f824287f6b..c5a7f046992dd1 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -17,7 +17,7 @@ Objects, values and types
:dfn:`Objects` are Python's abstraction for data. All data in a Python program
is represented by objects or by relations between objects. (In a sense, and in
-conformance to Von Neumann's model of a "stored program computer," code is also
+conformance to Von Neumann's model of a "stored program computer", code is also
represented by objects.)
.. index::
@@ -156,13 +156,18 @@ NotImplemented
object is accessed through the built-in name ``NotImplemented``. Numeric methods
and rich comparison methods should return this value if they do not implement the
operation for the operands provided. (The interpreter will then try the
- reflected operation, or some other fallback, depending on the operator.) Its
- truth value is true.
+ reflected operation, or some other fallback, depending on the operator.) It
+ should not be evaluated in a boolean context.
See
:ref:`implementing-the-arithmetic-operations`
for more details.
+ .. versionchanged:: 3.9
+ Evaluating ``NotImplemented`` in a boolean context is deprecated. While
+ it currently evaluates as true, it will emit a :exc:`DeprecationWarning`.
+ It will raise a :exc:`TypeError` in a future version of Python.
+
Ellipsis
.. index::
@@ -420,6 +425,11 @@ Mappings
equal (e.g., ``1`` and ``1.0``) then they can be used interchangeably to index
the same dictionary entry.
+ Dictionaries preserve insertion order, meaning that keys will be produced
+ in the same order they were added sequentially over the dictionary.
+ Replacing an existing key does not change the order, however removing a key
+ and re-inserting it will add it to the end instead of keeping its old place.
+
Dictionaries are mutable; they can be created by the ``{...}`` notation (see
section :ref:`dict`).
@@ -431,6 +441,11 @@ Mappings
additional examples of mapping types, as does the :mod:`collections`
module.
+ .. versionchanged:: 3.7
+ Dictionaries did not preserve insertion order in versions of Python before 3.6.
+ In CPython 3.6, insertion order was preserved, but it was considered
+ an implementation detail at that time rather than a language guarantee.
+
Callable types
.. index::
object: callable
@@ -1335,7 +1350,7 @@ Basic customization
.. versionchanged:: 3.7
``object.__format__(x, '')`` is now equivalent to ``str(x)`` rather
- than ``format(str(self), '')``.
+ than ``format(str(x), '')``.
.. _richcmpfuncs:
@@ -1946,7 +1961,9 @@ Once the appropriate metaclass has been identified, then the class namespace
is prepared. If the metaclass has a ``__prepare__`` attribute, it is called
as ``namespace = metaclass.__prepare__(name, bases, **kwds)`` (where the
additional keyword arguments, if any, come from the class definition). The
-``__prepare__`` method should be implemented as a :func:`classmethod`.
+``__prepare__`` method should be implemented as a :func:`classmethod`. The
+namespace returned by ``__prepare__`` is passed in to ``__new__``, but when
+the final class object is created the namespace is copied into a new ``dict``.
If the metaclass has no ``__prepare__`` attribute, then the class namespace
is initialised as an empty ordered mapping.
diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst
index d9db33a78568e4..8036a491c29ab1 100644
--- a/Doc/reference/expressions.rst
+++ b/Doc/reference/expressions.rst
@@ -28,7 +28,7 @@ Arithmetic conversions
.. index:: pair: arithmetic; conversion
When a description of an arithmetic operator below uses the phrase "the numeric
-arguments are converted to a common type," this means that the operator
+arguments are converted to a common type", this means that the operator
implementation for built-in types works as follows:
* If either argument is a complex number, the other is converted to complex;
@@ -178,7 +178,7 @@ called "displays", each of them in two flavors:
Common syntax elements for comprehensions are:
.. productionlist::
- comprehension: `expression` `comp_for`
+ comprehension: `assignment_expression` `comp_for`
comp_for: ["async"] "for" `target_list` "in" `or_test` [`comp_iter`]
comp_iter: `comp_for` | `comp_if`
comp_if: "if" `expression_nocond` [`comp_iter`]
@@ -911,7 +911,8 @@ series of :term:`arguments `:
: ["," `keywords_arguments`]
: | `starred_and_keywords` ["," `keywords_arguments`]
: | `keywords_arguments`
- positional_arguments: ["*"] `expression` ("," ["*"] `expression`)*
+ positional_arguments: positional_item ("," positional_item)*
+ positional_item: `assignment_expression` | "*" `expression`
starred_and_keywords: ("*" `expression` | `keyword_item`)
: ("," "*" `expression` | "," `keyword_item`)*
keywords_arguments: (`keyword_item` | "**" `expression`)
@@ -1421,8 +1422,9 @@ built-in types.
The not-a-number values ``float('NaN')`` and ``decimal.Decimal('NaN')`` are
special. Any ordered comparison of a number to a not-a-number value is false.
A counter-intuitive implication is that not-a-number values are not equal to
- themselves. For example, if ``x = float('NaN')``, ``3 < x``, ``x < 3``, ``x
- == x``, ``x != x`` are all false. This behavior is compliant with IEEE 754.
+ themselves. For example, if ``x = float('NaN')``, ``3 < x``, ``x < 3`` and
+ ``x == x`` are all false, while ``x != x`` is true. This behavior is
+ compliant with IEEE 754.
* ``None`` and ``NotImplemented`` are singletons. :PEP:`8` advises that
comparisons for singletons should always be done with ``is`` or ``is not``,
@@ -1642,6 +1644,17 @@ returns a boolean value regardless of the type of its argument
(for example, ``not 'foo'`` produces ``False`` rather than ``''``.)
+Assignment expressions
+======================
+
+.. productionlist::
+ assignment_expression: [`identifier` ":="] `expression`
+
+.. TODO: BPO-39868
+
+See :pep:`572` for more details about assignment expressions.
+
+
.. _if_expr:
Conditional expressions
@@ -1711,7 +1724,7 @@ Expression lists
expression_list: `expression` ("," `expression`)* [","]
starred_list: `starred_item` ("," `starred_item`)* [","]
starred_expression: `expression` | (`starred_item` ",")* [`starred_item`]
- starred_item: `expression` | "*" `or_expr`
+ starred_item: `assignment_expression` | "*" `or_expr`
.. index:: object: tuple
diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst
index c0e13b53698e6b..e3a3a88757ed29 100644
--- a/Doc/reference/lexical_analysis.rst
+++ b/Doc/reference/lexical_analysis.rst
@@ -316,7 +316,7 @@ The Unicode category codes mentioned above stand for:
* *Nd* - decimal numbers
* *Pc* - connector punctuations
* *Other_ID_Start* - explicit list of characters in `PropList.txt
-  <https://www.unicode.org/Public/12.1.0/ucd/PropList.txt>`_ to support backwards
+  <https://www.unicode.org/Public/13.0.0/ucd/PropList.txt>`_ to support backwards
compatibility
* *Other_ID_Continue* - likewise
@@ -325,7 +325,7 @@ of identifiers is based on NFKC.
A non-normative HTML file listing all valid identifier characters for Unicode
4.1 can be found at
-https://www.dcl.hpi.uni-potsdam.de/home/loewis/table-3131.html.
+https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt
.. _keywords:
@@ -376,11 +376,11 @@ characters:
information on this convention.
``__*__``
- System-defined names. These names are defined by the interpreter and its
- implementation (including the standard library). Current system names are
- discussed in the :ref:`specialnames` section and elsewhere. More will likely
- be defined in future versions of Python. *Any* use of ``__*__`` names, in
- any context, that does not follow explicitly documented use, is subject to
+ System-defined names, informally known as "dunder" names. These names are
+ defined by the interpreter and its implementation (including the standard library).
+ Current system names are discussed in the :ref:`specialnames` section and elsewhere.
+ More will likely be defined in future versions of Python. *Any* use of ``__*__`` names,
+ in any context, that does not follow explicitly documented use, is subject to
breakage without warning.
``__*``
@@ -685,6 +685,11 @@ strings), but they cannot contain comments. Each expression is evaluated
in the context where the formatted string literal appears, in order from
left to right.
+.. versionchanged:: 3.7
+ Prior to Python 3.7, an :keyword:`await` expression and comprehensions
+ containing an :keyword:`async for` clause were illegal in the expressions
+ in formatted string literals due to a problem with the implementation.
+
If a conversion is specified, the result of evaluating the expression
is converted before formatting. Conversion ``'!s'`` calls :func:`str` on
the result, ``'!r'`` calls :func:`repr`, and ``'!a'`` calls :func:`ascii`.
@@ -929,4 +934,4 @@ occurrence outside string literals and comments is an unconditional error:
.. rubric:: Footnotes
-.. [#] http://www.unicode.org/Public/11.0.0/ucd/NameAliases.txt
+.. [#] https://www.unicode.org/Public/11.0.0/ucd/NameAliases.txt
diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/asdl_highlight.py
new file mode 100644
index 00000000000000..7d2ef011c1b766
--- /dev/null
+++ b/Doc/tools/extensions/asdl_highlight.py
@@ -0,0 +1,51 @@
+import os
+import sys
+sys.path.append(os.path.abspath("../Parser/"))
+
+from pygments.lexer import RegexLexer, bygroups, include, words
+from pygments.token import (Comment, Generic, Keyword, Name, Operator,
+ Punctuation, Text)
+
+from asdl import builtin_types
+from sphinx.highlighting import lexers
+
+class ASDLLexer(RegexLexer):
+ name = "ASDL"
+ aliases = ["asdl"]
+ filenames = ["*.asdl"]
+ _name = r"([^\W\d]\w*)"
+ _text_ws = r"(\s*)"
+
+ tokens = {
+ "ws": [
+ (r"\n", Text),
+ (r"\s+", Text),
+ (r"--.*?$", Comment.Singleline),
+ ],
+ "root": [
+ include("ws"),
+ (
+ r"(module)" + _text_ws + _name,
+ bygroups(Keyword, Text, Name.Tag),
+ ),
+ (
+ r"(\w+)(\*\s|\?\s|\s)(\w+)",
+ bygroups(Name.Builtin.Pseudo, Operator, Name),
+ ),
+ (words(builtin_types), Name.Builtin),
+ (r"attributes", Name.Builtin),
+ (
+ _name + _text_ws + "(=)",
+ bygroups(Name, Text, Operator),
+ ),
+ (_name, Name.Class),
+ (r"\|", Operator),
+ (r"{|}|\(|\)", Punctuation),
+ (r".", Text),
+ ],
+ }
+
+
+def setup(app):
+ lexers["asdl"] = ASDLLexer()
+ return {'version': '1.0', 'parallel_read_safe': True}
diff --git a/Doc/tools/static/switchers.js b/Doc/tools/static/switchers.js
index e1ef91a8dfc686..c51f178ce645c0 100644
--- a/Doc/tools/static/switchers.js
+++ b/Doc/tools/static/switchers.js
@@ -10,7 +10,8 @@
'(?:release/\\d.\\d[\\x\\d\\.]*)'];
var all_versions = {
- '3.9': 'dev (3.9)',
+ '3.10': 'dev (3.10)',
+ '3.9': 'pre (3.9)',
'3.8': '3.8',
'3.7': '3.7',
'3.6': '3.6',
diff --git a/Doc/tools/susp-ignored.csv b/Doc/tools/susp-ignored.csv
index 5cdfd40f5b027e..7be8d0abd69a57 100644
--- a/Doc/tools/susp-ignored.csv
+++ b/Doc/tools/susp-ignored.csv
@@ -108,6 +108,8 @@ howto/pyporting,,::,Programming Language :: Python :: 3
howto/regex,,::,
howto/regex,,:foo,(?:foo)
howto/urllib2,,:password,"""joe:password@example.com"""
+library/ast,,:upper,lower:upper
+library/ast,,:step,lower:upper:step
library/audioop,,:ipos,"# factor = audioop.findfactor(in_test[ipos*2:ipos*2+len(out_test)],"
library/bisect,32,:hi,all(val >= x for val in a[i:hi])
library/bisect,42,:hi,all(val > x for val in a[i:hi])
@@ -147,6 +149,8 @@ library/ipaddress,,:db8,>>> ipaddress.IPv6Address('2001:db8::1000')
library/ipaddress,,::,>>> ipaddress.IPv6Address('2001:db8::1000')
library/ipaddress,,:db8,IPv6Address('2001:db8::1000')
library/ipaddress,,::,IPv6Address('2001:db8::1000')
+library/ipaddress,,::,IPv6Address('ff02::5678%1')
+library/ipaddress,,::,fe80::1234
library/ipaddress,,:db8,">>> ipaddress.ip_address(""2001:db8::1"").reverse_pointer"
library/ipaddress,,::,">>> ipaddress.ip_address(""2001:db8::1"").reverse_pointer"
library/ipaddress,,::,"""::abc:7:def"""
diff --git a/Doc/tools/templates/download.html b/Doc/tools/templates/download.html
index d9364d6ced729f..987c63a7f3575c 100644
--- a/Doc/tools/templates/download.html
+++ b/Doc/tools/templates/download.html
@@ -40,7 +40,7 @@
Download Python {{ release }} Documentation
These archives contain all the content in the documentation.
-
HTML Help (.chm) files are made available in the "Windows" section
+
HTML Help (.chm) files are made available in the "Windows" section
on the Python
download page.
diff --git a/Doc/tutorial/classes.rst b/Doc/tutorial/classes.rst
index f781fecf832f08..06bdd0d93515ed 100644
--- a/Doc/tutorial/classes.rst
+++ b/Doc/tutorial/classes.rst
@@ -323,7 +323,7 @@ Instance Objects
Now what can we do with instance objects? The only operations understood by
instance objects are attribute references. There are two kinds of valid
-attribute names, data attributes and methods.
+attribute names: data attributes and methods.
*data attributes* correspond to "instance variables" in Smalltalk, and to "data
members" in C++. Data attributes need not be declared; like local variables,
diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst
index 7dfd33af258867..26de866aab90cb 100644
--- a/Doc/tutorial/controlflow.rst
+++ b/Doc/tutorial/controlflow.rst
@@ -70,6 +70,9 @@ Code that modifies a collection while iterating over that same collection can
be tricky to get right. Instead, it is usually more straight-forward to loop
over a copy of the collection or to create a new collection::
+ # Create a sample collection
+ users = {'Hans': 'active', 'Éléonore': 'inactive', '景太郎': 'active'}
+
# Strategy: Iterate over a copy
for user, status in users.copy().items():
if status == 'inactive':
@@ -142,7 +145,7 @@ the list, thus saving space.
We say such an object is :term:`iterable`, that is, suitable as a target for
functions and constructs that expect something from which they can
obtain successive items until the supply is exhausted. We have seen that
-the :keyword:`for` statement is such a construct, while an example of function
+the :keyword:`for` statement is such a construct, while an example of a function
that takes an iterable is :func:`sum`::
>>> sum(range(4)) # 0 + 1 + 2 + 3
diff --git a/Doc/tutorial/datastructures.rst b/Doc/tutorial/datastructures.rst
index 0edb73ad736919..ff4c797f66cd63 100644
--- a/Doc/tutorial/datastructures.rst
+++ b/Doc/tutorial/datastructures.rst
@@ -613,6 +613,21 @@ direction and then call the :func:`reversed` function. ::
To loop over a sequence in sorted order, use the :func:`sorted` function which
returns a new sorted list while leaving the source unaltered. ::
+ >>> basket = ['apple', 'orange', 'apple', 'pear', 'orange', 'banana']
+ >>> for i in sorted(basket):
+ ... print(i)
+ ...
+ apple
+ apple
+ banana
+ orange
+ orange
+ pear
+
+Using :func:`set` on a sequence eliminates duplicate elements. The use of
+:func:`sorted` in combination with :func:`set` over a sequence is an idiomatic
+way to loop over unique elements of the sequence in sorted order. ::
+
>>> basket = ['apple', 'orange', 'apple', 'pear', 'orange', 'banana']
>>> for f in sorted(set(basket)):
... print(f)
diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst
index 8f86eca02483a2..0ce96466e8c286 100644
--- a/Doc/tutorial/errors.rst
+++ b/Doc/tutorial/errors.rst
@@ -67,7 +67,7 @@ The rest of the line provides detail based on the type of exception and what
caused it.
The preceding part of the error message shows the context where the exception
-happened, in the form of a stack traceback. In general it contains a stack
+occurred, in the form of a stack traceback. In general it contains a stack
traceback listing source lines; however, it will not display lines read from
standard input.
@@ -388,15 +388,33 @@ example::
File "", line 2, in
KeyboardInterrupt
-If a :keyword:`finally` clause is present, the :keyword:`finally` clause will execute as the last task before the :keyword:`try` statement completes. The :keyword:`finally` clause runs whether or not the :keyword:`try` statement produces an exception. The following points discuss more complex cases when an exception occurs:
-
-* If an exception occurs during execution of the :keyword:`!try` clause, the exception may be handled by an :keyword:`except` clause. If the exception is not handled by an :keyword:`except` clause, the exception is re-raised after the :keyword:`!finally` clause has been executed.
-
-* An exception could occur during execution of an :keyword:`!except` or :keyword:`!else` clause. Again, the exception is re-raised after the :keyword:`!finally` clause has been executed.
-
-* If the :keyword:`!try` statement reaches a :keyword:`break`, :keyword:`continue` or :keyword:`return` statement, the :keyword:`finally` clause will execute just prior to the :keyword:`break`, :keyword:`continue` or :keyword:`return` statement's execution.
-
-* If a :keyword:`finally` clause includes a :keyword:`return` statement, the :keyword:`finally` clause's :keyword:`return` statement will execute before, and instead of, the :keyword:`return` statement in a :keyword:`try` clause.
+If a :keyword:`finally` clause is present, the :keyword:`!finally`
+clause will execute as the last task before the :keyword:`try`
+statement completes. The :keyword:`!finally` clause runs whether or
+not the :keyword:`!try` statement produces an exception. The following
+points discuss more complex cases when an exception occurs:
+
+* If an exception occurs during execution of the :keyword:`!try`
+ clause, the exception may be handled by an :keyword:`except`
+ clause. If the exception is not handled by an :keyword:`!except`
+ clause, the exception is re-raised after the :keyword:`!finally`
+ clause has been executed.
+
+* An exception could occur during execution of an :keyword:`!except`
+ or :keyword:`!else` clause. Again, the exception is re-raised after
+ the :keyword:`!finally` clause has been executed.
+
+* If the :keyword:`!try` statement reaches a :keyword:`break`,
+ :keyword:`continue` or :keyword:`return` statement, the
+ :keyword:`!finally` clause will execute just prior to the
+ :keyword:`!break`, :keyword:`!continue` or :keyword:`!return`
+ statement's execution.
+
+* If a :keyword:`!finally` clause includes a :keyword:`!return`
+ statement, the returned value will be the one from the
+ :keyword:`!finally` clause's :keyword:`!return` statement, not the
+ value from the :keyword:`!try` clause's :keyword:`!return`
+ statement.
For example::
diff --git a/Doc/tutorial/inputoutput.rst b/Doc/tutorial/inputoutput.rst
index a404f4be19f1b9..366a532e817afa 100644
--- a/Doc/tutorial/inputoutput.rst
+++ b/Doc/tutorial/inputoutput.rst
@@ -172,7 +172,7 @@ Positional and keyword arguments can be arbitrarily combined::
If you have a really long format string that you don't want to split up, it
would be nice if you could reference the variables to be formatted by name
instead of by position. This can be done by simply passing the dict and using
-square brackets ``'[]'`` to access the keys ::
+square brackets ``'[]'`` to access the keys. ::
>>> table = {'Sjoerd': 4127, 'Jack': 4098, 'Dcab': 8637678}
>>> print('Jack: {0[Jack]:d}; Sjoerd: {0[Sjoerd]:d}; '
@@ -257,10 +257,10 @@ left with zeros. It understands about plus and minus signs::
Old string formatting
---------------------
-The ``%`` operator can also be used for string formatting. It interprets the
-left argument much like a :c:func:`sprintf`\ -style format string to be applied
-to the right argument, and returns the string resulting from this formatting
-operation. For example::
+The % operator (modulo) can also be used for string formatting. Given ``'string'
+% values``, instances of ``%`` in ``string`` are replaced with zero or more
+elements of ``values``. This operation is commonly known as string
+interpolation. For example::
>>> import math
>>> print('The value of pi is approximately %5.3f.' % math.pi)
diff --git a/Doc/tutorial/interpreter.rst b/Doc/tutorial/interpreter.rst
index b78d2960ac5432..d2733a9968fb1d 100644
--- a/Doc/tutorial/interpreter.rst
+++ b/Doc/tutorial/interpreter.rst
@@ -10,13 +10,13 @@ Using the Python Interpreter
Invoking the Interpreter
========================
-The Python interpreter is usually installed as :file:`/usr/local/bin/python3.9`
+The Python interpreter is usually installed as :file:`/usr/local/bin/python3.10`
on those machines where it is available; putting :file:`/usr/local/bin` in your
Unix shell's search path makes it possible to start it by typing the command:
.. code-block:: text
- python3.9
+ python3.10
to the shell. [#]_ Since the choice of the directory where the interpreter lives
is an installation option, other places are possible; check with your local
@@ -24,7 +24,7 @@ Python guru or system administrator. (E.g., :file:`/usr/local/python` is a
popular alternative location.)
On Windows machines where you have installed Python from the :ref:`Microsoft Store
-`, the :file:`python3.9` command will be available. If you have
+`, the :file:`python3.10` command will be available. If you have
the :ref:`py.exe launcher ` installed, you can use the :file:`py`
command. See :ref:`setting-envvars` for other ways to launch Python.
@@ -97,8 +97,8 @@ before printing the first prompt:
.. code-block:: shell-session
- $ python3.9
- Python 3.9 (default, June 4 2019, 09:25:04)
+ $ python3.10
+ Python 3.10 (default, June 4 2019, 09:25:04)
[GCC 4.8.2] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>>
diff --git a/Doc/tutorial/stdlib.rst b/Doc/tutorial/stdlib.rst
index a52653b94a3253..f33265cd2b0eb4 100644
--- a/Doc/tutorial/stdlib.rst
+++ b/Doc/tutorial/stdlib.rst
@@ -15,7 +15,7 @@ operating system::
>>> import os
>>> os.getcwd() # Return the current working directory
- 'C:\\Python39'
+ 'C:\\Python310'
>>> os.chdir('/server/accesslogs') # Change current working directory
>>> os.system('mkdir today') # Run the command mkdir in the system shell
0
diff --git a/Doc/tutorial/stdlib2.rst b/Doc/tutorial/stdlib2.rst
index 299482856ff324..298034d3b48440 100644
--- a/Doc/tutorial/stdlib2.rst
+++ b/Doc/tutorial/stdlib2.rst
@@ -278,7 +278,7 @@ applications include caching objects that are expensive to create::
Traceback (most recent call last):
File "", line 1, in
d['primary'] # entry was automatically removed
- File "C:/python39/lib/weakref.py", line 46, in __getitem__
+ File "C:/python310/lib/weakref.py", line 46, in __getitem__
o = self.data[key]()
KeyError: 'primary'
diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst
index 2206e5065be1fd..b0911956a9eb86 100644
--- a/Doc/using/cmdline.rst
+++ b/Doc/using/cmdline.rst
@@ -109,8 +109,8 @@ source.
Many standard library modules contain code that is invoked on their execution
as a script. An example is the :mod:`timeit` module::
- python -mtimeit -s 'setup here' 'benchmarked code here'
- python -mtimeit -h # for details
+ python -m timeit -s 'setup here' 'benchmarked code here'
+ python -m timeit -h # for details
.. audit-event:: cpython.run_module module-name cmdoption-m
@@ -426,6 +426,8 @@ Miscellaneous options
defines the following possible values:
* ``-X faulthandler`` to enable :mod:`faulthandler`;
+ * ``-X oldparser``: enable the traditional LL(1) parser. See also
+ :envvar:`PYTHONOLDPARSER` and :pep:`617`.
* ``-X showrefcount`` to output the total reference count and number of used
memory blocks when the program finishes or after each statement in the
interactive interpreter. This only works on debug builds.
@@ -478,6 +480,9 @@ Miscellaneous options
The ``-X showalloccount`` option has been removed.
+ .. deprecated-removed:: 3.9 3.10
+ The ``-X oldparser`` option.
+
Options you shouldn't use
~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -574,6 +579,15 @@ conflict.
:option:`-d` multiple times.
+.. envvar:: PYTHONOLDPARSER
+
+ If this is set to a non-empty string, enable the traditional LL(1) parser.
+
+ See also the :option:`-X` ``oldparser`` option and :pep:`617`.
+
+ .. deprecated-removed:: 3.9 3.10
+
+
.. envvar:: PYTHONINSPECT
If this is set to a non-empty string it is equivalent to specifying the
diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst
index baf737ddaa9171..ead71e1b079b3b 100644
--- a/Doc/using/mac.rst
+++ b/Doc/using/mac.rst
@@ -27,9 +27,8 @@ What you get after installing is a number of things:
* A :file:`Python 3.9` folder in your :file:`Applications` folder. In here
you find IDLE, the development environment that is a standard part of official
- Python distributions; PythonLauncher, which handles double-clicking Python
- scripts from the Finder; and the "Build Applet" tool, which allows you to
- package Python scripts as standalone applications on your system.
+ Python distributions; and PythonLauncher, which handles double-clicking Python
+ scripts from the Finder.
* A framework :file:`/Library/Frameworks/Python.framework`, which includes the
Python executable and libraries. The installer adds this location to your shell
@@ -159,11 +158,6 @@ https://riverbankcomputing.com/software/pyqt/intro.
Distributing Python Applications on the Mac
===========================================
-The "Build Applet" tool that is placed in the MacPython 3.6 folder is fine for
-packaging small Python scripts on your own machine to run as a standard Mac
-application. This tool, however, is not robust enough to distribute Python
-applications to other users.
-
The standard tool for deploying standalone Python applications on the Mac is
:program:`py2app`. More information on installing and using py2app can be found
at http://undefined.org/python/#py2app.
diff --git a/Doc/using/venv-create.inc b/Doc/using/venv-create.inc
index 6c6617dc158f7e..c8f6e8f87d5674 100644
--- a/Doc/using/venv-create.inc
+++ b/Doc/using/venv-create.inc
@@ -83,6 +83,17 @@ The command, if run with ``-h``, will show the available options::
particular note is that double-clicking ``python.exe`` in File Explorer
will resolve the symlink eagerly and ignore the virtual environment.
+.. note::
+ On Microsoft Windows, it may be required to enable the ``Activate.ps1``
+ script by setting the execution policy for the user. You can do this by
+ issuing the following PowerShell command:
+
+ PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
+
+ See `About Execution Policies
+ `_
+ for more information.
+
The created ``pyvenv.cfg`` file also includes the
``include-system-site-packages`` key, set to ``true`` if ``venv`` is
run with the ``--system-site-packages`` option, ``false`` otherwise.
diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst
new file mode 100644
index 00000000000000..34a09fe4b505cc
--- /dev/null
+++ b/Doc/whatsnew/3.10.rst
@@ -0,0 +1,156 @@
+****************************
+ What's New In Python 3.10
+****************************
+
+:Release: |release|
+:Date: |today|
+
+.. Rules for maintenance:
+
+ * Anyone can add text to this document. Do not spend very much time
+ on the wording of your changes, because your text will probably
+ get rewritten to some degree.
+
+ * The maintainer will go through Misc/NEWS periodically and add
+ changes; it's therefore more important to add your changes to
+ Misc/NEWS than to this file.
+
+ * This is not a complete list of every single change; completeness
+ is the purpose of Misc/NEWS. Some changes I consider too small
+ or esoteric to include. If such a change is added to the text,
+ I'll just remove it. (This is another reason you shouldn't spend
+ too much time on writing your addition.)
+
+ * If you want to draw your new text to the attention of the
+ maintainer, add 'XXX' to the beginning of the paragraph or
+ section.
+
+ * It's OK to just add a fragmentary note about a change. For
+ example: "XXX Describe the transmogrify() function added to the
+ socket module." The maintainer will research the change and
+ write the necessary text.
+
+ * You can comment out your additions if you like, but it's not
+ necessary (especially when a final release is some months away).
+
+ * Credit the author of a patch or bugfix. Just the name is
+ sufficient; the e-mail address isn't necessary.
+
+ * It's helpful to add the bug/patch number as a comment:
+
+ XXX Describe the transmogrify() function added to the socket
+ module.
+ (Contributed by P.Y. Developer in :issue:`12345`.)
+
+ This saves the maintainer the effort of going through the Mercurial log
+ when researching a change.
+
+This article explains the new features in Python 3.10, compared to 3.9.
+
+For full details, see the :ref:`changelog `.
+
+.. note::
+
+ Prerelease users should be aware that this document is currently in draft
+ form. It will be updated substantially as Python 3.10 moves towards release,
+ so it's worth checking back even after reading earlier versions.
+
+
+Summary -- Release highlights
+=============================
+
+.. This section singles out the most important changes in Python 3.10.
+ Brevity is key.
+
+
+.. PEP-sized items next.
+
+
+
+New Features
+============
+
+
+
+Other Language Changes
+======================
+
+* Builtin and extension functions that take integer arguments no longer accept
+ :class:`~decimal.Decimal`\ s, :class:`~fractions.Fraction`\ s and other
+ objects that can be converted to integers only with a loss (e.g. that have
+ the :meth:`~object.__int__` method but do not have the
+ :meth:`~object.__index__` method).
+ (Contributed by Serhiy Storchaka in :issue:`37999`.)
+
+
+New Modules
+===========
+
+* None yet.
+
+
+Improved Modules
+================
+
+tracemalloc
+-----------
+
+Added :func:`tracemalloc.reset_peak` to set the peak size of traced memory
+blocks to the current size, to measure the peak of specific pieces of code.
+(Contributed by Huon Wilson in :issue:`40630`.)
+
+Optimizations
+=============
+
+
+Deprecated
+==========
+
+
+Removed
+=======
+
+
+Porting to Python 3.10
+======================
+
+This section lists previously described changes and other bugfixes
+that may require changes to your code.
+
+
+
+Build Changes
+=============
+
+
+C API Changes
+=============
+
+New Features
+------------
+
+* The result of :c:func:`PyNumber_Index` now always has exact type :class:`int`.
+  Previously, the result could have been an instance of a subclass of ``int``.
+  (Contributed by Serhiy Storchaka in :issue:`40792`.)
+
+
+Porting to Python 3.10
+----------------------
+
+* Since :c:func:`Py_TYPE()` is changed to the inline static function,
+ ``Py_TYPE(obj) = new_type`` must be replaced with ``Py_SET_TYPE(obj, new_type)``:
+ see :c:func:`Py_SET_TYPE()` (available since Python 3.9).
+ (Contributed by Dong-hee Na in :issue:`39573`.)
+
+* Since :c:func:`Py_REFCNT()` is changed to the inline static function,
+ ``Py_REFCNT(obj) = new_refcnt`` must be replaced with ``Py_SET_REFCNT(obj, new_refcnt)``:
+ see :c:func:`Py_SET_REFCNT()` (available since Python 3.9).
+ (Contributed by Victor Stinner in :issue:`39573`.)
+
+* Since :c:func:`Py_SIZE()` is changed to the inline static function,
+ ``Py_SIZE(obj) = new_size`` must be replaced with ``Py_SET_SIZE(obj, new_size)``:
+ see :c:func:`Py_SET_SIZE()` (available since Python 3.9).
+ (Contributed by Victor Stinner in :issue:`39573`.)
+
+Removed
+-------
diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst
index 04cfa57e1446fd..59b96621bdd4b5 100644
--- a/Doc/whatsnew/3.7.rst
+++ b/Doc/whatsnew/3.7.rst
@@ -493,6 +493,11 @@ description.
Other Language Changes
======================
+* An :keyword:`await` expression and comprehensions containing an
+ :keyword:`async for` clause were illegal in the expressions in
+ :ref:`formatted string literals ` due to a problem with the
+ implementation. In Python 3.7 this restriction was lifted.
+
* More than 255 arguments can now be passed to a function, and a function can
now have more than 255 parameters. (Contributed by Serhiy Storchaka in
:issue:`12844` and :issue:`18896`.)
diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst
index cb4c518662d6ba..b6ed2da36889a3 100644
--- a/Doc/whatsnew/3.8.rst
+++ b/Doc/whatsnew/3.8.rst
@@ -142,12 +142,11 @@ However, these are invalid calls::
One use case for this notation is that it allows pure Python functions
to fully emulate behaviors of existing C coded functions. For example,
-the built-in :func:`pow` function does not accept keyword arguments::
+the built-in :func:`divmod` function does not accept keyword arguments::
- def pow(x, y, z=None, /):
- "Emulate the built in pow() function"
- r = x ** y
- return r if z is None else r%z
+ def divmod(a, b, /):
+ "Emulate the built in divmod() function"
+ return (a // b, a % b)
Another use case is to preclude keyword arguments when the parameter
name is not helpful. For example, the builtin :func:`len` function has
@@ -429,8 +428,8 @@ Other Language Changes
lastname, *members = family.split()
return lastname.upper(), *members
- >>> parse('simpsons homer marge bart lisa sally')
- ('SIMPSONS', 'homer', 'marge', 'bart', 'lisa', 'sally')
+ >>> parse('simpsons homer marge bart lisa maggie')
+ ('SIMPSONS', 'homer', 'marge', 'bart', 'lisa', 'maggie')
(Contributed by David Cuthbert and Jordan Chapman in :issue:`32117`.)
@@ -1693,7 +1692,7 @@ Deprecated
:meth:`~gettext.NullTranslations.set_output_charset`, and the *codeset*
parameter of functions :func:`~gettext.translation` and
:func:`~gettext.install` are also deprecated, since they are only used for
- for the ``l*gettext()`` functions.
+ the ``l*gettext()`` functions.
(Contributed by Serhiy Storchaka in :issue:`33710`.)
* The :meth:`~threading.Thread.isAlive()` method of :class:`threading.Thread`
@@ -1940,6 +1939,12 @@ Changes in the Python API
:exc:`dbm.gnu.error` or :exc:`dbm.ndbm.error`) instead of :exc:`KeyError`.
(Contributed by Xiang Zhang in :issue:`33106`.)
+* Simplified AST for literals. All constants will be represented as
+ :class:`ast.Constant` instances. Instantiating old classes ``Num``,
+ ``Str``, ``Bytes``, ``NameConstant`` and ``Ellipsis`` will return
+ an instance of ``Constant``.
+ (Contributed by Serhiy Storchaka in :issue:`32892`.)
+
* :func:`~os.path.expanduser` on Windows now prefers the :envvar:`USERPROFILE`
environment variable and does not use :envvar:`HOME`, which is not normally
set for regular user accounts.
@@ -2203,7 +2208,13 @@ Here's a summary of performance improvements since Python 3.3:
Timing loop:
loop_overhead 0.3 0.5 0.6 0.4 0.3 0.3
- (Measured from the macOS 64-bit builds found at python.org)
+The benchmarks were measured on an
+`Intel® Core™ i7-4960HQ processor
+`_
+running the macOS 64-bit builds found at
+`python.org `_.
+The benchmark script displays timings in nanoseconds.
+
Notable changes in Python 3.8.1
===============================
diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst
index 66caf3f6f82135..6c3cbbe641b573 100644
--- a/Doc/whatsnew/3.9.rst
+++ b/Doc/whatsnew/3.9.rst
@@ -66,10 +66,97 @@ Summary -- Release highlights
.. PEP-sized items next.
+You should check for DeprecationWarning in your code
+====================================================
+
+When Python 2.7 was still supported, many functions were kept for backward
+compatibility with Python 2.7. With the end of Python 2.7 support, these
+backward compatibility layers have been removed, or will be removed soon.
+Most of them emitted a :exc:`DeprecationWarning` warning for several years. For
+example, using ``collections.Mapping`` instead of ``collections.abc.Mapping``
+emits a :exc:`DeprecationWarning` since Python 3.3, released in 2012.
+
+Test your application with the :option:`-W` ``default`` command-line option to see
+:exc:`DeprecationWarning` and :exc:`PendingDeprecationWarning`, or even with
+:option:`-W` ``error`` to treat them as errors. :ref:`Warnings Filter
+` can be used to ignore warnings from third-party code.
+
+It has been decided to keep a few backward compatibility layers for one last
+release, to give more time to the maintainers of Python projects to organize
+the removal of Python 2 support and add support for Python 3.9.
+
+Aliases to :ref:`Abstract Base Classes ` in
+the :mod:`collections` module, like ``collections.Mapping`` alias to
+:class:`collections.abc.Mapping`, are kept for one last release for backward
+compatibility. They will be removed from Python 3.10.
+
+More generally, try to run your tests in the :ref:`Python Development Mode
+` which helps to prepare your code to make it compatible with the
+next Python version.
+
New Features
============
+Dictionary Merge & Update Operators
+-----------------------------------
+
+Merge (``|``) and update (``|=``) operators have been added to the built-in
+:class:`dict` class. See :pep:`584` for a full description.
+(Contributed by Brandt Bucher in :issue:`36144`.)
+
+PEP 616: New removeprefix() and removesuffix() string methods
+-------------------------------------------------------------
+
+:meth:`str.removeprefix(prefix)` and
+:meth:`str.removesuffix(suffix)` have been added
+to easily remove an unneeded prefix or a suffix from a string. Corresponding
+``bytes``, ``bytearray``, and ``collections.UserString`` methods have also been
+added. See :pep:`616` for a full description. (Contributed by Dennis Sweeney in
+:issue:`39939`.)
+
+PEP 585: Builtin Generic Types
+------------------------------
+
+In type annotations you can now use built-in collection types such as
+``list`` and ``dict`` as generic types instead of importing the
+corresponding capitalized types (e.g. ``List`` or ``Dict``) from
+``typing``. Some other types in the standard library are also now generic,
+for example ``queue.Queue``.
+
+Example:
+
+.. code-block:: python
+
+ def greet_all(names: list[str]) -> None:
+ for name in names:
+ print("Hello", name)
+
+See :pep:`585` for more details. (Contributed by Guido van Rossum,
+Ethan Smith, and Batuhan Taşkaya in :issue:`39481`.)
+
+PEP 617: New Parser
+-------------------
+
+Python 3.9 uses a new parser, based on `PEG
+`_ instead
+of `LL(1) `_. The new
+parser's performance is roughly comparable to that of the old parser,
+but the PEG formalism is more flexible than LL(1) when it comes to
+designing new language features. We'll start using this flexibility
+in Python 3.10 and later.
+
+The :mod:`ast` module uses the new parser and produces the same AST as
+the old parser.
+
+In Python 3.10, the old parser will be deleted and so will all
+functionality that depends on it (primarily the :mod:`parser` module,
+which has long been deprecated). In Python 3.9 *only*, you can switch
+back to the LL(1) parser using a command line switch (``-X
+oldparser``) or an environment variable (``PYTHONOLDPARSER=1``).
+
+See :pep:`617` for more details. (Contributed by Guido van Rossum,
+Pablo Galindo and Lysandros Nikolaou in :issue:`40334`.)
Other Language Changes
@@ -83,11 +170,10 @@ Other Language Changes
* Python now gets the absolute path of the script filename specified on
the command line (ex: ``python3 script.py``): the ``__file__`` attribute of
- the :mod:`__main__` module and ``sys.path[0]`` become an
- absolute path, rather than a relative path. These paths now remain valid
- after the current directory is changed by :func:`os.chdir`. As a side effect,
- a traceback also displays the absolute path for :mod:`__main__` module frames
- in this case.
+ the :mod:`__main__` module became an absolute path, rather than a relative
+ path. These paths now remain valid after the current directory is changed
+ by :func:`os.chdir`. As a side effect, the traceback also displays the
+ absolute path for :mod:`__main__` module frames in this case.
(Contributed by Victor Stinner in :issue:`20443`.)
* In the :ref:`Python Development Mode ` and in debug build, the
@@ -105,11 +191,58 @@ Other Language Changes
There are similar changes for :class:`bytes` and :class:`bytearray` objects.
(Contributed by Serhiy Storchaka in :issue:`28029`.)
+* Any valid expression can now be used as a :term:`decorator`. Previously, the
+ grammar was much more restrictive. See :pep:`614` for details.
+ (Contributed by Brandt Bucher in :issue:`39702`.)
+
+* Improved help for the :mod:`typing` module. Docstrings are now shown for
+ all special forms and special generic aliases (like ``Union`` and ``List``).
+ Using :func:`help` with generic alias like ``List[int]`` will show the help
+ for the correspondent concrete type (``list`` in this case).
+ (Contributed by Serhiy Storchaka in :issue:`40257`.)
+
New Modules
===========
-* None yet.
+zoneinfo
+--------
+
+The :mod:`zoneinfo` module brings support for the IANA time zone database to
+the standard library. It adds :class:`zoneinfo.ZoneInfo`, a concrete
+:class:`datetime.tzinfo` implementation backed by the system's time zone data.
+
+Example::
+
+ >>> from zoneinfo import ZoneInfo
+ >>> from datetime import datetime, timedelta
+
+ >>> # Daylight saving time
+ >>> dt = datetime(2020, 10, 31, 12, tzinfo=ZoneInfo("America/Los_Angeles"))
+ >>> print(dt)
+ 2020-10-31 12:00:00-07:00
+ >>> dt.tzname()
+ 'PDT'
+
+ >>> # Standard time
+ >>> dt += timedelta(days=7)
+ >>> print(dt)
+ 2020-11-07 12:00:00-08:00
+ >>> print(dt.tzname())
+ PST
+
+
+As a fall-back source of data for platforms that don't ship the IANA database,
+the |tzdata|_ module was released as a first-party package -- distributed via
+PyPI and maintained by the CPython core team.
+
+.. |tzdata| replace:: ``tzdata``
+.. _tzdata: https://pypi.org/project/tzdata/
+
+.. seealso::
+
+ :pep:`615` -- Support for the IANA Time Zone Database in the Standard Library
+ PEP written and implemented by Paul Ganssle
Improved Modules
@@ -127,6 +260,9 @@ be used to unparse an :class:`ast.AST` object and produce a string with code
that would produce an equivalent :class:`ast.AST` object when parsed.
(Contributed by Pablo Galindo and Batuhan Taskaya in :issue:`38870`.)
+Added docstrings to AST nodes that contain the ASDL signature used to
+construct that node. (Contributed by Batuhan Taskaya in :issue:`39638`.)
+
asyncio
-------
@@ -146,6 +282,22 @@ that schedules a shutdown for the default executor that waits on the
Added :class:`asyncio.PidfdChildWatcher`, a Linux-specific child watcher
implementation that polls process file descriptors. (:issue:`38692`)
+Added a new :term:`coroutine` :func:`asyncio.to_thread`. It is mainly used for
+running IO-bound functions in a separate thread to avoid blocking the event
+loop, and essentially works as a high-level version of
+:meth:`~asyncio.loop.run_in_executor` that can directly take keyword arguments.
+(Contributed by Kyle Stanley and Yury Selivanov in :issue:`32309`.)
+
+compileall
+----------
+
+Added the possibility to use hardlinks for duplicated ``.pyc`` files: the
+*hardlink_dupes* parameter and the ``--hardlink-dupes`` command-line option.
+(Contributed by Lumír 'Frenzy' Balhar in :issue:`40495`.)
+
+Added new options for path manipulation in resulting ``.pyc`` files: the
+*stripdir*, *prependdir* and *limit_sl_dest* parameters and the ``-s``, ``-p``
+and ``-e`` command-line options.
+Added the possibility to specify the option for an optimization level multiple
+times.
+(Contributed by Lumír 'Frenzy' Balhar in :issue:`38112`.)
+
concurrent.futures
------------------
@@ -155,6 +307,16 @@ which have not started running, instead of waiting for them to complete before
shutting down the executor.
(Contributed by Kyle Stanley in :issue:`39349`.)
+Removed daemon threads from :class:`~concurrent.futures.ThreadPoolExecutor`
+and :class:`~concurrent.futures.ProcessPoolExecutor`. This improves
+compatibility with subinterpreters and predictability in their shutdown
+processes. (Contributed by Kyle Stanley in :issue:`39812`.)
+
+Workers in :class:`~concurrent.futures.ProcessPoolExecutor` are now spawned on
+demand, only when there are no available idle workers to reuse. This optimizes
+startup overhead and reduces the amount of lost CPU time to idle workers.
+(Contributed by Kyle Stanley in :issue:`39207`.)
+
curses
------
@@ -162,6 +324,20 @@ Add :func:`curses.get_escdelay`, :func:`curses.set_escdelay`,
:func:`curses.get_tabsize`, and :func:`curses.set_tabsize` functions.
(Contributed by Anthony Sottile in :issue:`38312`.)
+datetime
+--------
+
+The :meth:`~datetime.date.isocalendar()` of :class:`datetime.date`
+and :meth:`~datetime.datetime.isocalendar()` of :class:`datetime.datetime`
+methods now return a :func:`~collections.namedtuple` instead of a :class:`tuple`.
+(Contributed by Dong-hee Na in :issue:`24416`.)
+
+distutils
+---------
+
+The :command:`upload` command now creates SHA2-256 and Blake2b-256 hash
+digests. It skips MD5 on platforms that block MD5 digest.
+(Contributed by Christian Heimes in :issue:`40698`.)
+
fcntl
-----
@@ -195,6 +371,21 @@ Added a new function :func:`gc.is_finalized` to check if an object has been
finalized by the garbage collector. (Contributed by Pablo Galindo in
:issue:`39322`.)
+hashlib
+-------
+
+Builtin hash modules can now be disabled with
+``./configure --without-builtin-hashlib-hashes`` or selectively enabled with
+e.g. ``./configure --with-builtin-hashlib-hashes=sha3,blake2`` to force use
+of OpenSSL based implementation.
+(Contributed by Christian Heimes in :issue:`40479`)
+
+http
+----
+
+HTTP status codes ``103 EARLY_HINTS``, ``418 IM_A_TEAPOT`` and ``425 TOO_EARLY`` are added to
+:class:`http.HTTPStatus`. (Contributed by Dong-hee Na in :issue:`39509` and Ross Rhodes in :issue:`39507`.)
+
imaplib
-------
@@ -205,9 +396,47 @@ with this change. The overridden methods of :class:`~imaplib.IMAP4_SSL` and
:class:`~imaplib.IMAP4_stream` were applied to this change.
(Contributed by Dong-hee Na in :issue:`38615`.)
+:meth:`imaplib.IMAP4.unselect` is added.
+:meth:`imaplib.IMAP4.unselect` frees server's resources associated with the
+selected mailbox and returns the server to the authenticated
+state. This command performs the same actions as :meth:`imaplib.IMAP4.close`, except
+that no messages are permanently removed from the currently
+selected mailbox. (Contributed by Dong-hee Na in :issue:`40375`.)
+
+importlib
+---------
+
+To improve consistency with import statements, :func:`importlib.util.resolve_name`
+now raises :exc:`ImportError` instead of :exc:`ValueError` for invalid relative
+import attempts.
+(Contributed by Ngalim Siregar in :issue:`37444`.)
+
+inspect
+-------
+
+:attr:`inspect.BoundArguments.arguments` is changed from ``OrderedDict`` to regular
+dict. (Contributed by Inada Naoki in :issue:`36350` and :issue:`39775`.)
+
+ipaddress
+---------
+
+:mod:`ipaddress` now supports IPv6 Scoped Addresses (IPv6 address with suffix ``%``).
+
+Scoped IPv6 addresses can be parsed using :class:`ipaddress.IPv6Address`.
+If present, scope zone ID is available through the :attr:`~ipaddress.IPv6Address.scope_id` attribute.
+(Contributed by Oleksandr Pavliuk in :issue:`34788`.)
+
math
----
+Expanded the :func:`math.gcd` function to handle multiple arguments.
+Formerly, it only supported two arguments.
+(Contributed by Serhiy Storchaka in :issue:`39648`.)
+
+Add :func:`math.lcm`: return the least common multiple of specified arguments.
+(Contributed by Mark Dickinson, Ananthakrishnan and Serhiy Storchaka in
+:issue:`39479` and :issue:`39648`.)
+
Add :func:`math.nextafter`: return the next floating-point value after *x*
towards *y*.
(Contributed by Victor Stinner in :issue:`39288`.)
@@ -216,6 +445,14 @@ Add :func:`math.ulp`: return the value of the least significant bit
of a float.
(Contributed by Victor Stinner in :issue:`39310`.)
+multiprocessing
+---------------
+
+The :class:`multiprocessing.SimpleQueue` class has a new
+:meth:`~multiprocessing.SimpleQueue.close` method to explicitly close the
+queue.
+(Contributed by Victor Stinner in :issue:`30966`.)
+
nntplib
-------
@@ -240,31 +477,9 @@ The :func:`os.putenv` and :func:`os.unsetenv` functions are now always
available.
(Contributed by Victor Stinner in :issue:`39395`.)
-poplib
-------
-
-:class:`~poplib.POP3` and :class:`~poplib.POP3_SSL` now raise a :class:`ValueError`
-if the given timeout for their constructor is zero to prevent the creation of
-a non-blocking socket. (Contributed by Dong-hee Na in :issue:`39259`.)
-
-threading
----------
-
-In a subinterpreter, spawning a daemon thread now raises a :exc:`RuntimeError`. Daemon
-threads were never supported in subinterpreters. Previously, the subinterpreter
-finalization crashed with a Python fatal error if a daemon thread was still
-running.
-(Contributed by Victor Stinner in :issue:`37266`.)
-
-venv
-----
-
-The activation scripts provided by :mod:`venv` now all specify their prompt
-customization consistently by always using the value specified by
-``__VENV_PROMPT__``. Previously some scripts unconditionally used
-``__VENV_PROMPT__``, others only if it happened to be set (which was the default
-case), and one used ``__VENV_NAME__`` instead.
-(Contributed by Brett Cannon in :issue:`37663`.)
+Add :func:`os.waitstatus_to_exitcode` function:
+convert a wait status to an exit code.
+(Contributed by Victor Stinner in :issue:`40094`.)
pathlib
-------
@@ -273,19 +488,37 @@ Added :meth:`pathlib.Path.readlink()` which acts similarly to
:func:`os.readlink`.
(Contributed by Girts Folkmanis in :issue:`30618`)
+poplib
+------
+
+:class:`~poplib.POP3` and :class:`~poplib.POP3_SSL` now raise a :class:`ValueError`
+if the given timeout for their constructor is zero to prevent the creation of
+a non-blocking socket. (Contributed by Dong-hee Na in :issue:`39259`.)
+
pprint
------
:mod:`pprint` can now pretty-print :class:`types.SimpleNamespace`.
(Contributed by Carl Bordum Hansen in :issue:`37376`.)
-importlib
----------
+pydoc
+-----
-To improve consistency with import statements, :func:`importlib.util.resolve_name`
-now raises :exc:`ImportError` instead of :exc:`ValueError` for invalid relative
-import attempts.
-(Contributed by Ngalim Siregar in :issue:`37444`.)
+The documentation string is now shown not only for class, function,
+method etc, but for any object that has its own ``__doc__`` attribute.
+(Contributed by Serhiy Storchaka in :issue:`40257`.)
+
+random
+------
+
+Add a new :attr:`random.Random.randbytes` method: generate random bytes.
+(Contributed by Victor Stinner in :issue:`40286`.)
+
+signal
+------
+
+Exposed the Linux-specific :func:`signal.pidfd_send_signal` for sending
+signals to a process using a file descriptor instead of a pid. (:issue:`38712`)
smtplib
-------
@@ -297,11 +530,37 @@ a non-blocking socket. (Contributed by Dong-hee Na in :issue:`39259`.)
:class:`~smtplib.LMTP` constructor now has an optional *timeout* parameter.
(Contributed by Dong-hee Na in :issue:`39329`.)
-signal
+socket
------
-Exposed the Linux-specific :func:`signal.pidfd_send_signal` for sending to
-signals to a process using a file descriptor instead of a pid. (:issue:`38712`)
+The :mod:`socket` module now exports the :data:`~socket.CAN_RAW_JOIN_FILTERS`
+constant on Linux 4.1 and greater.
+(Contributed by Stefan Tatschner and Zackery Spytz in :issue:`25780`.)
+
+The socket module now supports the :data:`~socket.CAN_J1939` protocol on
+platforms that support it. (Contributed by Karl Ding in :issue:`40291`.)
+
+time
+----
+
+On AIX, :func:`~time.thread_time` is now implemented with ``thread_cputime()``
+which has nanosecond resolution, rather than
+``clock_gettime(CLOCK_THREAD_CPUTIME_ID)`` which has a resolution of 10 ms.
+(Contributed by Batuhan Taskaya in :issue:`40192`.)
+
+sys
+---
+
+Add a new :attr:`sys.platlibdir` attribute: name of the platform-specific
+library directory. It is used to build the path of standard library and the
+paths of installed extension modules. It is equal to ``"lib"`` on most
+platforms. On Fedora and SuSE, it is equal to ``"lib64"`` on 64-bit platforms.
+(Contributed by Jan Matějek, Matěj Cepl, Charalampos Stratakis and Victor Stinner in :issue:`1294959`.)
+
+Previously, :attr:`sys.stderr` was block-buffered when non-interactive. Now
+``stderr`` defaults to always being line-buffered.
+(Contributed by Jendrik Seipp in :issue:`13601`.)
+
typing
------
@@ -311,58 +570,122 @@ types with context-specific metadata and new ``include_extras`` parameter to
:func:`typing.get_type_hints` to access the metadata at runtime. (Contributed
by Till Varoquaux and Konstantin Kashin.)
+unicodedata
+-----------
-Optimizations
-=============
+The Unicode database has been updated to version 13.0.0. (:issue:`39926`).
+venv
+----
-Build and C API Changes
-=======================
-
-* Provide :c:func:`Py_EnterRecursiveCall` and :c:func:`Py_LeaveRecursiveCall`
- as regular functions for the limited API. Previously, there were defined as
- macros, but these macros didn't work with the limited API which cannot access
- ``PyThreadState.recursion_depth`` field. Remove ``_Py_CheckRecursionLimit``
- from the stable ABI.
- (Contributed by Victor Stinner in :issue:`38644`.)
-
-* Add a new public :c:func:`PyObject_CallNoArgs` function to the C API, which
- calls a callable Python object without any arguments. It is the most efficient
- way to call a callable Python object without any argument.
- (Contributed by Victor Stinner in :issue:`37194`.)
-
-* The global variable :c:data:`PyStructSequence_UnnamedField` is now a constant
- and refers to a constant string.
- (Contributed by Serhiy Storchaka in :issue:`38650`.)
+The activation scripts provided by :mod:`venv` now all specify their prompt
+customization consistently by always using the value specified by
+``__VENV_PROMPT__``. Previously some scripts unconditionally used
+``__VENV_PROMPT__``, others only if it happened to be set (which was the default
+case), and one used ``__VENV_NAME__`` instead.
+(Contributed by Brett Cannon in :issue:`37663`.)
-* Exclude ``PyFPE_START_PROTECT()`` and ``PyFPE_END_PROTECT()`` macros of
- ``pyfpe.h`` from ``Py_LIMITED_API`` (stable API).
- (Contributed by Victor Stinner in :issue:`38835`.)
+xml
+---
-* Remove ``PyMethod_ClearFreeList()`` and ``PyCFunction_ClearFreeList()``
- functions: the free lists of bound method objects have been removed.
- (Contributed by Inada Naoki and Victor Stinner in :issue:`37340`.)
+White space characters within attributes are now preserved when serializing
+:mod:`xml.etree.ElementTree` to XML file. EOLNs are no longer normalized
+to ``\n``. This is the result of discussion about how to interpret
+section 2.11 of XML spec.
+(Contributed by Mefistotelis in :issue:`39011`.)
-* Remove ``PyUnicode_ClearFreeList()`` function: the Unicode free list has been
- removed in Python 3.3.
- (Contributed by Victor Stinner in :issue:`38896`.)
-* The ``tp_print`` slot of :ref:`PyTypeObject ` has been removed.
- It was used for printing objects to files in Python 2.7 and before. Since
- Python 3.0, it has been ignored and unused.
- (Contributed by Jeroen Demeyer in :issue:`36974`.)
-
-* On non-Windows platforms, the :c:func:`setenv` and :c:func:`unsetenv`
- functions are now required to build Python.
- (Contributed by Victor Stinner in :issue:`39395`.)
+Optimizations
+=============
-* The ``COUNT_ALLOCS`` special build macro has been removed.
- (Contributed by Victor Stinner in :issue:`39489`.)
+* Optimized the idiom for assignment a temporary variable in comprehensions.
+ Now ``for y in [expr]`` in comprehensions is as fast as a simple assignment
+ ``y = expr``. For example:
+
+ sums = [s for s in [0] for x in data for s in [s + x]]
+
+ Unlike the ``:=`` operator this idiom does not leak a variable to the
+ outer scope.
+
+ (Contributed by Serhiy Storchaka in :issue:`32856`.)
+
+* Optimize signal handling in multithreaded applications. If a thread different
+ than the main thread gets a signal, the bytecode evaluation loop is no longer
+ interrupted at each bytecode instruction to check for pending signals which
+ cannot be handled. Only the main thread of the main interpreter can handle
+ signals.
+
+ Previously, the bytecode evaluation loop was interrupted at each instruction
+ until the main thread handles signals.
+ (Contributed by Victor Stinner in :issue:`40010`.)
+
+* Optimize the :mod:`subprocess` module on FreeBSD using ``closefrom()``.
+ (Contributed by Ed Maste, Conrad Meyer, Kyle Evans, Kubilay Kocak and Victor
+ Stinner in :issue:`38061`.)
+
+Here's a summary of performance improvements from Python 3.4 through Python 3.9:
+
+.. code-block:: none
+
+ Python version 3.4 3.5 3.6 3.7 3.8 3.9
+ -------------- --- --- --- --- --- ---
+
+ Variable and attribute read access:
+ read_local 7.1 7.1 5.4 5.1 3.9 4.0
+ read_nonlocal 7.1 8.1 5.8 5.4 4.4 4.8
+ read_global 15.5 19.0 14.3 13.6 7.6 7.7
+ read_builtin 21.1 21.6 18.5 19.0 7.5 7.7
+ read_classvar_from_class 25.6 26.5 20.7 19.5 18.4 18.6
+ read_classvar_from_instance 22.8 23.5 18.8 17.1 16.4 20.1
+ read_instancevar 32.4 33.1 28.0 26.3 25.4 27.7
+ read_instancevar_slots 27.8 31.3 20.8 20.8 20.2 24.5
+ read_namedtuple 73.8 57.5 45.0 46.8 18.4 23.2
+ read_boundmethod 37.6 37.9 29.6 26.9 27.7 45.9
+
+ Variable and attribute write access:
+ write_local 8.7 9.3 5.5 5.3 4.3 4.2
+ write_nonlocal 10.5 11.1 5.6 5.5 4.7 4.9
+ write_global 19.7 21.2 18.0 18.0 15.8 17.2
+ write_classvar 92.9 96.0 104.6 102.1 39.2 43.2
+ write_instancevar 44.6 45.8 40.0 38.9 35.5 40.7
+ write_instancevar_slots 35.6 36.1 27.3 26.6 25.7 27.7
+
+ Data structure read access:
+ read_list 24.2 24.5 20.8 20.8 19.0 21.1
+ read_deque 24.7 25.5 20.2 20.6 19.8 21.6
+ read_dict 24.3 25.7 22.3 23.0 21.0 22.5
+ read_strdict 22.6 24.3 19.5 21.2 18.9 21.6
+
+ Data structure write access:
+ write_list 27.1 28.5 22.5 21.6 20.0 21.6
+ write_deque 28.7 30.1 22.7 21.8 23.5 23.2
+ write_dict 31.4 33.3 29.3 29.2 24.7 27.8
+ write_strdict 28.4 29.9 27.5 25.2 23.1 29.8
+
+ Stack (or queue) operations:
+ list_append_pop 93.4 112.7 75.4 74.2 50.8 53.9
+ deque_append_pop 43.5 57.0 49.4 49.2 42.5 45.5
+ deque_append_popleft 43.7 57.3 49.7 49.7 42.8 45.5
+
+ Timing loop:
+ loop_overhead 0.5 0.6 0.4 0.3 0.3 0.3
+
+These results were generated from the variable access benchmark script at:
+``Tools/scripts/var_access_benchmark.py``. The benchmark script displays timings
+in nanoseconds. The benchmarks were measured on an
+`Intel® Core™ i7-4960HQ processor
+<https://en.wikipedia.org/wiki/List_of_Intel_Core_i7_microprocessors#Haswell_microarchitecture_(4th_generation)>`_
+running the macOS 64-bit builds found at
+`python.org <https://www.python.org/downloads/mac-osx/>`_.
Deprecated
==========
+* The distutils ``bdist_msi`` command is now deprecated, use
+ ``bdist_wheel`` (wheel packages) instead.
+ (Contributed by Hugo van Kemenade in :issue:`39586`.)
+
* Currently :func:`math.factorial` accepts :class:`float` instances with
non-negative integer values (like ``5.0``). It raises a :exc:`ValueError`
for non-integral and negative floats. It is now deprecated. In future
@@ -373,6 +696,12 @@ Deprecated
of Python. For the majority of use cases, users can leverage the Abstract Syntax
Tree (AST) generation and compilation stage, using the :mod:`ast` module.
+* Using :data:`NotImplemented` in a boolean context has been deprecated,
+ as it is almost exclusively the result of incorrect rich comparator
+ implementations. It will be made a :exc:`TypeError` in a future version
+ of Python.
+ (Contributed by Josh Rosenberg in :issue:`35712`.)
+
* The :mod:`random` module currently accepts any hashable type as a
possible seed value. Unfortunately, some of those types are not
guaranteed to have a deterministic hash value. After Python 3.9,
@@ -394,7 +723,7 @@ Deprecated
deprecated and will be removed in version 3.11.
(Contributed by Yury Selivanov and Kyle Stanley in :issue:`34790`.)
-* binhex4 and hexbin4 standards are now deprecated. The :`binhex` module
+* binhex4 and hexbin4 standards are now deprecated. The :mod:`binhex` module
and the following :mod:`binascii` functions are now deprecated:
* :func:`~binascii.b2a_hqx`, :func:`~binascii.a2b_hqx`
@@ -402,6 +731,38 @@ Deprecated
(Contributed by Victor Stinner in :issue:`39353`.)
+* :mod:`ast` classes ``slice``, ``Index`` and ``ExtSlice`` are considered deprecated
+ and will be removed in future Python versions. ``value`` itself should be
+ used instead of ``Index(value)``. ``Tuple(slices, Load())`` should be
+ used instead of ``ExtSlice(slices)``.
+ (Contributed by Serhiy Storchaka in :issue:`32892`.)
+
+* :mod:`ast` classes ``Suite``, ``Param``, ``AugLoad`` and ``AugStore``
+ are considered deprecated and will be removed in future Python versions.
+ They were not generated by the parser and not accepted by the code
+ generator in Python 3.
+ (Contributed by Batuhan Taskaya in :issue:`39639` and :issue:`39969`
+ and Serhiy Storchaka in :issue:`39988`.)
+
+* The :c:func:`PyEval_InitThreads` and :c:func:`PyEval_ThreadsInitialized`
+ functions are now deprecated and will be removed in Python 3.11. Calling
+ :c:func:`PyEval_InitThreads` now does nothing. The :term:`GIL` is initialized
+ by :c:func:`Py_Initialize()` since Python 3.7.
+ (Contributed by Victor Stinner in :issue:`39877`.)
+
+* Passing ``None`` as the first argument to the :func:`shlex.split` function
+ has been deprecated. (Contributed by Zackery Spytz in :issue:`33262`.)
+
+* The :mod:`lib2to3` module now emits a :exc:`PendingDeprecationWarning`.
+ Python 3.9 switched to a PEG parser (see :pep:`617`), and Python 3.10 may
+ include new language syntax that is not parsable by lib2to3's LL(1) parser.
+ The ``lib2to3`` module may be removed from the standard library in a future
+ Python version. Consider third-party alternatives such as `LibCST`_ or
+ `parso`_.
+ (Contributed by Carl Meyer in :issue:`40360`.)
+
+.. _LibCST: https://libcst.readthedocs.io/
+.. _parso: https://parso.readthedocs.io/
Removed
=======
@@ -420,11 +781,6 @@ Removed
since Python 3.2.
(Contributed by Victor Stinner in :issue:`38916`.)
-* The abstract base classes in :mod:`collections.abc` no longer are
- exposed in the regular :mod:`collections` module. This will help
- create a clearer distinction between the concrete classes and the abstract
- base classes.
-
* The undocumented ``sys.callstats()`` function has been removed. Since Python
3.7, it was deprecated and always returned :const:`None`. It required a special
build option ``CALL_PROFILE`` which was already removed in Python 3.7.
@@ -453,10 +809,17 @@ Removed
Use :meth:`~threading.Thread.is_alive()` instead.
(Contributed by Dong-hee Na in :issue:`37804`.)
-* Methods ``getchildren()`` and ``getiterator()`` in the
- :mod:`~xml.etree.ElementTree` module have been removed. They were
- deprecated in Python 3.2. Use functions :func:`list` and :func:`iter`
- instead. The ``xml.etree.cElementTree`` module has been removed.
+* Methods ``getchildren()`` and ``getiterator()`` of classes
+ :class:`~xml.etree.ElementTree.ElementTree` and
+ :class:`~xml.etree.ElementTree.Element` in the :mod:`~xml.etree.ElementTree`
+ module have been removed. They were deprecated in Python 3.2.
+ Use ``iter(x)`` or ``list(x)`` instead of ``x.getchildren()`` and
+ ``x.iter()`` or ``list(x.iter())`` instead of ``x.getiterator()``.
+ The ``xml.etree.cElementTree`` module has been removed,
+ use the :mod:`xml.etree.ElementTree` module instead.
+ Since Python 3.3 the ``xml.etree.cElementTree`` module has been deprecated,
+ the ``xml.etree.ElementTree`` module uses a fast implementation whenever
+ available.
(Contributed by Serhiy Storchaka in :issue:`36543`.)
* The old :mod:`plistlib` API has been removed, it was deprecated since Python
@@ -465,9 +828,6 @@ Removed
removed, standard :class:`bytes` objects are always used instead.
(Contributed by Jon Janzen in :issue:`36409`.)
-* The C function ``PyThreadState_DeleteCurrent()`` has been removed. It was not documented.
- (Contributed by Joannah Nanjekye in :issue:`37878`.)
-
* The C function ``PyGen_NeedsFinalizing`` has been removed. It was not
documented, tested, or used anywhere within CPython after the implementation
of :pep:`442`. Patch by Joannah Nanjekye.
@@ -503,6 +863,15 @@ Removed
defining ``COUNT_ALLOCS`` macro.
(Contributed by Victor Stinner in :issue:`39489`.)
+* The ``_field_types`` attribute of the :class:`typing.NamedTuple` class
+ has been removed. It was deprecated since Python 3.8. Use
+ the ``__annotations__`` attribute instead.
+ (Contributed by Serhiy Storchaka in :issue:`40182`.)
+
+* The :meth:`symtable.SymbolTable.has_exec` method has been removed. It was
+ deprecated since 2006, and only returned ``False`` when called.
+ (Contributed by Batuhan Taskaya in :issue:`40208`.)
+
Porting to Python 3.9
=====================
@@ -514,14 +883,6 @@ that may require changes to your code.
Changes in the Python API
-------------------------
-* :func:`open`, :func:`io.open`, :func:`codecs.open` and
- :class:`fileinput.FileInput` no longer accept ``'U'`` ("universal newline")
- in the file mode. This flag was deprecated since Python 3.3. In Python 3, the
- "universal newline" is used by default when a file is open in text mode. The
- :ref:`newline parameter ` of :func:`open` controls
- how universal newlines works.
- (Contributed by Victor Stinner in :issue:`37330`.)
-
* :func:`__import__` and :func:`importlib.util.resolve_name` now raise
:exc:`ImportError` where it previously raised :exc:`ValueError`. Callers
catching the specific exception type and supporting both Python 3.9 and
@@ -538,6 +899,84 @@ Changes in the Python API
since the *buffering* parameter has been removed.
(Contributed by Victor Stinner in :issue:`39357`.)
+* Simplified AST for subscription. Simple indices will be represented by
+ their value, extended slices will be represented as tuples.
+ ``Index(value)`` will return a ``value`` itself, ``ExtSlice(slices)``
+ will return ``Tuple(slices, Load())``.
+ (Contributed by Serhiy Storchaka in :issue:`34822`.)
+
+* The :mod:`importlib` module now ignores the :envvar:`PYTHONCASEOK`
+ environment variable when the :option:`-E` or :option:`-I` command line
+ options are being used.
+
+* The *encoding* parameter has been added to the classes :class:`ftplib.FTP` and
+ :class:`ftplib.FTP_TLS` as a keyword-only parameter, and the default encoding
+ is changed from Latin-1 to UTF-8 to follow :rfc:`2640`.
+
+* :meth:`asyncio.loop.shutdown_default_executor` has been added to
+ :class:`~asyncio.AbstractEventLoop`, meaning alternative event loops that
+ inherit from it should have this method defined.
+ (Contributed by Kyle Stanley in :issue:`34037`.)
+
+* The constant values of future flags in the :mod:`__future__` module
+ is updated in order to prevent collision with compiler flags. Previously
+ ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``.
+ (Contributed by Batuhan Taskaya in :issue:`39562`)
+
+* ``array('u')`` now uses ``wchar_t`` as C type instead of ``Py_UNICODE``.
+ This change doesn't affect its behavior because ``Py_UNICODE`` is an alias
+ of ``wchar_t`` since Python 3.3.
+ (Contributed by Inada Naoki in :issue:`34538`.)
+
+
+Changes in the C API
+--------------------
+
+* Instances of heap-allocated types (such as those created with
+ :c:func:`PyType_FromSpec` and similar APIs) hold a reference to their type
+ object since Python 3.8. As indicated in the "Changes in the C API" of Python
+ 3.8, for the vast majority of cases, there should be no side effect but for
+ types that have a custom :c:member:`~PyTypeObject.tp_traverse` function,
+ ensure that all custom ``tp_traverse`` functions of heap-allocated types
+ visit the object's type.
+
+ Example:
+
+ .. code-block:: c
+
+ int
+ foo_traverse(foo_struct *self, visitproc visit, void *arg) {
+ // Rest of the traverse function
+ #if PY_VERSION_HEX >= 0x03090000
+ // This was not needed before Python 3.9 (Python issue 35810 and 40217)
+ Py_VISIT(Py_TYPE(self));
+ #endif
+ }
+
+ If your traverse function delegates to ``tp_traverse`` of its base class
+ (or another type), ensure that ``Py_TYPE(self)`` is visited only once.
+ Note that only heap types are expected to visit the type in ``tp_traverse``.
+
+ For example, if your ``tp_traverse`` function includes:
+
+ .. code-block:: c
+
+ base->tp_traverse(self, visit, arg)
+
+ then add:
+
+ .. code-block:: c
+
+ #if PY_VERSION_HEX >= 0x03090000
+ // This was not needed before Python 3.9 (Python issue 35810 and 40217)
+ if (base->tp_flags & Py_TPFLAGS_HEAPTYPE) {
+ // a heap type's tp_traverse already visited Py_TYPE(self)
+ } else {
+ Py_VISIT(Py_TYPE(self));
+ }
+ #endif
+
+ (See :issue:`35810` and :issue:`40217` for more information.)
CPython bytecode changes
------------------------
@@ -546,3 +985,173 @@ CPython bytecode changes
:keyword:`assert` statement. Previously, the assert statement would not work
correctly if the :exc:`AssertionError` exception was being shadowed.
(Contributed by Zackery Spytz in :issue:`34880`.)
+
+
+Build Changes
+=============
+
+* Add ``--with-platlibdir`` option to the ``configure`` script: name of the
+ platform-specific library directory, stored in the new :attr:`sys.platlibdir`
+ attribute. See :attr:`sys.platlibdir` attribute for more information.
+ (Contributed by Jan Matějek, Matěj Cepl, Charalampos Stratakis
+ and Victor Stinner in :issue:`1294959`.)
+
+* The ``COUNT_ALLOCS`` special build macro has been removed.
+ (Contributed by Victor Stinner in :issue:`39489`.)
+
+* On non-Windows platforms, the :c:func:`setenv` and :c:func:`unsetenv`
+ functions are now required to build Python.
+ (Contributed by Victor Stinner in :issue:`39395`.)
+
+
+C API Changes
+=============
+
+New Features
+------------
+
+* Add :c:func:`PyFrame_GetCode` function: get a frame code.
+ Add :c:func:`PyFrame_GetBack` function: get the frame next outer frame.
+ (Contributed by Victor Stinner in :issue:`40421`.)
+
+* Add :c:func:`PyFrame_GetLineNumber` to the limited C API.
+ (Contributed by Victor Stinner in :issue:`40421`.)
+
+* Add :c:func:`PyThreadState_GetInterpreter` and
+ :c:func:`PyInterpreterState_Get` functions to get the interpreter.
+ Add :c:func:`PyThreadState_GetFrame` function to get the current frame of a
+ Python thread state.
+ Add :c:func:`PyThreadState_GetID` function: get the unique identifier of a
+ Python thread state.
+ (Contributed by Victor Stinner in :issue:`39947`.)
+
+* Add a new public :c:func:`PyObject_CallNoArgs` function to the C API, which
+ calls a callable Python object without any arguments. It is the most efficient
+ way to call a callable Python object without any argument.
+ (Contributed by Victor Stinner in :issue:`37194`.)
+
+* Changes in the limited C API (if ``Py_LIMITED_API`` macro is defined):
+
+ * Provide :c:func:`Py_EnterRecursiveCall` and :c:func:`Py_LeaveRecursiveCall`
+ as regular functions for the limited API. Previously, there were defined as
+ macros, but these macros didn't compile with the limited C API which cannot
+ access ``PyThreadState.recursion_depth`` field (the structure is opaque in
+ the limited C API).
+
+ * ``PyObject_INIT()`` and ``PyObject_INIT_VAR()`` become regular "opaque"
+ function to hide implementation details.
+
+ (Contributed by Victor Stinner in :issue:`38644` and :issue:`39542`.)
+
+* The :c:func:`PyModule_AddType` function is added to help adding a type
+ to a module.
+ (Contributed by Dong-hee Na in :issue:`40024`.)
+
+* Add the functions :c:func:`PyObject_GC_IsTracked` and
+ :c:func:`PyObject_GC_IsFinalized` to the public API to allow to query if
+ Python objects are being currently tracked or have been already finalized by
+ the garbage collector respectively. (Contributed by Pablo Galindo in
+ :issue:`40241`.)
+
+
+Porting to Python 3.9
+---------------------
+
+* ``PyInterpreterState.eval_frame`` (:pep:`523`) now requires a new mandatory
+ *tstate* parameter (``PyThreadState*``).
+ (Contributed by Victor Stinner in :issue:`38500`.)
+
+* Extension modules: :c:member:`~PyModuleDef.m_traverse`,
+ :c:member:`~PyModuleDef.m_clear` and :c:member:`~PyModuleDef.m_free`
+ functions of :c:type:`PyModuleDef` are no longer called if the module state
+ was requested but is not allocated yet. This is the case immediately after
+ the module is created and before the module is executed
+ (:c:data:`Py_mod_exec` function). More precisely, these functions are not called
+ if :c:member:`~PyModuleDef.m_size` is greater than 0 and the module state (as
+ returned by :c:func:`PyModule_GetState`) is ``NULL``.
+
+ Extension modules without module state (``m_size <= 0``) are not affected.
+
+* If :c:func:`Py_AddPendingCall` is called in a subinterpreter, the function is
+ now scheduled to be called from the subinterpreter, rather than being called
+ from the main interpreter. Each subinterpreter now has its own list of
+ scheduled calls.
+ (Contributed by Victor Stinner in :issue:`39984`.)
+
+* The Windows registry is no longer used to initialize :data:`sys.path` when
+ the ``-E`` option is used (if :c:member:`PyConfig.use_environment` is set to
+ ``0``). This is significant when embedding Python on Windows.
+ (Contributed by Zackery Spytz in :issue:`8901`.)
+
+* The global variable :c:data:`PyStructSequence_UnnamedField` is now a constant
+ and refers to a constant string.
+ (Contributed by Serhiy Storchaka in :issue:`38650`.)
+
+
+Removed
+-------
+
+* Exclude ``PyFPE_START_PROTECT()`` and ``PyFPE_END_PROTECT()`` macros of
+ ``pyfpe.h`` from the limited C API.
+ (Contributed by Victor Stinner in :issue:`38835`.)
+
+* The ``tp_print`` slot of :ref:`PyTypeObject <type-structs>` has been removed.
+ It was used for printing objects to files in Python 2.7 and before. Since
+ Python 3.0, it has been ignored and unused.
+ (Contributed by Jeroen Demeyer in :issue:`36974`.)
+
+* Changes in the limited C API (if ``Py_LIMITED_API`` macro is defined):
+
+ * Exclude the following functions from the limited C API:
+
+ * ``PyThreadState_DeleteCurrent()``
+ (Contributed by Joannah Nanjekye in :issue:`37878`.)
+ * ``_Py_CheckRecursionLimit``
+ * ``_Py_NewReference()``
+ * ``_Py_ForgetReference()``
+ * ``_PyTraceMalloc_NewReference()``
+ * ``_Py_GetRefTotal()``
+ * The trashcan mechanism which never worked in the limited C API.
+ * ``PyTrash_UNWIND_LEVEL``
+ * ``Py_TRASHCAN_BEGIN_CONDITION``
+ * ``Py_TRASHCAN_BEGIN``
+ * ``Py_TRASHCAN_END``
+ * ``Py_TRASHCAN_SAFE_BEGIN``
+ * ``Py_TRASHCAN_SAFE_END``
+
+ * Move following functions and definitions to the internal C API:
+
+ * ``_PyDebug_PrintTotalRefs()``
+ * ``_Py_PrintReferences()``
+ * ``_Py_PrintReferenceAddresses()``
+ * ``_Py_tracemalloc_config``
+ * ``_Py_AddToAllObjects()`` (specific to ``Py_TRACE_REFS`` build)
+
+ (Contributed by Victor Stinner in :issue:`38644` and :issue:`39542`.)
+
+* Remove ``_PyRuntime.getframe`` hook and remove ``_PyThreadState_GetFrame``
+ macro which was an alias to ``_PyRuntime.getframe``. They were only exposed
+ by the internal C API. Remove also ``PyThreadFrameGetter`` type.
+ (Contributed by Victor Stinner in :issue:`39946`.)
+
+* Remove the following functions from the C API. Call :c:func:`PyGC_Collect`
+ explicitly to clear all free lists.
+ (Contributed by Inada Naoki and Victor Stinner in :issue:`37340`,
+ :issue:`38896` and :issue:`40428`.)
+
+ * ``PyAsyncGen_ClearFreeLists()``
+ * ``PyContext_ClearFreeList()``
+ * ``PyDict_ClearFreeList()``
+ * ``PyFloat_ClearFreeList()``
+ * ``PyFrame_ClearFreeList()``
+ * ``PyList_ClearFreeList()``
+ * ``PyMethod_ClearFreeList()`` and ``PyCFunction_ClearFreeList()``:
+ the free lists of bound method objects have been removed.
+ * ``PySet_ClearFreeList()``: the set free list has been removed
+ in Python 3.4.
+ * ``PyTuple_ClearFreeList()``
+ * ``PyUnicode_ClearFreeList()``: the Unicode free list has been removed in
+ Python 3.3.
+
+* Remove ``_PyUnicode_ClearStaticStrings()`` function.
+ (Contributed by Victor Stinner in :issue:`39465`.)
diff --git a/Doc/whatsnew/index.rst b/Doc/whatsnew/index.rst
index 954e38bc6f1e3d..a1ff8ec2889c9d 100644
--- a/Doc/whatsnew/index.rst
+++ b/Doc/whatsnew/index.rst
@@ -11,6 +11,7 @@ anyone wishing to stay up-to-date after a new release.
.. toctree::
:maxdepth: 2
+ 3.10.rst
3.9.rst
3.8.rst
3.7.rst
diff --git a/Grammar/Grammar b/Grammar/Grammar
index 21f7e1a89115b2..170518af74e5a7 100644
--- a/Grammar/Grammar
+++ b/Grammar/Grammar
@@ -14,7 +14,7 @@ single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
file_input: (NEWLINE | stmt)* ENDMARKER
eval_input: testlist NEWLINE* ENDMARKER
-decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
+decorator: '@' namedexpr_test NEWLINE
decorators: decorator+
decorated: decorators (classdef | funcdef | async_funcdef)
diff --git a/Grammar/python.gram b/Grammar/python.gram
new file mode 100644
index 00000000000000..19d9bb36fed5f3
--- /dev/null
+++ b/Grammar/python.gram
@@ -0,0 +1,684 @@
+# Simplified grammar for Python
+
+@bytecode True
+@trailer '''
+void *
+_PyPegen_parse(Parser *p)
+{
+ // Initialize keywords
+ p->keywords = reserved_keywords;
+ p->n_keyword_lists = n_keyword_lists;
+
+ // Run parser
+ void *result = NULL;
+ if (p->start_rule == Py_file_input) {
+ result = file_rule(p);
+ } else if (p->start_rule == Py_single_input) {
+ result = interactive_rule(p);
+ } else if (p->start_rule == Py_eval_input) {
+ result = eval_rule(p);
+ } else if (p->start_rule == Py_func_type_input) {
+ result = func_type_rule(p);
+ } else if (p->start_rule == Py_fstring_input) {
+ result = fstring_rule(p);
+ }
+
+ return result;
+}
+
+// The end
+'''
+file[mod_ty]: a=[statements] ENDMARKER { _PyPegen_make_module(p, a) }
+interactive[mod_ty]: a=statement_newline { Interactive(a, p->arena) }
+eval[mod_ty]: a=expressions NEWLINE* ENDMARKER { Expression(a, p->arena) }
+func_type[mod_ty]: '(' a=[type_expressions] ')' '->' b=expression NEWLINE* ENDMARKER { FunctionType(a, b, p->arena) }
+fstring[expr_ty]: star_expressions
+
+# type_expressions allow */** but ignore them
+type_expressions[asdl_seq*]:
+ | a=','.expression+ ',' '*' b=expression ',' '**' c=expression {
+ _PyPegen_seq_append_to_end(p, CHECK(_PyPegen_seq_append_to_end(p, a, b)), c) }
+ | a=','.expression+ ',' '*' b=expression { _PyPegen_seq_append_to_end(p, a, b) }
+ | a=','.expression+ ',' '**' b=expression { _PyPegen_seq_append_to_end(p, a, b) }
+ | '*' a=expression ',' '**' b=expression {
+ _PyPegen_seq_append_to_end(p, CHECK(_PyPegen_singleton_seq(p, a)), b) }
+ | '*' a=expression { _PyPegen_singleton_seq(p, a) }
+ | '**' a=expression { _PyPegen_singleton_seq(p, a) }
+ | ','.expression+
+
+statements[asdl_seq*]: a=statement+ { _PyPegen_seq_flatten(p, a) }
+statement[asdl_seq*]: a=compound_stmt { _PyPegen_singleton_seq(p, a) } | simple_stmt
+statement_newline[asdl_seq*]:
+ | a=compound_stmt NEWLINE { _PyPegen_singleton_seq(p, a) }
+ | simple_stmt
+ | NEWLINE { _PyPegen_singleton_seq(p, CHECK(_Py_Pass(EXTRA))) }
+ | ENDMARKER { _PyPegen_interactive_exit(p) }
+simple_stmt[asdl_seq*]:
+ | a=small_stmt !';' NEWLINE { _PyPegen_singleton_seq(p, a) } # Not needed, there for speedup
+ | a=';'.small_stmt+ [';'] NEWLINE { a }
+# NOTE: assignment MUST precede expression, else parsing a simple assignment
+# will throw a SyntaxError.
+small_stmt[stmt_ty] (memo):
+ | assignment
+ | e=star_expressions { _Py_Expr(e, EXTRA) }
+ | &'return' return_stmt
+ | &('import' | 'from') import_stmt
+ | &'raise' raise_stmt
+ | 'pass' { _Py_Pass(EXTRA) }
+ | &'del' del_stmt
+ | &'yield' yield_stmt
+ | &'assert' assert_stmt
+ | 'break' { _Py_Break(EXTRA) }
+ | 'continue' { _Py_Continue(EXTRA) }
+ | &'global' global_stmt
+ | &'nonlocal' nonlocal_stmt
+compound_stmt[stmt_ty]:
+ | &('def' | '@' | ASYNC) function_def
+ | &'if' if_stmt
+ | &('class' | '@') class_def
+ | &('with' | ASYNC) with_stmt
+ | &('for' | ASYNC) for_stmt
+ | &'try' try_stmt
+ | &'while' while_stmt
+
+# NOTE: annotated_rhs may start with 'yield'; yield_expr must start with 'yield'
+assignment[stmt_ty]:
+ | a=NAME ':' b=expression c=['=' d=annotated_rhs { d }] {
+ CHECK_VERSION(
+ 6,
+ "Variable annotation syntax is",
+ _Py_AnnAssign(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, c, 1, EXTRA)
+ ) }
+ | a=('(' b=single_target ')' { b }
+ | single_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] {
+ CHECK_VERSION(6, "Variable annotations syntax is", _Py_AnnAssign(a, b, c, 0, EXTRA)) }
+ | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) tc=[TYPE_COMMENT] {
+ _Py_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
+ | a=single_target b=augassign c=(yield_expr | star_expressions) {
+ _Py_AugAssign(a, b->kind, c, EXTRA) }
+ | invalid_assignment
+
+augassign[AugOperator*]:
+ | '+=' { _PyPegen_augoperator(p, Add) }
+ | '-=' { _PyPegen_augoperator(p, Sub) }
+ | '*=' { _PyPegen_augoperator(p, Mult) }
+ | '@=' { CHECK_VERSION(5, "The '@' operator is", _PyPegen_augoperator(p, MatMult)) }
+ | '/=' { _PyPegen_augoperator(p, Div) }
+ | '%=' { _PyPegen_augoperator(p, Mod) }
+ | '&=' { _PyPegen_augoperator(p, BitAnd) }
+ | '|=' { _PyPegen_augoperator(p, BitOr) }
+ | '^=' { _PyPegen_augoperator(p, BitXor) }
+ | '<<=' { _PyPegen_augoperator(p, LShift) }
+ | '>>=' { _PyPegen_augoperator(p, RShift) }
+ | '**=' { _PyPegen_augoperator(p, Pow) }
+ | '//=' { _PyPegen_augoperator(p, FloorDiv) }
+
+global_stmt[stmt_ty]: 'global' a=','.NAME+ {
+ _Py_Global(CHECK(_PyPegen_map_names_to_ids(p, a)), EXTRA) }
+nonlocal_stmt[stmt_ty]: 'nonlocal' a=','.NAME+ {
+ _Py_Nonlocal(CHECK(_PyPegen_map_names_to_ids(p, a)), EXTRA) }
+
+yield_stmt[stmt_ty]: y=yield_expr { _Py_Expr(y, EXTRA) }
+
+assert_stmt[stmt_ty]: 'assert' a=expression b=[',' z=expression { z }] { _Py_Assert(a, b, EXTRA) }
+
+del_stmt[stmt_ty]: 'del' a=del_targets { _Py_Delete(a, EXTRA) }
+
+import_stmt[stmt_ty]: import_name | import_from
+import_name[stmt_ty]: 'import' a=dotted_as_names { _Py_Import(a, EXTRA) }
+# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS
+import_from[stmt_ty]:
+ | 'from' a=('.' | '...')* b=dotted_name 'import' c=import_from_targets {
+ _Py_ImportFrom(b->v.Name.id, c, _PyPegen_seq_count_dots(a), EXTRA) }
+ | 'from' a=('.' | '...')+ 'import' b=import_from_targets {
+ _Py_ImportFrom(NULL, b, _PyPegen_seq_count_dots(a), EXTRA) }
+import_from_targets[asdl_seq*]:
+ | '(' a=import_from_as_names [','] ')' { a }
+ | import_from_as_names !','
+ | '*' { _PyPegen_singleton_seq(p, CHECK(_PyPegen_alias_for_star(p))) }
+ | invalid_import_from_targets
+import_from_as_names[asdl_seq*]:
+ | a=','.import_from_as_name+ { a }
+import_from_as_name[alias_ty]:
+ | a=NAME b=['as' z=NAME { z }] { _Py_alias(a->v.Name.id,
+ (b) ? ((expr_ty) b)->v.Name.id : NULL,
+ p->arena) }
+dotted_as_names[asdl_seq*]:
+ | a=','.dotted_as_name+ { a }
+dotted_as_name[alias_ty]:
+ | a=dotted_name b=['as' z=NAME { z }] { _Py_alias(a->v.Name.id,
+ (b) ? ((expr_ty) b)->v.Name.id : NULL,
+ p->arena) }
+dotted_name[expr_ty]:
+ | a=dotted_name '.' b=NAME { _PyPegen_join_names_with_dot(p, a, b) }
+ | NAME
+
+if_stmt[stmt_ty]:
+ | 'if' a=named_expression ':' b=block c=elif_stmt { _Py_If(a, b, CHECK(_PyPegen_singleton_seq(p, c)), EXTRA) }
+ | 'if' a=named_expression ':' b=block c=[else_block] { _Py_If(a, b, c, EXTRA) }
+elif_stmt[stmt_ty]:
+ | 'elif' a=named_expression ':' b=block c=elif_stmt { _Py_If(a, b, CHECK(_PyPegen_singleton_seq(p, c)), EXTRA) }
+ | 'elif' a=named_expression ':' b=block c=[else_block] { _Py_If(a, b, c, EXTRA) }
+else_block[asdl_seq*]: 'else' ':' b=block { b }
+
+while_stmt[stmt_ty]:
+ | 'while' a=named_expression ':' b=block c=[else_block] { _Py_While(a, b, c, EXTRA) }
+
+for_stmt[stmt_ty]:
+ | 'for' t=star_targets 'in' ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
+ _Py_For(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA) }
+ | ASYNC 'for' t=star_targets 'in' ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] {
+ CHECK_VERSION(5, "Async for loops are", _Py_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) }
+
+with_stmt[stmt_ty]:
+ | 'with' '(' a=','.with_item+ ','? ')' ':' b=block {
+ _Py_With(a, b, NULL, EXTRA) }
+ | 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
+ _Py_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
+ | ASYNC 'with' '(' a=','.with_item+ ','? ')' ':' b=block {
+ CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NULL, EXTRA)) }
+ | ASYNC 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
+ CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) }
+with_item[withitem_ty]:
+ | e=expression o=['as' t=target { t }] { _Py_withitem(e, o, p->arena) }
+
+try_stmt[stmt_ty]:
+ | 'try' ':' b=block f=finally_block { _Py_Try(b, NULL, NULL, f, EXTRA) }
+ | 'try' ':' b=block ex=except_block+ el=[else_block] f=[finally_block] { _Py_Try(b, ex, el, f, EXTRA) }
+except_block[excepthandler_ty]:
+ | 'except' e=expression t=['as' z=NAME { z }] ':' b=block {
+ _Py_ExceptHandler(e, (t) ? ((expr_ty) t)->v.Name.id : NULL, b, EXTRA) }
+ | 'except' ':' b=block { _Py_ExceptHandler(NULL, NULL, b, EXTRA) }
+finally_block[asdl_seq*]: 'finally' ':' a=block { a }
+
+return_stmt[stmt_ty]:
+ | 'return' a=[star_expressions] { _Py_Return(a, EXTRA) }
+
+raise_stmt[stmt_ty]:
+ | 'raise' a=expression b=['from' z=expression { z }] { _Py_Raise(a, b, EXTRA) }
+ | 'raise' { _Py_Raise(NULL, NULL, EXTRA) }
+
+function_def[stmt_ty]:
+ | d=decorators f=function_def_raw { _PyPegen_function_def_decorators(p, d, f) }
+ | function_def_raw
+
+function_def_raw[stmt_ty]:
+ | 'def' n=NAME '(' params=[params] ')' a=['->' z=expression { z }] ':' tc=[func_type_comment] b=block {
+ _Py_FunctionDef(n->v.Name.id,
+ (params) ? params : CHECK(_PyPegen_empty_arguments(p)),
+ b, NULL, a, NEW_TYPE_COMMENT(p, tc), EXTRA) }
+ | ASYNC 'def' n=NAME '(' params=[params] ')' a=['->' z=expression { z }] ':' tc=[func_type_comment] b=block {
+ CHECK_VERSION(
+ 5,
+ "Async functions are",
+ _Py_AsyncFunctionDef(n->v.Name.id,
+ (params) ? params : CHECK(_PyPegen_empty_arguments(p)),
+ b, NULL, a, NEW_TYPE_COMMENT(p, tc), EXTRA)
+ ) }
+func_type_comment[Token*]:
+ | NEWLINE t=TYPE_COMMENT &(NEWLINE INDENT) { t } # Must be followed by indented block
+ | invalid_double_type_comments
+ | TYPE_COMMENT
+
+params[arguments_ty]:
+ | invalid_parameters
+ | parameters
+
+parameters[arguments_ty]:
+ | a=slash_no_default b=param_no_default* c=param_with_default* d=[star_etc] {
+ _PyPegen_make_arguments(p, a, NULL, b, c, d) }
+ | a=slash_with_default b=param_with_default* c=[star_etc] {
+ _PyPegen_make_arguments(p, NULL, a, NULL, b, c) }
+ | a=param_no_default+ b=param_with_default* c=[star_etc] {
+ _PyPegen_make_arguments(p, NULL, NULL, a, b, c) }
+ | a=param_with_default+ b=[star_etc] { _PyPegen_make_arguments(p, NULL, NULL, NULL, a, b)}
+ | a=star_etc { _PyPegen_make_arguments(p, NULL, NULL, NULL, NULL, a) }
+
+# Some duplication here because we can't write (',' | &')'),
+# which is because we don't support empty alternatives (yet).
+#
+slash_no_default[asdl_seq*]:
+ | a=param_no_default+ '/' ',' { a }
+ | a=param_no_default+ '/' &')' { a }
+slash_with_default[SlashWithDefault*]:
+ | a=param_no_default* b=param_with_default+ '/' ',' { _PyPegen_slash_with_default(p, a, b) }
+ | a=param_no_default* b=param_with_default+ '/' &')' { _PyPegen_slash_with_default(p, a, b) }
+
+star_etc[StarEtc*]:
+ | '*' a=param_no_default b=param_maybe_default* c=[kwds] {
+ _PyPegen_star_etc(p, a, b, c) }
+ | '*' ',' b=param_maybe_default+ c=[kwds] {
+ _PyPegen_star_etc(p, NULL, b, c) }
+ | a=kwds { _PyPegen_star_etc(p, NULL, NULL, a) }
+ | invalid_star_etc
+
+kwds[arg_ty]: '**' a=param_no_default { a }
+
+# One parameter. This *includes* a following comma and type comment.
+#
+# There are three styles:
+# - No default
+# - With default
+# - Maybe with default
+#
+# There are two alternative forms of each, to deal with type comments:
+# - Ends in a comma followed by an optional type comment
+# - No comma, optional type comment, must be followed by close paren
+# The latter form is for a final parameter without trailing comma.
+#
+param_no_default[arg_ty]:
+ | a=param ',' tc=TYPE_COMMENT? { _PyPegen_add_type_comment_to_arg(p, a, tc) }
+ | a=param tc=TYPE_COMMENT? &')' { _PyPegen_add_type_comment_to_arg(p, a, tc) }
+param_with_default[NameDefaultPair*]:
+ | a=param c=default ',' tc=TYPE_COMMENT? { _PyPegen_name_default_pair(p, a, c, tc) }
+ | a=param c=default tc=TYPE_COMMENT? &')' { _PyPegen_name_default_pair(p, a, c, tc) }
+param_maybe_default[NameDefaultPair*]:
+ | a=param c=default? ',' tc=TYPE_COMMENT? { _PyPegen_name_default_pair(p, a, c, tc) }
+ | a=param c=default? tc=TYPE_COMMENT? &')' { _PyPegen_name_default_pair(p, a, c, tc) }
+param[arg_ty]: a=NAME b=annotation? { _Py_arg(a->v.Name.id, b, NULL, EXTRA) }
+
+annotation[expr_ty]: ':' a=expression { a }
+default[expr_ty]: '=' a=expression { a }
+
+decorators[asdl_seq*]: a=('@' f=named_expression NEWLINE { f })+ { a }
+
+class_def[stmt_ty]:
+ | a=decorators b=class_def_raw { _PyPegen_class_def_decorators(p, a, b) }
+ | class_def_raw
+class_def_raw[stmt_ty]:
+ | 'class' a=NAME b=['(' z=[arguments] ')' { z }] ':' c=block {
+ _Py_ClassDef(a->v.Name.id,
+ (b) ? ((expr_ty) b)->v.Call.args : NULL,
+ (b) ? ((expr_ty) b)->v.Call.keywords : NULL,
+ c, NULL, EXTRA) }
+
+block[asdl_seq*] (memo):
+ | NEWLINE INDENT a=statements DEDENT { a }
+ | simple_stmt
+ | invalid_block
+
+expressions_list[asdl_seq*]: a=','.star_expression+ [','] { a }
+star_expressions[expr_ty]:
+ | a=star_expression b=(',' c=star_expression { c })+ [','] {
+ _Py_Tuple(CHECK(_PyPegen_seq_insert_in_front(p, a, b)), Load, EXTRA) }
+ | a=star_expression ',' { _Py_Tuple(CHECK(_PyPegen_singleton_seq(p, a)), Load, EXTRA) }
+ | star_expression
+star_expression[expr_ty] (memo):
+ | '*' a=bitwise_or { _Py_Starred(a, Load, EXTRA) }
+ | expression
+
+star_named_expressions[asdl_seq*]: a=','.star_named_expression+ [','] { a }
+star_named_expression[expr_ty]:
+ | '*' a=bitwise_or { _Py_Starred(a, Load, EXTRA) }
+ | named_expression
+named_expression[expr_ty]:
+ | a=NAME ':=' b=expression { _Py_NamedExpr(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, EXTRA) }
+ | expression !':='
+ | invalid_named_expression
+
+annotated_rhs[expr_ty]: yield_expr | star_expressions
+
+expressions[expr_ty]:
+ | a=expression b=(',' c=expression { c })+ [','] {
+ _Py_Tuple(CHECK(_PyPegen_seq_insert_in_front(p, a, b)), Load, EXTRA) }
+ | a=expression ',' { _Py_Tuple(CHECK(_PyPegen_singleton_seq(p, a)), Load, EXTRA) }
+ | expression
+expression[expr_ty] (memo):
+ | a=disjunction 'if' b=disjunction 'else' c=expression { _Py_IfExp(b, a, c, EXTRA) }
+ | disjunction
+ | lambdef
+
+lambdef[expr_ty]:
+ | 'lambda' a=[lambda_parameters] ':' b=expression { _Py_Lambda((a) ? a : CHECK(_PyPegen_empty_arguments(p)), b, EXTRA) }
+
+# lambda_parameters etc. duplicates parameters but without annotations
+# or type comments, and if there's no comma after a parameter, we expect
+# a colon, not a close parenthesis. (For more, see parameters above.)
+#
+lambda_parameters[arguments_ty]:
+ | a=lambda_slash_no_default b=lambda_param_no_default* c=lambda_param_with_default* d=[lambda_star_etc] {
+ _PyPegen_make_arguments(p, a, NULL, b, c, d) }
+ | a=lambda_slash_with_default b=lambda_param_with_default* c=[lambda_star_etc] {
+ _PyPegen_make_arguments(p, NULL, a, NULL, b, c) }
+ | a=lambda_param_no_default+ b=lambda_param_with_default* c=[lambda_star_etc] {
+ _PyPegen_make_arguments(p, NULL, NULL, a, b, c) }
+ | a=lambda_param_with_default+ b=[lambda_star_etc] { _PyPegen_make_arguments(p, NULL, NULL, NULL, a, b)}
+ | a=lambda_star_etc { _PyPegen_make_arguments(p, NULL, NULL, NULL, NULL, a) }
+
+lambda_slash_no_default[asdl_seq*]:
+ | a=lambda_param_no_default+ '/' ',' { a }
+ | a=lambda_param_no_default+ '/' &':' { a }
+lambda_slash_with_default[SlashWithDefault*]:
+ | a=lambda_param_no_default* b=lambda_param_with_default+ '/' ',' { _PyPegen_slash_with_default(p, a, b) }
+ | a=lambda_param_no_default* b=lambda_param_with_default+ '/' &':' { _PyPegen_slash_with_default(p, a, b) }
+
+lambda_star_etc[StarEtc*]:
+ | '*' a=lambda_param_no_default b=lambda_param_maybe_default* c=[lambda_kwds] {
+ _PyPegen_star_etc(p, a, b, c) }
+ | '*' ',' b=lambda_param_maybe_default+ c=[lambda_kwds] {
+ _PyPegen_star_etc(p, NULL, b, c) }
+ | a=lambda_kwds { _PyPegen_star_etc(p, NULL, NULL, a) }
+ | invalid_lambda_star_etc
+
+lambda_kwds[arg_ty]: '**' a=lambda_param_no_default { a }
+
+lambda_param_no_default[arg_ty]:
+ | a=lambda_param ',' { a }
+ | a=lambda_param &':' { a }
+lambda_param_with_default[NameDefaultPair*]:
+ | a=lambda_param c=default ',' { _PyPegen_name_default_pair(p, a, c, NULL) }
+ | a=lambda_param c=default &':' { _PyPegen_name_default_pair(p, a, c, NULL) }
+lambda_param_maybe_default[NameDefaultPair*]:
+ | a=lambda_param c=default? ',' { _PyPegen_name_default_pair(p, a, c, NULL) }
+ | a=lambda_param c=default? &':' { _PyPegen_name_default_pair(p, a, c, NULL) }
+lambda_param[arg_ty]: a=NAME { _Py_arg(a->v.Name.id, NULL, NULL, EXTRA) }
+
+disjunction[expr_ty] (memo):
+ | a=conjunction b=('or' c=conjunction { c })+ { _Py_BoolOp(
+ Or,
+ CHECK(_PyPegen_seq_insert_in_front(p, a, b)),
+ EXTRA) }
+ | conjunction
+conjunction[expr_ty] (memo):
+ | a=inversion b=('and' c=inversion { c })+ { _Py_BoolOp(
+ And,
+ CHECK(_PyPegen_seq_insert_in_front(p, a, b)),
+ EXTRA) }
+ | inversion
+inversion[expr_ty] (memo):
+ | 'not' a=inversion { _Py_UnaryOp(Not, a, EXTRA) }
+ | comparison
+comparison[expr_ty]:
+ | a=bitwise_or b=compare_op_bitwise_or_pair+ {
+ _Py_Compare(a, CHECK(_PyPegen_get_cmpops(p, b)), CHECK(_PyPegen_get_exprs(p, b)), EXTRA) }
+ | bitwise_or
+compare_op_bitwise_or_pair[CmpopExprPair*]:
+ | eq_bitwise_or
+ | noteq_bitwise_or
+ | lte_bitwise_or
+ | lt_bitwise_or
+ | gte_bitwise_or
+ | gt_bitwise_or
+ | notin_bitwise_or
+ | in_bitwise_or
+ | isnot_bitwise_or
+ | is_bitwise_or
+eq_bitwise_or[CmpopExprPair*]: '==' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, Eq, a) }
+noteq_bitwise_or[CmpopExprPair*]:
+ | (tok='!=' {_PyPegen_check_barry_as_flufl(p) ? NULL : tok}) a=bitwise_or {_PyPegen_cmpop_expr_pair(p, NotEq, a) }
+lte_bitwise_or[CmpopExprPair*]: '<=' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, LtE, a) }
+lt_bitwise_or[CmpopExprPair*]: '<' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, Lt, a) }
+gte_bitwise_or[CmpopExprPair*]: '>=' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, GtE, a) }
+gt_bitwise_or[CmpopExprPair*]: '>' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, Gt, a) }
+notin_bitwise_or[CmpopExprPair*]: 'not' 'in' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, NotIn, a) }
+in_bitwise_or[CmpopExprPair*]: 'in' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, In, a) }
+isnot_bitwise_or[CmpopExprPair*]: 'is' 'not' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, IsNot, a) }
+is_bitwise_or[CmpopExprPair*]: 'is' a=bitwise_or { _PyPegen_cmpop_expr_pair(p, Is, a) }
+
+bitwise_or[expr_ty]:
+ | a=bitwise_or '|' b=bitwise_xor { _Py_BinOp(a, BitOr, b, EXTRA) }
+ | bitwise_xor
+bitwise_xor[expr_ty]:
+ | a=bitwise_xor '^' b=bitwise_and { _Py_BinOp(a, BitXor, b, EXTRA) }
+ | bitwise_and
+bitwise_and[expr_ty]:
+ | a=bitwise_and '&' b=shift_expr { _Py_BinOp(a, BitAnd, b, EXTRA) }
+ | shift_expr
+shift_expr[expr_ty]:
+ | a=shift_expr '<<' b=sum { _Py_BinOp(a, LShift, b, EXTRA) }
+ | a=shift_expr '>>' b=sum { _Py_BinOp(a, RShift, b, EXTRA) }
+ | sum
+
+sum[expr_ty]:
+ | a=sum '+' b=term { _Py_BinOp(a, Add, b, EXTRA) }
+ | a=sum '-' b=term { _Py_BinOp(a, Sub, b, EXTRA) }
+ | term
+term[expr_ty]:
+ | a=term '*' b=factor { _Py_BinOp(a, Mult, b, EXTRA) }
+ | a=term '/' b=factor { _Py_BinOp(a, Div, b, EXTRA) }
+ | a=term '//' b=factor { _Py_BinOp(a, FloorDiv, b, EXTRA) }
+ | a=term '%' b=factor { _Py_BinOp(a, Mod, b, EXTRA) }
+ | a=term '@' b=factor { CHECK_VERSION(5, "The '@' operator is", _Py_BinOp(a, MatMult, b, EXTRA)) }
+ | factor
+factor[expr_ty] (memo):
+ | '+' a=factor { _Py_UnaryOp(UAdd, a, EXTRA) }
+ | '-' a=factor { _Py_UnaryOp(USub, a, EXTRA) }
+ | '~' a=factor { _Py_UnaryOp(Invert, a, EXTRA) }
+ | power
+power[expr_ty]:
+ | a=await_primary '**' b=factor { _Py_BinOp(a, Pow, b, EXTRA) }
+ | await_primary
+await_primary[expr_ty] (memo):
+ | AWAIT a=primary { CHECK_VERSION(5, "Await expressions are", _Py_Await(a, EXTRA)) }
+ | primary
+primary[expr_ty]:
+ | a=primary '.' b=NAME { _Py_Attribute(a, b->v.Name.id, Load, EXTRA) }
+ | a=primary b=genexp { _Py_Call(a, CHECK(_PyPegen_singleton_seq(p, b)), NULL, EXTRA) }
+ | a=primary '(' b=[arguments] ')' {
+ _Py_Call(a,
+ (b) ? ((expr_ty) b)->v.Call.args : NULL,
+ (b) ? ((expr_ty) b)->v.Call.keywords : NULL,
+ EXTRA) }
+ | a=primary '[' b=slices ']' { _Py_Subscript(a, b, Load, EXTRA) }
+ | atom
+
+slices[expr_ty]:
+ | a=slice !',' { a }
+ | a=','.slice+ [','] { _Py_Tuple(a, Load, EXTRA) }
+slice[expr_ty]:
+ | a=[expression] ':' b=[expression] c=[':' d=[expression] { d }] { _Py_Slice(a, b, c, EXTRA) }
+ | a=expression { a }
+atom[expr_ty]:
+ | NAME
+ | 'True' { _Py_Constant(Py_True, NULL, EXTRA) }
+ | 'False' { _Py_Constant(Py_False, NULL, EXTRA) }
+ | 'None' { _Py_Constant(Py_None, NULL, EXTRA) }
+ | '__new_parser__' { RAISE_SYNTAX_ERROR("You found it!") }
+ | &STRING strings
+ | NUMBER
+ | &'(' (tuple | group | genexp)
+ | &'[' (list | listcomp)
+ | &'{' (dict | set | dictcomp | setcomp)
+ | '...' { _Py_Constant(Py_Ellipsis, NULL, EXTRA) }
+
+strings[expr_ty] (memo): a=STRING+ { _PyPegen_concatenate_strings(p, a) }
+list[expr_ty]:
+ | '[' a=[star_named_expressions] ']' { _Py_List(a, Load, EXTRA) }
+listcomp[expr_ty]:
+ | '[' a=named_expression b=for_if_clauses ']' { _Py_ListComp(a, b, EXTRA) }
+ | invalid_comprehension
+tuple[expr_ty]:
+ | '(' a=[y=star_named_expression ',' z=[star_named_expressions] { _PyPegen_seq_insert_in_front(p, y, z) } ] ')' {
+ _Py_Tuple(a, Load, EXTRA) }
+group[expr_ty]: '(' a=(yield_expr | named_expression) ')' { a }
+genexp[expr_ty]:
+ | '(' a=expression b=for_if_clauses ')' { _Py_GeneratorExp(a, b, EXTRA) }
+ | invalid_comprehension
+set[expr_ty]: '{' a=expressions_list '}' { _Py_Set(a, EXTRA) }
+setcomp[expr_ty]:
+ | '{' a=expression b=for_if_clauses '}' { _Py_SetComp(a, b, EXTRA) }
+ | invalid_comprehension
+dict[expr_ty]:
+ | '{' a=[double_starred_kvpairs] '}' {
+ _Py_Dict(CHECK(_PyPegen_get_keys(p, a)), CHECK(_PyPegen_get_values(p, a)), EXTRA) }
+dictcomp[expr_ty]:
+ | '{' a=kvpair b=for_if_clauses '}' { _Py_DictComp(a->key, a->value, b, EXTRA) }
+ | invalid_dict_comprehension
+double_starred_kvpairs[asdl_seq*]: a=','.double_starred_kvpair+ [','] { a }
+double_starred_kvpair[KeyValuePair*]:
+ | '**' a=bitwise_or { _PyPegen_key_value_pair(p, NULL, a) }
+ | kvpair
+kvpair[KeyValuePair*]: a=expression ':' b=expression { _PyPegen_key_value_pair(p, a, b) }
+for_if_clauses[asdl_seq*]:
+ | for_if_clause+
+for_if_clause[comprehension_ty]:
+ | ASYNC 'for' a=star_targets 'in' b=disjunction c=('if' z=disjunction { z })* {
+ CHECK_VERSION(6, "Async comprehensions are", _Py_comprehension(a, b, c, 1, p->arena)) }
+ | 'for' a=star_targets 'in' b=disjunction c=('if' z=disjunction { z })* {
+ _Py_comprehension(a, b, c, 0, p->arena) }
+
+yield_expr[expr_ty]:
+ | 'yield' 'from' a=expression { _Py_YieldFrom(a, EXTRA) }
+ | 'yield' a=[star_expressions] { _Py_Yield(a, EXTRA) }
+
+arguments[expr_ty] (memo):
+ | a=args [','] &')' { a }
+ | incorrect_arguments
+args[expr_ty]:
+ | a=starred_expression b=[',' c=args { c }] {
+ _Py_Call(_PyPegen_dummy_name(p),
+ (b) ? CHECK(_PyPegen_seq_insert_in_front(p, a, ((expr_ty) b)->v.Call.args))
+ : CHECK(_PyPegen_singleton_seq(p, a)),
+ (b) ? ((expr_ty) b)->v.Call.keywords : NULL,
+ EXTRA) }
+ | a=kwargs { _Py_Call(_PyPegen_dummy_name(p),
+ CHECK_NULL_ALLOWED(_PyPegen_seq_extract_starred_exprs(p, a)),
+ CHECK_NULL_ALLOWED(_PyPegen_seq_delete_starred_exprs(p, a)),
+ EXTRA) }
+ | a=named_expression b=[',' c=args { c }] {
+ _Py_Call(_PyPegen_dummy_name(p),
+ (b) ? CHECK(_PyPegen_seq_insert_in_front(p, a, ((expr_ty) b)->v.Call.args))
+ : CHECK(_PyPegen_singleton_seq(p, a)),
+ (b) ? ((expr_ty) b)->v.Call.keywords : NULL,
+ EXTRA) }
+kwargs[asdl_seq*]:
+ | a=','.kwarg_or_starred+ ',' b=','.kwarg_or_double_starred+ { _PyPegen_join_sequences(p, a, b) }
+ | ','.kwarg_or_starred+
+ | ','.kwarg_or_double_starred+
+starred_expression[expr_ty]:
+ | '*' a=expression { _Py_Starred(a, Load, EXTRA) }
+kwarg_or_starred[KeywordOrStarred*]:
+ | a=NAME '=' b=expression {
+ _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) }
+ | a=starred_expression { _PyPegen_keyword_or_starred(p, a, 0) }
+ | invalid_kwarg
+kwarg_or_double_starred[KeywordOrStarred*]:
+ | a=NAME '=' b=expression {
+ _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) }
+ | '**' a=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(NULL, a, EXTRA)), 1) }
+ | invalid_kwarg
+
+# NOTE: star_targets may contain *bitwise_or, targets may not.
+star_targets[expr_ty]:
+ | a=star_target !',' { a }
+ | a=star_target b=(',' c=star_target { c })* [','] {
+ _Py_Tuple(CHECK(_PyPegen_seq_insert_in_front(p, a, b)), Store, EXTRA) }
+star_targets_seq[asdl_seq*]: a=','.star_target+ [','] { a }
+star_target[expr_ty] (memo):
+ | '*' a=(!'*' star_target) {
+ _Py_Starred(CHECK(_PyPegen_set_expr_context(p, a, Store)), Store, EXTRA) }
+ | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) }
+ | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) }
+ | star_atom
+star_atom[expr_ty]:
+ | a=NAME { _PyPegen_set_expr_context(p, a, Store) }
+ | '(' a=star_target ')' { _PyPegen_set_expr_context(p, a, Store) }
+ | '(' a=[star_targets_seq] ')' { _Py_Tuple(a, Store, EXTRA) }
+ | '[' a=[star_targets_seq] ']' { _Py_List(a, Store, EXTRA) }
+
+single_target[expr_ty]:
+ | single_subscript_attribute_target
+ | a=NAME { _PyPegen_set_expr_context(p, a, Store) }
+ | '(' a=single_target ')' { a }
+single_subscript_attribute_target[expr_ty]:
+ | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) }
+ | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) }
+
+del_targets[asdl_seq*]: a=','.del_target+ [','] { a }
+# The lookaheads to del_target_end ensure that we don't match expressions where a prefix of the
+# expression matches our rule, thereby letting these cases fall through to invalid_del_target.
+del_target[expr_ty] (memo):
+ | a=t_primary '.' b=NAME &del_target_end { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) }
+ | a=t_primary '[' b=slices ']' &del_target_end { _Py_Subscript(a, b, Del, EXTRA) }
+ | del_t_atom
+del_t_atom[expr_ty]:
+ | a=NAME &del_target_end { _PyPegen_set_expr_context(p, a, Del) }
+ | '(' a=del_target ')' { _PyPegen_set_expr_context(p, a, Del) }
+ | '(' a=[del_targets] ')' { _Py_Tuple(a, Del, EXTRA) }
+ | '[' a=[del_targets] ']' { _Py_List(a, Del, EXTRA) }
+ | invalid_del_target
+del_target_end: ')' | ']' | ',' | ';' | NEWLINE
+
+targets[asdl_seq*]: a=','.target+ [','] { a }
+target[expr_ty] (memo):
+ | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) }
+ | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) }
+ | t_atom
+t_primary[expr_ty]:
+ | a=t_primary '.' b=NAME &t_lookahead { _Py_Attribute(a, b->v.Name.id, Load, EXTRA) }
+ | a=t_primary '[' b=slices ']' &t_lookahead { _Py_Subscript(a, b, Load, EXTRA) }
+ | a=t_primary b=genexp &t_lookahead { _Py_Call(a, CHECK(_PyPegen_singleton_seq(p, b)), NULL, EXTRA) }
+ | a=t_primary '(' b=[arguments] ')' &t_lookahead {
+ _Py_Call(a,
+ (b) ? ((expr_ty) b)->v.Call.args : NULL,
+ (b) ? ((expr_ty) b)->v.Call.keywords : NULL,
+ EXTRA) }
+ | a=atom &t_lookahead { a }
+t_lookahead: '(' | '[' | '.'
+t_atom[expr_ty]:
+ | a=NAME { _PyPegen_set_expr_context(p, a, Store) }
+ | '(' a=target ')' { _PyPegen_set_expr_context(p, a, Store) }
+ | '(' b=[targets] ')' { _Py_Tuple(b, Store, EXTRA) }
+ | '[' b=[targets] ']' { _Py_List(b, Store, EXTRA) }
+
+
+# From here on, there are rules for invalid syntax with specialised error messages
+incorrect_arguments:
+ | args ',' '*' { RAISE_SYNTAX_ERROR("iterable argument unpacking follows keyword argument unpacking") }
+ | a=expression for_if_clauses ',' [args | expression for_if_clauses] {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "Generator expression must be parenthesized") }
+ | a=args for_if_clauses { _PyPegen_nonparen_genexp_in_call(p, a) }
+ | args ',' a=expression for_if_clauses {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "Generator expression must be parenthesized") }
+ | a=args ',' args { _PyPegen_arguments_parsing_error(p, a) }
+invalid_kwarg:
+ | a=expression '=' {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
+ a, "expression cannot contain assignment, perhaps you meant \"==\"?") }
+invalid_named_expression:
+ | a=expression ':=' expression {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
+ a, "cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) }
+invalid_assignment:
+ | a=list ':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not list) can be annotated") }
+ | a=tuple ':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") }
+ | a=star_named_expression ',' star_named_expressions* ':' {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") }
+ | a=expression ':' expression ['=' annotated_rhs] {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") }
+ | a=star_expressions '=' (yield_expr | star_expressions) {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
+ _PyPegen_get_invalid_target(a),
+ "cannot assign to %s", _PyPegen_get_expr_name(_PyPegen_get_invalid_target(a))) }
+ | a=star_expressions augassign (yield_expr | star_expressions) {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
+ a,
+ "'%s' is an illegal expression for augmented assignment",
+ _PyPegen_get_expr_name(a)
+ )}
+
+invalid_block:
+ | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") }
+invalid_comprehension:
+ | ('[' | '(' | '{') a=starred_expression for_if_clauses {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "iterable unpacking cannot be used in comprehension") }
+invalid_dict_comprehension:
+ | '{' a='**' bitwise_or for_if_clauses '}' {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "dict unpacking cannot be used in dict comprehension") }
+invalid_parameters:
+ | param_no_default* (slash_with_default | param_with_default+) param_no_default {
+ RAISE_SYNTAX_ERROR("non-default argument follows default argument") }
+invalid_star_etc:
+ | '*' (')' | ',' (')' | '**')) { RAISE_SYNTAX_ERROR("named arguments must follow bare *") }
+ | '*' ',' TYPE_COMMENT { RAISE_SYNTAX_ERROR("bare * has associated type comment") }
+invalid_lambda_star_etc:
+ | '*' (':' | ',' (':' | '**')) { RAISE_SYNTAX_ERROR("named arguments must follow bare *") }
+invalid_double_type_comments:
+ | TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT {
+ RAISE_SYNTAX_ERROR("Cannot have two type comments on def") }
+invalid_del_target:
+ | a=star_expression &del_target_end {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot delete %s", _PyPegen_get_expr_name(a)) }
+invalid_import_from_targets:
+ | import_from_as_names ',' {
+ RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") }
diff --git a/Include/Python-ast.h b/Include/Python-ast.h
index b9232b1d463074..e7afa1e6579e8d 100644
--- a/Include/Python-ast.h
+++ b/Include/Python-ast.h
@@ -17,10 +17,7 @@ typedef struct _stmt *stmt_ty;
typedef struct _expr *expr_ty;
-typedef enum _expr_context { Load=1, Store=2, Del=3, AugLoad=4, AugStore=5,
- Param=6 } expr_context_ty;
-
-typedef struct _slice *slice_ty;
+typedef enum _expr_context { Load=1, Store=2, Del=3 } expr_context_ty;
typedef enum _boolop { And=1, Or=2 } boolop_ty;
@@ -51,7 +48,7 @@ typedef struct _type_ignore *type_ignore_ty;
enum _mod_kind {Module_kind=1, Interactive_kind=2, Expression_kind=3,
- FunctionType_kind=4, Suite_kind=5};
+ FunctionType_kind=4};
struct _mod {
enum _mod_kind kind;
union {
@@ -73,10 +70,6 @@ struct _mod {
expr_ty returns;
} FunctionType;
- struct {
- asdl_seq *body;
- } Suite;
-
} v;
};
@@ -237,7 +230,7 @@ enum _expr_kind {BoolOp_kind=1, NamedExpr_kind=2, BinOp_kind=3, UnaryOp_kind=4,
YieldFrom_kind=15, Compare_kind=16, Call_kind=17,
FormattedValue_kind=18, JoinedStr_kind=19, Constant_kind=20,
Attribute_kind=21, Subscript_kind=22, Starred_kind=23,
- Name_kind=24, List_kind=25, Tuple_kind=26};
+ Name_kind=24, List_kind=25, Tuple_kind=26, Slice_kind=27};
struct _expr {
enum _expr_kind kind;
union {
@@ -350,7 +343,7 @@ struct _expr {
struct {
expr_ty value;
- slice_ty slice;
+ expr_ty slice;
expr_context_ty ctx;
} Subscript;
@@ -374,32 +367,17 @@ struct _expr {
expr_context_ty ctx;
} Tuple;
- } v;
- int lineno;
- int col_offset;
- int end_lineno;
- int end_col_offset;
-};
-
-enum _slice_kind {Slice_kind=1, ExtSlice_kind=2, Index_kind=3};
-struct _slice {
- enum _slice_kind kind;
- union {
struct {
expr_ty lower;
expr_ty upper;
expr_ty step;
} Slice;
- struct {
- asdl_seq *dims;
- } ExtSlice;
-
- struct {
- expr_ty value;
- } Index;
-
} v;
+ int lineno;
+ int col_offset;
+ int end_lineno;
+ int end_col_offset;
};
struct _comprehension {
@@ -449,6 +427,10 @@ struct _arg {
struct _keyword {
identifier arg;
expr_ty value;
+ int lineno;
+ int col_offset;
+ int end_lineno;
+ int end_col_offset;
};
struct _alias {
@@ -483,8 +465,6 @@ mod_ty _Py_Interactive(asdl_seq * body, PyArena *arena);
mod_ty _Py_Expression(expr_ty body, PyArena *arena);
#define FunctionType(a0, a1, a2) _Py_FunctionType(a0, a1, a2)
mod_ty _Py_FunctionType(asdl_seq * argtypes, expr_ty returns, PyArena *arena);
-#define Suite(a0, a1) _Py_Suite(a0, a1)
-mod_ty _Py_Suite(asdl_seq * body, PyArena *arena);
#define FunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10) _Py_FunctionDef(a0, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10)
stmt_ty _Py_FunctionDef(identifier name, arguments_ty args, asdl_seq * body,
asdl_seq * decorator_list, expr_ty returns, string
@@ -654,7 +634,7 @@ expr_ty _Py_Attribute(expr_ty value, identifier attr, expr_context_ty ctx, int
lineno, int col_offset, int end_lineno, int
end_col_offset, PyArena *arena);
#define Subscript(a0, a1, a2, a3, a4, a5, a6, a7) _Py_Subscript(a0, a1, a2, a3, a4, a5, a6, a7)
-expr_ty _Py_Subscript(expr_ty value, slice_ty slice, expr_context_ty ctx, int
+expr_ty _Py_Subscript(expr_ty value, expr_ty slice, expr_context_ty ctx, int
lineno, int col_offset, int end_lineno, int
end_col_offset, PyArena *arena);
#define Starred(a0, a1, a2, a3, a4, a5, a6) _Py_Starred(a0, a1, a2, a3, a4, a5, a6)
@@ -673,12 +653,10 @@ expr_ty _Py_List(asdl_seq * elts, expr_context_ty ctx, int lineno, int
expr_ty _Py_Tuple(asdl_seq * elts, expr_context_ty ctx, int lineno, int
col_offset, int end_lineno, int end_col_offset, PyArena
*arena);
-#define Slice(a0, a1, a2, a3) _Py_Slice(a0, a1, a2, a3)
-slice_ty _Py_Slice(expr_ty lower, expr_ty upper, expr_ty step, PyArena *arena);
-#define ExtSlice(a0, a1) _Py_ExtSlice(a0, a1)
-slice_ty _Py_ExtSlice(asdl_seq * dims, PyArena *arena);
-#define Index(a0, a1) _Py_Index(a0, a1)
-slice_ty _Py_Index(expr_ty value, PyArena *arena);
+#define Slice(a0, a1, a2, a3, a4, a5, a6, a7) _Py_Slice(a0, a1, a2, a3, a4, a5, a6, a7)
+expr_ty _Py_Slice(expr_ty lower, expr_ty upper, expr_ty step, int lineno, int
+ col_offset, int end_lineno, int end_col_offset, PyArena
+ *arena);
#define comprehension(a0, a1, a2, a3, a4) _Py_comprehension(a0, a1, a2, a3, a4)
comprehension_ty _Py_comprehension(expr_ty target, expr_ty iter, asdl_seq *
ifs, int is_async, PyArena *arena);
@@ -696,8 +674,10 @@ arguments_ty _Py_arguments(asdl_seq * posonlyargs, asdl_seq * args, arg_ty
arg_ty _Py_arg(identifier arg, expr_ty annotation, string type_comment, int
lineno, int col_offset, int end_lineno, int end_col_offset,
PyArena *arena);
-#define keyword(a0, a1, a2) _Py_keyword(a0, a1, a2)
-keyword_ty _Py_keyword(identifier arg, expr_ty value, PyArena *arena);
+#define keyword(a0, a1, a2, a3, a4, a5, a6) _Py_keyword(a0, a1, a2, a3, a4, a5, a6)
+keyword_ty _Py_keyword(identifier arg, expr_ty value, int lineno, int
+ col_offset, int end_lineno, int end_col_offset, PyArena
+ *arena);
#define alias(a0, a1, a2) _Py_alias(a0, a1, a2)
alias_ty _Py_alias(identifier name, identifier asname, PyArena *arena);
#define withitem(a0, a1, a2) _Py_withitem(a0, a1, a2)
diff --git a/Include/Python.h b/Include/Python.h
index d6e5b139ac6796..dcd0a57ac1f03f 100644
--- a/Include/Python.h
+++ b/Include/Python.h
@@ -114,12 +114,15 @@
#include "classobject.h"
#include "fileobject.h"
#include "pycapsule.h"
+#include "code.h"
+#include "pyframe.h"
#include "traceback.h"
#include "sliceobject.h"
#include "cellobject.h"
#include "iterobject.h"
#include "genobject.h"
#include "descrobject.h"
+#include "genericaliasobject.h"
#include "warnings.h"
#include "weakrefobject.h"
#include "structseq.h"
@@ -130,6 +133,7 @@
#include "pyerrors.h"
#include "cpython/initconfig.h"
+#include "pythread.h"
#include "pystate.h"
#include "context.h"
@@ -152,7 +156,6 @@
#include "pyctype.h"
#include "pystrtod.h"
#include "pystrcmp.h"
-#include "dtoa.h"
#include "fileutils.h"
#include "pyfpe.h"
#include "tracemalloc.h"
diff --git a/Include/asdl.h b/Include/asdl.h
index 549df2ace7555e..e962560bcd4cbe 100644
--- a/Include/asdl.h
+++ b/Include/asdl.h
@@ -4,9 +4,7 @@
typedef PyObject * identifier;
typedef PyObject * string;
-typedef PyObject * bytes;
typedef PyObject * object;
-typedef PyObject * singleton;
typedef PyObject * constant;
/* It would be nice if the code generated by asdl_c.py was completely
diff --git a/Include/boolobject.h b/Include/boolobject.h
index 7cc2f1fe23937a..bb8044a2b02cf6 100644
--- a/Include/boolobject.h
+++ b/Include/boolobject.h
@@ -9,7 +9,7 @@ extern "C" {
PyAPI_DATA(PyTypeObject) PyBool_Type;
-#define PyBool_Check(x) (Py_TYPE(x) == &PyBool_Type)
+#define PyBool_Check(x) Py_IS_TYPE(x, &PyBool_Type)
/* Py_False and Py_True are the only two bools in existence.
Don't forget to apply Py_INCREF() when returning either!!! */
diff --git a/Include/bytearrayobject.h b/Include/bytearrayobject.h
index 647a17a819d23f..9e95433f0f26f0 100644
--- a/Include/bytearrayobject.h
+++ b/Include/bytearrayobject.h
@@ -18,24 +18,13 @@ extern "C" {
* to contain a char pointer, not an unsigned char pointer.
*/
-/* Object layout */
-#ifndef Py_LIMITED_API
-typedef struct {
- PyObject_VAR_HEAD
- Py_ssize_t ob_alloc; /* How many bytes allocated in ob_bytes */
- char *ob_bytes; /* Physical backing buffer */
- char *ob_start; /* Logical start inside ob_bytes */
- Py_ssize_t ob_exports; /* How many buffer exports */
-} PyByteArrayObject;
-#endif
-
/* Type object */
PyAPI_DATA(PyTypeObject) PyByteArray_Type;
PyAPI_DATA(PyTypeObject) PyByteArrayIter_Type;
/* Type check macros */
#define PyByteArray_Check(self) PyObject_TypeCheck(self, &PyByteArray_Type)
-#define PyByteArray_CheckExact(self) (Py_TYPE(self) == &PyByteArray_Type)
+#define PyByteArray_CheckExact(self) Py_IS_TYPE(self, &PyByteArray_Type)
/* Direct API functions */
PyAPI_FUNC(PyObject *) PyByteArray_FromObject(PyObject *);
@@ -45,14 +34,10 @@ PyAPI_FUNC(Py_ssize_t) PyByteArray_Size(PyObject *);
PyAPI_FUNC(char *) PyByteArray_AsString(PyObject *);
PyAPI_FUNC(int) PyByteArray_Resize(PyObject *, Py_ssize_t);
-/* Macros, trading safety for speed */
#ifndef Py_LIMITED_API
-#define PyByteArray_AS_STRING(self) \
- (assert(PyByteArray_Check(self)), \
- Py_SIZE(self) ? ((PyByteArrayObject *)(self))->ob_start : _PyByteArray_empty_string)
-#define PyByteArray_GET_SIZE(self) (assert(PyByteArray_Check(self)), Py_SIZE(self))
-
-PyAPI_DATA(char) _PyByteArray_empty_string[];
+# define Py_CPYTHON_BYTEARRAYOBJECT_H
+# include "cpython/bytearrayobject.h"
+# undef Py_CPYTHON_BYTEARRAYOBJECT_H
#endif
#ifdef __cplusplus
diff --git a/Include/bytesobject.h b/Include/bytesobject.h
index 4aaa71a832bd78..5062d8d123ad3e 100644
--- a/Include/bytesobject.h
+++ b/Include/bytesobject.h
@@ -27,26 +27,12 @@ functions should be applied to nil objects.
/* Caching the hash (ob_shash) saves recalculation of a string's hash value.
This significantly speeds up dict lookups. */
-#ifndef Py_LIMITED_API
-typedef struct {
- PyObject_VAR_HEAD
- Py_hash_t ob_shash;
- char ob_sval[1];
-
- /* Invariants:
- * ob_sval contains space for 'ob_size+1' elements.
- * ob_sval[ob_size] == 0.
- * ob_shash is the hash of the string or -1 if not computed yet.
- */
-} PyBytesObject;
-#endif
-
PyAPI_DATA(PyTypeObject) PyBytes_Type;
PyAPI_DATA(PyTypeObject) PyBytesIter_Type;
#define PyBytes_Check(op) \
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_BYTES_SUBCLASS)
-#define PyBytes_CheckExact(op) (Py_TYPE(op) == &PyBytes_Type)
+#define PyBytes_CheckExact(op) Py_IS_TYPE(op, &PyBytes_Type)
PyAPI_FUNC(PyObject *) PyBytes_FromStringAndSize(const char *, Py_ssize_t);
PyAPI_FUNC(PyObject *) PyBytes_FromString(const char *);
@@ -60,38 +46,9 @@ PyAPI_FUNC(char *) PyBytes_AsString(PyObject *);
PyAPI_FUNC(PyObject *) PyBytes_Repr(PyObject *, int);
PyAPI_FUNC(void) PyBytes_Concat(PyObject **, PyObject *);
PyAPI_FUNC(void) PyBytes_ConcatAndDel(PyObject **, PyObject *);
-#ifndef Py_LIMITED_API
-PyAPI_FUNC(int) _PyBytes_Resize(PyObject **, Py_ssize_t);
-PyAPI_FUNC(PyObject*) _PyBytes_FormatEx(
- const char *format,
- Py_ssize_t format_len,
- PyObject *args,
- int use_bytearray);
-PyAPI_FUNC(PyObject*) _PyBytes_FromHex(
- PyObject *string,
- int use_bytearray);
-#endif
PyAPI_FUNC(PyObject *) PyBytes_DecodeEscape(const char *, Py_ssize_t,
const char *, Py_ssize_t,
const char *);
-#ifndef Py_LIMITED_API
-/* Helper for PyBytes_DecodeEscape that detects invalid escape chars. */
-PyAPI_FUNC(PyObject *) _PyBytes_DecodeEscape(const char *, Py_ssize_t,
- const char *, const char **);
-#endif
-
-/* Macro, trading safety for speed */
-#ifndef Py_LIMITED_API
-#define PyBytes_AS_STRING(op) (assert(PyBytes_Check(op)), \
- (((PyBytesObject *)(op))->ob_sval))
-#define PyBytes_GET_SIZE(op) (assert(PyBytes_Check(op)),Py_SIZE(op))
-#endif
-
-/* _PyBytes_Join(sep, x) is like sep.join(x). sep must be PyBytesObject*,
- x must be an iterable object. */
-#ifndef Py_LIMITED_API
-PyAPI_FUNC(PyObject *) _PyBytes_Join(PyObject *sep, PyObject *x);
-#endif
/* Provides access to the internal data buffer and size of a string
object or the default encoded version of a Unicode object. Passing
@@ -114,85 +71,10 @@ PyAPI_FUNC(int) PyBytes_AsStringAndSize(
#define F_ZERO (1<<4)
#ifndef Py_LIMITED_API
-/* The _PyBytesWriter structure is big: it contains an embedded "stack buffer".
- A _PyBytesWriter variable must be declared at the end of variables in a
- function to optimize the memory allocation on the stack. */
-typedef struct {
- /* bytes, bytearray or NULL (when the small buffer is used) */
- PyObject *buffer;
-
- /* Number of allocated size. */
- Py_ssize_t allocated;
-
- /* Minimum number of allocated bytes,
- incremented by _PyBytesWriter_Prepare() */
- Py_ssize_t min_size;
-
- /* If non-zero, use a bytearray instead of a bytes object for buffer. */
- int use_bytearray;
-
- /* If non-zero, overallocate the buffer (default: 0).
- This flag must be zero if use_bytearray is non-zero. */
- int overallocate;
-
- /* Stack buffer */
- int use_small_buffer;
- char small_buffer[512];
-} _PyBytesWriter;
-
-/* Initialize a bytes writer
-
- By default, the overallocation is disabled. Set the overallocate attribute
- to control the allocation of the buffer. */
-PyAPI_FUNC(void) _PyBytesWriter_Init(_PyBytesWriter *writer);
-
-/* Get the buffer content and reset the writer.
- Return a bytes object, or a bytearray object if use_bytearray is non-zero.
- Raise an exception and return NULL on error. */
-PyAPI_FUNC(PyObject *) _PyBytesWriter_Finish(_PyBytesWriter *writer,
- void *str);
-
-/* Deallocate memory of a writer (clear its internal buffer). */
-PyAPI_FUNC(void) _PyBytesWriter_Dealloc(_PyBytesWriter *writer);
-
-/* Allocate the buffer to write size bytes.
- Return the pointer to the beginning of buffer data.
- Raise an exception and return NULL on error. */
-PyAPI_FUNC(void*) _PyBytesWriter_Alloc(_PyBytesWriter *writer,
- Py_ssize_t size);
-
-/* Ensure that the buffer is large enough to write *size* bytes.
- Add size to the writer minimum size (min_size attribute).
-
- str is the current pointer inside the buffer.
- Return the updated current pointer inside the buffer.
- Raise an exception and return NULL on error. */
-PyAPI_FUNC(void*) _PyBytesWriter_Prepare(_PyBytesWriter *writer,
- void *str,
- Py_ssize_t size);
-
-/* Resize the buffer to make it larger.
- The new buffer may be larger than size bytes because of overallocation.
- Return the updated current pointer inside the buffer.
- Raise an exception and return NULL on error.
-
- Note: size must be greater than the number of allocated bytes in the writer.
-
- This function doesn't use the writer minimum size (min_size attribute).
-
- See also _PyBytesWriter_Prepare().
- */
-PyAPI_FUNC(void*) _PyBytesWriter_Resize(_PyBytesWriter *writer,
- void *str,
- Py_ssize_t size);
-
-/* Write bytes.
- Raise an exception and return NULL on error. */
-PyAPI_FUNC(void*) _PyBytesWriter_WriteBytes(_PyBytesWriter *writer,
- void *str,
- const void *bytes,
- Py_ssize_t size);
-#endif /* Py_LIMITED_API */
+# define Py_CPYTHON_BYTESOBJECT_H
+# include "cpython/bytesobject.h"
+# undef Py_CPYTHON_BYTESOBJECT_H
+#endif
#ifdef __cplusplus
}
diff --git a/Include/cellobject.h b/Include/cellobject.h
index 2f9b5b75d998ae..f12aa90a42a8fe 100644
--- a/Include/cellobject.h
+++ b/Include/cellobject.h
@@ -13,7 +13,7 @@ typedef struct {
PyAPI_DATA(PyTypeObject) PyCell_Type;
-#define PyCell_Check(op) (Py_TYPE(op) == &PyCell_Type)
+#define PyCell_Check(op) Py_IS_TYPE(op, &PyCell_Type)
PyAPI_FUNC(PyObject *) PyCell_New(PyObject *);
PyAPI_FUNC(PyObject *) PyCell_Get(PyObject *);
diff --git a/Include/ceval.h b/Include/ceval.h
index 62c6489ed1c351..df5253900eea71 100644
--- a/Include/ceval.h
+++ b/Include/ceval.h
@@ -28,12 +28,10 @@ Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallFunction(
Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyEval_CallMethod(
PyObject *obj, const char *name, const char *format, ...);
-struct _frame; /* Avoid including frameobject.h */
-
PyAPI_FUNC(PyObject *) PyEval_GetBuiltins(void);
PyAPI_FUNC(PyObject *) PyEval_GetGlobals(void);
PyAPI_FUNC(PyObject *) PyEval_GetLocals(void);
-PyAPI_FUNC(struct _frame *) PyEval_GetFrame(void);
+PyAPI_FUNC(PyFrameObject *) PyEval_GetFrame(void);
PyAPI_FUNC(int) Py_AddPendingCall(int (*func)(void *), void *arg);
PyAPI_FUNC(int) Py_MakePendingCalls(void);
@@ -80,8 +78,8 @@ PyAPI_FUNC(void) Py_LeaveRecursiveCall(void);
PyAPI_FUNC(const char *) PyEval_GetFuncName(PyObject *);
PyAPI_FUNC(const char *) PyEval_GetFuncDesc(PyObject *);
-PyAPI_FUNC(PyObject *) PyEval_EvalFrame(struct _frame *);
-PyAPI_FUNC(PyObject *) PyEval_EvalFrameEx(struct _frame *f, int exc);
+PyAPI_FUNC(PyObject *) PyEval_EvalFrame(PyFrameObject *);
+PyAPI_FUNC(PyObject *) PyEval_EvalFrameEx(PyFrameObject *f, int exc);
/* Interface for threads.
@@ -121,9 +119,6 @@ PyAPI_FUNC(PyObject *) PyEval_EvalFrameEx(struct _frame *f, int exc);
WARNING: NEVER NEST CALLS TO Py_BEGIN_ALLOW_THREADS AND
Py_END_ALLOW_THREADS!!!
- The function PyEval_InitThreads() should be called only from
- init_thread() in "_threadmodule.c".
-
Note that not yet all candidates have been converted to use this
mechanism!
*/
@@ -131,8 +126,8 @@ PyAPI_FUNC(PyObject *) PyEval_EvalFrameEx(struct _frame *f, int exc);
PyAPI_FUNC(PyThreadState *) PyEval_SaveThread(void);
PyAPI_FUNC(void) PyEval_RestoreThread(PyThreadState *);
-PyAPI_FUNC(int) PyEval_ThreadsInitialized(void);
-PyAPI_FUNC(void) PyEval_InitThreads(void);
+Py_DEPRECATED(3.9) PyAPI_FUNC(int) PyEval_ThreadsInitialized(void);
+Py_DEPRECATED(3.9) PyAPI_FUNC(void) PyEval_InitThreads(void);
Py_DEPRECATED(3.2) PyAPI_FUNC(void) PyEval_AcquireLock(void);
/* Py_DEPRECATED(3.2) */ PyAPI_FUNC(void) PyEval_ReleaseLock(void);
PyAPI_FUNC(void) PyEval_AcquireThread(PyThreadState *tstate);
diff --git a/Include/classobject.h b/Include/classobject.h
index 840431a127691b..1952f673b7d865 100644
--- a/Include/classobject.h
+++ b/Include/classobject.h
@@ -19,7 +19,7 @@ typedef struct {
PyAPI_DATA(PyTypeObject) PyMethod_Type;
-#define PyMethod_Check(op) ((op)->ob_type == &PyMethod_Type)
+#define PyMethod_Check(op) Py_IS_TYPE(op, &PyMethod_Type)
PyAPI_FUNC(PyObject *) PyMethod_New(PyObject *, PyObject *);
@@ -40,7 +40,7 @@ typedef struct {
PyAPI_DATA(PyTypeObject) PyInstanceMethod_Type;
-#define PyInstanceMethod_Check(op) ((op)->ob_type == &PyInstanceMethod_Type)
+#define PyInstanceMethod_Check(op) Py_IS_TYPE(op, &PyInstanceMethod_Type)
PyAPI_FUNC(PyObject *) PyInstanceMethod_New(PyObject *);
PyAPI_FUNC(PyObject *) PyInstanceMethod_Function(PyObject *);
diff --git a/Include/code.h b/Include/code.h
index 3afddd20c80d74..b9e23eb816529b 100644
--- a/Include/code.h
+++ b/Include/code.h
@@ -1,180 +1,20 @@
/* Definitions for bytecode */
-#ifndef Py_LIMITED_API
#ifndef Py_CODE_H
#define Py_CODE_H
#ifdef __cplusplus
extern "C" {
#endif
-typedef uint16_t _Py_CODEUNIT;
-
-#ifdef WORDS_BIGENDIAN
-# define _Py_OPCODE(word) ((word) >> 8)
-# define _Py_OPARG(word) ((word) & 255)
-#else
-# define _Py_OPCODE(word) ((word) & 255)
-# define _Py_OPARG(word) ((word) >> 8)
-#endif
-
-typedef struct _PyOpcache _PyOpcache;
-
-/* Bytecode object */
-typedef struct {
- PyObject_HEAD
- int co_argcount; /* #arguments, except *args */
- int co_posonlyargcount; /* #positional only arguments */
- int co_kwonlyargcount; /* #keyword only arguments */
- int co_nlocals; /* #local variables */
- int co_stacksize; /* #entries needed for evaluation stack */
- int co_flags; /* CO_..., see below */
- int co_firstlineno; /* first source line number */
- PyObject *co_code; /* instruction opcodes */
- PyObject *co_consts; /* list (constants used) */
- PyObject *co_names; /* list of strings (names used) */
- PyObject *co_varnames; /* tuple of strings (local variable names) */
- PyObject *co_freevars; /* tuple of strings (free variable names) */
- PyObject *co_cellvars; /* tuple of strings (cell variable names) */
- /* The rest aren't used in either hash or comparisons, except for co_name,
- used in both. This is done to preserve the name and line number
- for tracebacks and debuggers; otherwise, constant de-duplication
- would collapse identical functions/lambdas defined on different lines.
- */
- Py_ssize_t *co_cell2arg; /* Maps cell vars which are arguments. */
- PyObject *co_filename; /* unicode (where it was loaded from) */
- PyObject *co_name; /* unicode (name, for reference) */
- PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) See
- Objects/lnotab_notes.txt for details. */
- void *co_zombieframe; /* for optimization only (see frameobject.c) */
- PyObject *co_weakreflist; /* to support weakrefs to code objects */
- /* Scratch space for extra data relating to the code object.
- Type is a void* to keep the format private in codeobject.c to force
- people to go through the proper APIs. */
- void *co_extra;
-
- /* Per opcodes just-in-time cache
- *
- * To reduce cache size, we use indirect mapping from opcode index to
- * cache object:
- * cache = co_opcache[co_opcache_map[next_instr - first_instr] - 1]
- */
-
- // co_opcache_map is indexed by (next_instr - first_instr).
- // * 0 means there is no cache for this opcode.
- // * n > 0 means there is cache in co_opcache[n-1].
- unsigned char *co_opcache_map;
- _PyOpcache *co_opcache;
- int co_opcache_flag; // used to determine when create a cache.
- unsigned char co_opcache_size; // length of co_opcache.
-} PyCodeObject;
-
-/* Masks for co_flags above */
-#define CO_OPTIMIZED 0x0001
-#define CO_NEWLOCALS 0x0002
-#define CO_VARARGS 0x0004
-#define CO_VARKEYWORDS 0x0008
-#define CO_NESTED 0x0010
-#define CO_GENERATOR 0x0020
-/* The CO_NOFREE flag is set if there are no free or cell variables.
- This information is redundant, but it allows a single flag test
- to determine whether there is any extra work to be done when the
- call frame it setup.
-*/
-#define CO_NOFREE 0x0040
-
-/* The CO_COROUTINE flag is set for coroutine functions (defined with
- ``async def`` keywords) */
-#define CO_COROUTINE 0x0080
-#define CO_ITERABLE_COROUTINE 0x0100
-#define CO_ASYNC_GENERATOR 0x0200
-
-/* These are no longer used. */
-#if 0
-#define CO_GENERATOR_ALLOWED 0x1000
-#endif
-#define CO_FUTURE_DIVISION 0x2000
-#define CO_FUTURE_ABSOLUTE_IMPORT 0x4000 /* do absolute imports by default */
-#define CO_FUTURE_WITH_STATEMENT 0x8000
-#define CO_FUTURE_PRINT_FUNCTION 0x10000
-#define CO_FUTURE_UNICODE_LITERALS 0x20000
-
-#define CO_FUTURE_BARRY_AS_BDFL 0x40000
-#define CO_FUTURE_GENERATOR_STOP 0x80000
-#define CO_FUTURE_ANNOTATIONS 0x100000
-
-/* This value is found in the co_cell2arg array when the associated cell
- variable does not correspond to an argument. */
-#define CO_CELL_NOT_AN_ARG (-1)
-
-/* This should be defined if a future statement modifies the syntax.
- For example, when a keyword is added.
-*/
-#define PY_PARSER_REQUIRES_FUTURE_KEYWORD
-
-#define CO_MAXBLOCKS 20 /* Max static block nesting within a function */
-
-PyAPI_DATA(PyTypeObject) PyCode_Type;
-
-#define PyCode_Check(op) (Py_TYPE(op) == &PyCode_Type)
-#define PyCode_GetNumFree(op) (PyTuple_GET_SIZE((op)->co_freevars))
-
-/* Public interface */
-PyAPI_FUNC(PyCodeObject *) PyCode_New(
- int, int, int, int, int, PyObject *, PyObject *,
- PyObject *, PyObject *, PyObject *, PyObject *,
- PyObject *, PyObject *, int, PyObject *);
-
-PyAPI_FUNC(PyCodeObject *) PyCode_NewWithPosOnlyArgs(
- int, int, int, int, int, int, PyObject *, PyObject *,
- PyObject *, PyObject *, PyObject *, PyObject *,
- PyObject *, PyObject *, int, PyObject *);
- /* same as struct above */
-
-/* Creates a new empty code object with the specified source location. */
-PyAPI_FUNC(PyCodeObject *)
-PyCode_NewEmpty(const char *filename, const char *funcname, int firstlineno);
-
-/* Return the line number associated with the specified bytecode index
- in this code object. If you just need the line number of a frame,
- use PyFrame_GetLineNumber() instead. */
-PyAPI_FUNC(int) PyCode_Addr2Line(PyCodeObject *, int);
-
-/* for internal use only */
-typedef struct _addr_pair {
- int ap_lower;
- int ap_upper;
-} PyAddrPair;
-
-#ifndef Py_LIMITED_API
-/* Update *bounds to describe the first and one-past-the-last instructions in the
- same line as lasti. Return the number of that line.
-*/
-PyAPI_FUNC(int) _PyCode_CheckLineNumber(PyCodeObject* co,
- int lasti, PyAddrPair *bounds);
-
-/* Create a comparable key used to compare constants taking in account the
- * object type. It is used to make sure types are not coerced (e.g., float and
- * complex) _and_ to distinguish 0.0 from -0.0 e.g. on IEEE platforms
- *
- * Return (type(obj), obj, ...): a tuple with variable size (at least 2 items)
- * depending on the type and the value. The type is the first item to not
- * compare bytes and str which can raise a BytesWarning exception. */
-PyAPI_FUNC(PyObject*) _PyCode_ConstantKey(PyObject *obj);
-#endif
-
-PyAPI_FUNC(PyObject*) PyCode_Optimize(PyObject *code, PyObject* consts,
- PyObject *names, PyObject *lnotab);
-
+typedef struct PyCodeObject PyCodeObject;
#ifndef Py_LIMITED_API
-PyAPI_FUNC(int) _PyCode_GetExtra(PyObject *code, Py_ssize_t index,
- void **extra);
-PyAPI_FUNC(int) _PyCode_SetExtra(PyObject *code, Py_ssize_t index,
- void *extra);
+# define Py_CPYTHON_CODE_H
+# include "cpython/code.h"
+# undef Py_CPYTHON_CODE_H
#endif
#ifdef __cplusplus
}
#endif
#endif /* !Py_CODE_H */
-#endif /* Py_LIMITED_API */
diff --git a/Include/compile.h b/Include/compile.h
index 1cda955c142551..12417ce805464b 100644
--- a/Include/compile.h
+++ b/Include/compile.h
@@ -2,7 +2,6 @@
#define Py_COMPILE_H
#ifndef Py_LIMITED_API
-#include "code.h"
#ifdef __cplusplus
extern "C" {
@@ -18,12 +17,18 @@ PyAPI_FUNC(PyCodeObject *) PyNode_Compile(struct _node *, const char *);
CO_FUTURE_UNICODE_LITERALS | CO_FUTURE_BARRY_AS_BDFL | \
CO_FUTURE_GENERATOR_STOP | CO_FUTURE_ANNOTATIONS)
#define PyCF_MASK_OBSOLETE (CO_NESTED)
+
+/* bpo-39562: CO_FUTURE_ and PyCF_ constants must be kept unique.
+ PyCF_ constants can use bits from 0x0100 to 0x10000.
+ CO_FUTURE_ constants use bits starting at 0x20000. */
#define PyCF_SOURCE_IS_UTF8 0x0100
#define PyCF_DONT_IMPLY_DEDENT 0x0200
#define PyCF_ONLY_AST 0x0400
#define PyCF_IGNORE_COOKIE 0x0800
#define PyCF_TYPE_COMMENTS 0x1000
#define PyCF_ALLOW_TOP_LEVEL_AWAIT 0x2000
+#define PyCF_COMPILE_MASK (PyCF_ONLY_AST | PyCF_ALLOW_TOP_LEVEL_AWAIT | \
+ PyCF_TYPE_COMMENTS | PyCF_DONT_IMPLY_DEDENT)
#ifndef Py_LIMITED_API
typedef struct {
@@ -83,7 +88,12 @@ PyAPI_FUNC(PyObject*) _Py_Mangle(PyObject *p, PyObject *name);
PyAPI_FUNC(int) PyCompile_OpcodeStackEffect(int opcode, int oparg);
PyAPI_FUNC(int) PyCompile_OpcodeStackEffectWithJump(int opcode, int oparg, int jump);
-PyAPI_FUNC(int) _PyAST_Optimize(struct _mod *, PyArena *arena, int optimize);
+typedef struct {
+ int optimize;
+ int ff_features;
+} _PyASTOptimizeState;
+
+PyAPI_FUNC(int) _PyAST_Optimize(struct _mod *, PyArena *arena, _PyASTOptimizeState *state);
#ifdef __cplusplus
}
@@ -97,4 +107,7 @@ PyAPI_FUNC(int) _PyAST_Optimize(struct _mod *, PyArena *arena, int optimize);
#define Py_eval_input 258
#define Py_func_type_input 345
+/* This doesn't need to match anything */
+#define Py_fstring_input 800
+
#endif /* !Py_COMPILE_H */
diff --git a/Include/complexobject.h b/Include/complexobject.h
index cb8c52c5800854..9221f9c51d65be 100644
--- a/Include/complexobject.h
+++ b/Include/complexobject.h
@@ -39,7 +39,7 @@ typedef struct {
PyAPI_DATA(PyTypeObject) PyComplex_Type;
#define PyComplex_Check(op) PyObject_TypeCheck(op, &PyComplex_Type)
-#define PyComplex_CheckExact(op) (Py_TYPE(op) == &PyComplex_Type)
+#define PyComplex_CheckExact(op) Py_IS_TYPE(op, &PyComplex_Type)
#ifndef Py_LIMITED_API
PyAPI_FUNC(PyObject *) PyComplex_FromCComplex(Py_complex);
diff --git a/Include/context.h b/Include/context.h
index 9581285247b397..4e5007089dd94b 100644
--- a/Include/context.h
+++ b/Include/context.h
@@ -17,9 +17,9 @@ PyAPI_DATA(PyTypeObject) PyContextToken_Type;
typedef struct _pycontexttokenobject PyContextToken;
-#define PyContext_CheckExact(o) (Py_TYPE(o) == &PyContext_Type)
-#define PyContextVar_CheckExact(o) (Py_TYPE(o) == &PyContextVar_Type)
-#define PyContextToken_CheckExact(o) (Py_TYPE(o) == &PyContextToken_Type)
+#define PyContext_CheckExact(o) Py_IS_TYPE(o, &PyContext_Type)
+#define PyContextVar_CheckExact(o) Py_IS_TYPE(o, &PyContextVar_Type)
+#define PyContextToken_CheckExact(o) Py_IS_TYPE(o, &PyContextToken_Type)
PyAPI_FUNC(PyObject *) PyContext_New(void);
@@ -73,9 +73,6 @@ PyAPI_FUNC(int) PyContextVar_Reset(PyObject *var, PyObject *token);
PyAPI_FUNC(PyObject *) _PyContext_NewHamtForTests(void);
-PyAPI_FUNC(int) PyContext_ClearFreeList(void);
-
-
#endif /* !Py_LIMITED_API */
#ifdef __cplusplus
diff --git a/Include/cpython/abstract.h b/Include/cpython/abstract.h
index 2c4eae70b96901..aa72f998b701cf 100644
--- a/Include/cpython/abstract.h
+++ b/Include/cpython/abstract.h
@@ -29,7 +29,7 @@ PyAPI_FUNC(PyObject *) _PyStack_AsDict(
/* Suggested size (number of positional arguments) for arrays of PyObject*
allocated on a C stack to avoid allocating memory on the heap memory. Such
array is used to pass positional arguments to call functions of the
- _PyObject_Vectorcall() family.
+ PyObject_Vectorcall() family.
The size is chosen to not abuse the C stack and so limit the risk of stack
overflow. The size is also chosen to allow using the small stack for most
@@ -45,8 +45,8 @@ PyAPI_FUNC(PyObject *) _Py_CheckFunctionResult(
/* === Vectorcall protocol (PEP 590) ============================= */
-/* Call callable using tp_call. Arguments are like _PyObject_Vectorcall()
- or _PyObject_FastCallDict() (both forms are supported),
+/* Call callable using tp_call. Arguments are like PyObject_Vectorcall()
+ or PyObject_FastCallDict() (both forms are supported),
except that nargs is plainly the number of arguments without flags. */
PyAPI_FUNC(PyObject *) _PyObject_MakeTpCall(
PyThreadState *tstate,
@@ -63,17 +63,21 @@ PyVectorcall_NARGS(size_t n)
}
static inline vectorcallfunc
-_PyVectorcall_Function(PyObject *callable)
+PyVectorcall_Function(PyObject *callable)
{
+ PyTypeObject *tp;
+ Py_ssize_t offset;
+ vectorcallfunc *ptr;
+
assert(callable != NULL);
- PyTypeObject *tp = Py_TYPE(callable);
- if (!PyType_HasFeature(tp, _Py_TPFLAGS_HAVE_VECTORCALL)) {
+ tp = Py_TYPE(callable);
+ if (!PyType_HasFeature(tp, Py_TPFLAGS_HAVE_VECTORCALL)) {
return NULL;
}
assert(PyCallable_Check(callable));
- Py_ssize_t offset = tp->tp_vectorcall_offset;
+ offset = tp->tp_vectorcall_offset;
assert(offset > 0);
- vectorcallfunc *ptr = (vectorcallfunc *)(((char *)callable) + offset);
+ ptr = (vectorcallfunc *)(((char *)callable) + offset);
return *ptr;
}
@@ -100,20 +104,23 @@ _PyObject_VectorcallTstate(PyThreadState *tstate, PyObject *callable,
PyObject *const *args, size_t nargsf,
PyObject *kwnames)
{
+ vectorcallfunc func;
+ PyObject *res;
+
assert(kwnames == NULL || PyTuple_Check(kwnames));
assert(args != NULL || PyVectorcall_NARGS(nargsf) == 0);
- vectorcallfunc func = _PyVectorcall_Function(callable);
+ func = PyVectorcall_Function(callable);
if (func == NULL) {
Py_ssize_t nargs = PyVectorcall_NARGS(nargsf);
return _PyObject_MakeTpCall(tstate, callable, args, nargs, kwnames);
}
- PyObject *res = func(callable, args, nargsf, kwnames);
+ res = func(callable, args, nargsf, kwnames);
return _Py_CheckFunctionResult(tstate, callable, res, NULL);
}
static inline PyObject *
-_PyObject_Vectorcall(PyObject *callable, PyObject *const *args,
+PyObject_Vectorcall(PyObject *callable, PyObject *const *args,
size_t nargsf, PyObject *kwnames)
{
PyThreadState *tstate = PyThreadState_GET();
@@ -121,9 +128,18 @@ _PyObject_Vectorcall(PyObject *callable, PyObject *const *args,
args, nargsf, kwnames);
}
-/* Same as _PyObject_Vectorcall except that keyword arguments are passed as
+// Backwards compatibility aliases for API that was provisional in Python 3.8
+#define _PyObject_Vectorcall PyObject_Vectorcall
+#define _PyObject_VectorcallMethod PyObject_VectorcallMethod
+#define _PyObject_FastCallDict PyObject_VectorcallDict
+#define _PyVectorcall_Function PyVectorcall_Function
+#define _PyObject_CallOneArg PyObject_CallOneArg
+#define _PyObject_CallMethodNoArgs PyObject_CallMethodNoArgs
+#define _PyObject_CallMethodOneArg PyObject_CallMethodOneArg
+
+/* Same as PyObject_Vectorcall except that keyword arguments are passed as
dict, which may be NULL if there are no keyword arguments. */
-PyAPI_FUNC(PyObject *) _PyObject_FastCallDict(
+PyAPI_FUNC(PyObject *) PyObject_VectorcallDict(
PyObject *callable,
PyObject *const *args,
size_t nargsf,
@@ -133,12 +149,18 @@ PyAPI_FUNC(PyObject *) _PyObject_FastCallDict(
"tuple" and keyword arguments "dict". "dict" may also be NULL */
PyAPI_FUNC(PyObject *) PyVectorcall_Call(PyObject *callable, PyObject *tuple, PyObject *dict);
-/* Same as _PyObject_Vectorcall except without keyword arguments */
+static inline PyObject *
+_PyObject_FastCallTstate(PyThreadState *tstate, PyObject *func, PyObject *const *args, Py_ssize_t nargs)
+{
+ return _PyObject_VectorcallTstate(tstate, func, args, (size_t)nargs, NULL);
+}
+
+/* Same as PyObject_Vectorcall except without keyword arguments */
static inline PyObject *
_PyObject_FastCall(PyObject *func, PyObject *const *args, Py_ssize_t nargs)
{
PyThreadState *tstate = PyThreadState_GET();
- return _PyObject_VectorcallTstate(tstate, func, args, (size_t)nargs, NULL);
+ return _PyObject_FastCallTstate(tstate, func, args, nargs);
}
/* Call a callable without any arguments
@@ -151,34 +173,39 @@ _PyObject_CallNoArg(PyObject *func) {
}
static inline PyObject *
-_PyObject_CallOneArg(PyObject *func, PyObject *arg)
+PyObject_CallOneArg(PyObject *func, PyObject *arg)
{
- assert(arg != NULL);
PyObject *_args[2];
- PyObject **args = _args + 1; // For PY_VECTORCALL_ARGUMENTS_OFFSET
+ PyObject **args;
+ PyThreadState *tstate;
+ size_t nargsf;
+
+ assert(arg != NULL);
+ args = _args + 1; // For PY_VECTORCALL_ARGUMENTS_OFFSET
args[0] = arg;
- PyThreadState *tstate = PyThreadState_GET();
- size_t nargsf = 1 | PY_VECTORCALL_ARGUMENTS_OFFSET;
+ tstate = PyThreadState_GET();
+ nargsf = 1 | PY_VECTORCALL_ARGUMENTS_OFFSET;
return _PyObject_VectorcallTstate(tstate, func, args, nargsf, NULL);
}
-PyAPI_FUNC(PyObject *) _PyObject_VectorcallMethod(
+PyAPI_FUNC(PyObject *) PyObject_VectorcallMethod(
PyObject *name, PyObject *const *args,
size_t nargsf, PyObject *kwnames);
static inline PyObject *
-_PyObject_CallMethodNoArgs(PyObject *self, PyObject *name)
+PyObject_CallMethodNoArgs(PyObject *self, PyObject *name)
{
- return _PyObject_VectorcallMethod(name, &self,
+ return PyObject_VectorcallMethod(name, &self,
1 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
}
static inline PyObject *
-_PyObject_CallMethodOneArg(PyObject *self, PyObject *name, PyObject *arg)
+PyObject_CallMethodOneArg(PyObject *self, PyObject *name, PyObject *arg)
{
- assert(arg != NULL);
PyObject *args[2] = {self, arg};
- return _PyObject_VectorcallMethod(name, args,
+
+ assert(arg != NULL);
+ return PyObject_VectorcallMethod(name, args,
2 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
}
@@ -207,7 +234,7 @@ _PyObject_VectorcallMethodId(
if (!oname) {
return NULL;
}
- return _PyObject_VectorcallMethod(oname, args, nargsf, kwnames);
+ return PyObject_VectorcallMethod(oname, args, nargsf, kwnames);
}
static inline PyObject *
@@ -220,8 +247,9 @@ _PyObject_CallMethodIdNoArgs(PyObject *self, _Py_Identifier *name)
static inline PyObject *
_PyObject_CallMethodIdOneArg(PyObject *self, _Py_Identifier *name, PyObject *arg)
{
- assert(arg != NULL);
PyObject *args[2] = {self, arg};
+
+ assert(arg != NULL);
return _PyObject_VectorcallMethodId(name, args,
2 | PY_VECTORCALL_ARGUMENTS_OFFSET, NULL);
}
@@ -236,9 +264,7 @@ PyAPI_FUNC(Py_ssize_t) PyObject_LengthHint(PyObject *o, Py_ssize_t);
/* === New Buffer API ============================================ */
/* Return 1 if the getbuffer function is available, otherwise return 0. */
-#define PyObject_CheckBuffer(obj) \
- (((obj)->ob_type->tp_as_buffer != NULL) && \
- ((obj)->ob_type->tp_as_buffer->bf_getbuffer != NULL))
+PyAPI_FUNC(int) PyObject_CheckBuffer(PyObject *obj);
/* This is a C-API version of the getbuffer function call. It checks
to make sure object has the required function pointer and issues the
@@ -306,14 +332,8 @@ PyAPI_FUNC(void) PyBuffer_Release(Py_buffer *view);
/* ==== Iterators ================================================ */
#define PyIter_Check(obj) \
- ((obj)->ob_type->tp_iternext != NULL && \
- (obj)->ob_type->tp_iternext != &_PyObject_NextNotImplemented)
-
-/* === Number Protocol ================================================== */
-
-#define PyIndex_Check(obj) \
- ((obj)->ob_type->tp_as_number != NULL && \
- (obj)->ob_type->tp_as_number->nb_index != NULL)
+ (Py_TYPE(obj)->tp_iternext != NULL && \
+ Py_TYPE(obj)->tp_iternext != &_PyObject_NextNotImplemented)
/* === Sequence protocol ================================================ */
@@ -359,6 +379,9 @@ PyAPI_FUNC(void) _Py_add_one_to_index_C(int nd, Py_ssize_t *index,
/* Convert Python int to Py_ssize_t. Do nothing if the argument is None. */
PyAPI_FUNC(int) _Py_convert_optional_to_ssize_t(PyObject *, void *);
+/* Same as PyNumber_Index but can return an instance of a subclass of int. */
+PyAPI_FUNC(PyObject *) _PyNumber_Index(PyObject *o);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/cpython/bytearrayobject.h b/Include/cpython/bytearrayobject.h
new file mode 100644
index 00000000000000..569b0cd0369861
--- /dev/null
+++ b/Include/cpython/bytearrayobject.h
@@ -0,0 +1,20 @@
+#ifndef Py_CPYTHON_BYTEARRAYOBJECT_H
+# error "this header file must not be included directly"
+#endif
+
+/* Object layout */
+typedef struct {
+ PyObject_VAR_HEAD
+ Py_ssize_t ob_alloc; /* How many bytes allocated in ob_bytes */
+ char *ob_bytes; /* Physical backing buffer */
+ char *ob_start; /* Logical start inside ob_bytes */
+ Py_ssize_t ob_exports; /* How many buffer exports */
+} PyByteArrayObject;
+
+/* Macros, trading safety for speed */
+#define PyByteArray_AS_STRING(self) \
+ (assert(PyByteArray_Check(self)), \
+ Py_SIZE(self) ? ((PyByteArrayObject *)(self))->ob_start : _PyByteArray_empty_string)
+#define PyByteArray_GET_SIZE(self) (assert(PyByteArray_Check(self)), Py_SIZE(self))
+
+PyAPI_DATA(char) _PyByteArray_empty_string[];
diff --git a/Include/cpython/bytesobject.h b/Include/cpython/bytesobject.h
new file mode 100644
index 00000000000000..f284c5835df099
--- /dev/null
+++ b/Include/cpython/bytesobject.h
@@ -0,0 +1,118 @@
+#ifndef Py_CPYTHON_BYTESOBJECT_H
+# error "this header file must not be included directly"
+#endif
+
+typedef struct {
+ PyObject_VAR_HEAD
+ Py_hash_t ob_shash;
+ char ob_sval[1];
+
+ /* Invariants:
+ * ob_sval contains space for 'ob_size+1' elements.
+ * ob_sval[ob_size] == 0.
+ * ob_shash is the hash of the string or -1 if not computed yet.
+ */
+} PyBytesObject;
+
+PyAPI_FUNC(int) _PyBytes_Resize(PyObject **, Py_ssize_t);
+PyAPI_FUNC(PyObject*) _PyBytes_FormatEx(
+ const char *format,
+ Py_ssize_t format_len,
+ PyObject *args,
+ int use_bytearray);
+PyAPI_FUNC(PyObject*) _PyBytes_FromHex(
+ PyObject *string,
+ int use_bytearray);
+
+/* Helper for PyBytes_DecodeEscape that detects invalid escape chars. */
+PyAPI_FUNC(PyObject *) _PyBytes_DecodeEscape(const char *, Py_ssize_t,
+ const char *, const char **);
+
+/* Macro, trading safety for speed */
+#define PyBytes_AS_STRING(op) (assert(PyBytes_Check(op)), \
+ (((PyBytesObject *)(op))->ob_sval))
+#define PyBytes_GET_SIZE(op) (assert(PyBytes_Check(op)),Py_SIZE(op))
+
+/* _PyBytes_Join(sep, x) is like sep.join(x). sep must be PyBytesObject*,
+ x must be an iterable object. */
+PyAPI_FUNC(PyObject *) _PyBytes_Join(PyObject *sep, PyObject *x);
+
+
+/* The _PyBytesWriter structure is big: it contains an embedded "stack buffer".
+ A _PyBytesWriter variable must be declared at the end of variables in a
+ function to optimize the memory allocation on the stack. */
+typedef struct {
+ /* bytes, bytearray or NULL (when the small buffer is used) */
+ PyObject *buffer;
+
+ /* Number of allocated size. */
+ Py_ssize_t allocated;
+
+ /* Minimum number of allocated bytes,
+ incremented by _PyBytesWriter_Prepare() */
+ Py_ssize_t min_size;
+
+ /* If non-zero, use a bytearray instead of a bytes object for buffer. */
+ int use_bytearray;
+
+ /* If non-zero, overallocate the buffer (default: 0).
+ This flag must be zero if use_bytearray is non-zero. */
+ int overallocate;
+
+ /* Stack buffer */
+ int use_small_buffer;
+ char small_buffer[512];
+} _PyBytesWriter;
+
+/* Initialize a bytes writer
+
+ By default, the overallocation is disabled. Set the overallocate attribute
+ to control the allocation of the buffer. */
+PyAPI_FUNC(void) _PyBytesWriter_Init(_PyBytesWriter *writer);
+
+/* Get the buffer content and reset the writer.
+ Return a bytes object, or a bytearray object if use_bytearray is non-zero.
+ Raise an exception and return NULL on error. */
+PyAPI_FUNC(PyObject *) _PyBytesWriter_Finish(_PyBytesWriter *writer,
+ void *str);
+
+/* Deallocate memory of a writer (clear its internal buffer). */
+PyAPI_FUNC(void) _PyBytesWriter_Dealloc(_PyBytesWriter *writer);
+
+/* Allocate the buffer to write size bytes.
+ Return the pointer to the beginning of buffer data.
+ Raise an exception and return NULL on error. */
+PyAPI_FUNC(void*) _PyBytesWriter_Alloc(_PyBytesWriter *writer,
+ Py_ssize_t size);
+
+/* Ensure that the buffer is large enough to write *size* bytes.
+ Add size to the writer minimum size (min_size attribute).
+
+ str is the current pointer inside the buffer.
+ Return the updated current pointer inside the buffer.
+ Raise an exception and return NULL on error. */
+PyAPI_FUNC(void*) _PyBytesWriter_Prepare(_PyBytesWriter *writer,
+ void *str,
+ Py_ssize_t size);
+
+/* Resize the buffer to make it larger.
+ The new buffer may be larger than size bytes because of overallocation.
+ Return the updated current pointer inside the buffer.
+ Raise an exception and return NULL on error.
+
+ Note: size must be greater than the number of allocated bytes in the writer.
+
+ This function doesn't use the writer minimum size (min_size attribute).
+
+ See also _PyBytesWriter_Prepare().
+ */
+PyAPI_FUNC(void*) _PyBytesWriter_Resize(_PyBytesWriter *writer,
+ void *str,
+ Py_ssize_t size);
+
+/* Write bytes.
+ Raise an exception and return NULL on error. */
+PyAPI_FUNC(void*) _PyBytesWriter_WriteBytes(_PyBytesWriter *writer,
+ void *str,
+ const void *bytes,
+ Py_ssize_t size);
diff --git a/Include/cpython/ceval.h b/Include/cpython/ceval.h
index e601304589f762..e1922a677bd38c 100644
--- a/Include/cpython/ceval.h
+++ b/Include/cpython/ceval.h
@@ -7,11 +7,13 @@ extern "C" {
#endif
PyAPI_FUNC(void) PyEval_SetProfile(Py_tracefunc, PyObject *);
+PyAPI_DATA(int) _PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg);
PyAPI_FUNC(void) PyEval_SetTrace(Py_tracefunc, PyObject *);
+PyAPI_FUNC(int) _PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg);
PyAPI_FUNC(int) _PyEval_GetCoroutineOriginTrackingDepth(void);
-PyAPI_FUNC(void) _PyEval_SetAsyncGenFirstiter(PyObject *);
+PyAPI_FUNC(int) _PyEval_SetAsyncGenFirstiter(PyObject *);
PyAPI_FUNC(PyObject *) _PyEval_GetAsyncGenFirstiter(void);
-PyAPI_FUNC(void) _PyEval_SetAsyncGenFinalizer(PyObject *);
+PyAPI_FUNC(int) _PyEval_SetAsyncGenFinalizer(PyObject *);
PyAPI_FUNC(PyObject *) _PyEval_GetAsyncGenFinalizer(void);
/* Helper to look up a builtin object */
@@ -21,7 +23,7 @@ PyAPI_FUNC(PyObject *) _PyEval_GetBuiltinId(_Py_Identifier *);
flag was set, else return 0. */
PyAPI_FUNC(int) PyEval_MergeCompilerFlags(PyCompilerFlags *cf);
-PyAPI_FUNC(PyObject *) _PyEval_EvalFrameDefault(struct _frame *f, int exc);
+PyAPI_FUNC(PyObject *) _PyEval_EvalFrameDefault(PyThreadState *tstate, PyFrameObject *f, int exc);
PyAPI_FUNC(void) _PyEval_SetSwitchInterval(unsigned long microseconds);
PyAPI_FUNC(unsigned long) _PyEval_GetSwitchInterval(void);
@@ -31,62 +33,6 @@ PyAPI_FUNC(Py_ssize_t) _PyEval_RequestCodeExtraIndex(freefunc);
PyAPI_FUNC(int) _PyEval_SliceIndex(PyObject *, Py_ssize_t *);
PyAPI_FUNC(int) _PyEval_SliceIndexNotNone(PyObject *, Py_ssize_t *);
-PyAPI_DATA(int) _Py_CheckRecursionLimit;
-
-#ifdef USE_STACKCHECK
-/* With USE_STACKCHECK macro defined, trigger stack checks in
- _Py_CheckRecursiveCall() on every 64th call to Py_EnterRecursiveCall. */
-static inline int _Py_MakeRecCheck(PyThreadState *tstate) {
- return (++tstate->recursion_depth > _Py_CheckRecursionLimit
- || ++tstate->stackcheck_counter > 64);
-}
-#else
-static inline int _Py_MakeRecCheck(PyThreadState *tstate) {
- return (++tstate->recursion_depth > _Py_CheckRecursionLimit);
-}
-#endif
-
-PyAPI_FUNC(int) _Py_CheckRecursiveCall(
- PyThreadState *tstate,
- const char *where);
-
-static inline int _Py_EnterRecursiveCall(PyThreadState *tstate,
- const char *where) {
- return (_Py_MakeRecCheck(tstate) && _Py_CheckRecursiveCall(tstate, where));
-}
-
-static inline int _Py_EnterRecursiveCall_inline(const char *where) {
- PyThreadState *tstate = PyThreadState_GET();
- return _Py_EnterRecursiveCall(tstate, where);
-}
-
-#define Py_EnterRecursiveCall(where) _Py_EnterRecursiveCall_inline(where)
-
-
-/* Compute the "lower-water mark" for a recursion limit. When
- * Py_LeaveRecursiveCall() is called with a recursion depth below this mark,
- * the overflowed flag is reset to 0. */
-#define _Py_RecursionLimitLowerWaterMark(limit) \
- (((limit) > 200) \
- ? ((limit) - 50) \
- : (3 * ((limit) >> 2)))
-
-#define _Py_MakeEndRecCheck(x) \
- (--(x) < _Py_RecursionLimitLowerWaterMark(_Py_CheckRecursionLimit))
-
-static inline void _Py_LeaveRecursiveCall(PyThreadState *tstate) {
- if (_Py_MakeEndRecCheck(tstate->recursion_depth)) {
- tstate->overflowed = 0;
- }
-}
-
-static inline void _Py_LeaveRecursiveCall_inline(void) {
- PyThreadState *tstate = PyThreadState_GET();
- _Py_LeaveRecursiveCall(tstate);
-}
-
-#define Py_LeaveRecursiveCall() _Py_LeaveRecursiveCall_inline()
-
#ifdef __cplusplus
}
#endif
diff --git a/Include/cpython/code.h b/Include/cpython/code.h
new file mode 100644
index 00000000000000..cda28ac6ee9343
--- /dev/null
+++ b/Include/cpython/code.h
@@ -0,0 +1,165 @@
+#ifndef Py_CPYTHON_CODE_H
+# error "this header file must not be included directly"
+#endif
+
+typedef uint16_t _Py_CODEUNIT;
+
+#ifdef WORDS_BIGENDIAN
+# define _Py_OPCODE(word) ((word) >> 8)
+# define _Py_OPARG(word) ((word) & 255)
+#else
+# define _Py_OPCODE(word) ((word) & 255)
+# define _Py_OPARG(word) ((word) >> 8)
+#endif
+
+typedef struct _PyOpcache _PyOpcache;
+
+/* Bytecode object */
+struct PyCodeObject {
+ PyObject_HEAD
+ int co_argcount; /* #arguments, except *args */
+ int co_posonlyargcount; /* #positional only arguments */
+ int co_kwonlyargcount; /* #keyword only arguments */
+ int co_nlocals; /* #local variables */
+ int co_stacksize; /* #entries needed for evaluation stack */
+ int co_flags; /* CO_..., see below */
+ int co_firstlineno; /* first source line number */
+ PyObject *co_code; /* instruction opcodes */
+ PyObject *co_consts; /* list (constants used) */
+ PyObject *co_names; /* list of strings (names used) */
+ PyObject *co_varnames; /* tuple of strings (local variable names) */
+ PyObject *co_freevars; /* tuple of strings (free variable names) */
+ PyObject *co_cellvars; /* tuple of strings (cell variable names) */
+ /* The rest aren't used in either hash or comparisons, except for co_name,
+ used in both. This is done to preserve the name and line number
+ for tracebacks and debuggers; otherwise, constant de-duplication
+ would collapse identical functions/lambdas defined on different lines.
+ */
+ Py_ssize_t *co_cell2arg; /* Maps cell vars which are arguments. */
+ PyObject *co_filename; /* unicode (where it was loaded from) */
+ PyObject *co_name; /* unicode (name, for reference) */
+ PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) See
+ Objects/lnotab_notes.txt for details. */
+ void *co_zombieframe; /* for optimization only (see frameobject.c) */
+ PyObject *co_weakreflist; /* to support weakrefs to code objects */
+ /* Scratch space for extra data relating to the code object.
+ Type is a void* to keep the format private in codeobject.c to force
+ people to go through the proper APIs. */
+ void *co_extra;
+
+ /* Per opcodes just-in-time cache
+ *
+ * To reduce cache size, we use indirect mapping from opcode index to
+ * cache object:
+ * cache = co_opcache[co_opcache_map[next_instr - first_instr] - 1]
+ */
+
+ // co_opcache_map is indexed by (next_instr - first_instr).
+ // * 0 means there is no cache for this opcode.
+ // * n > 0 means there is cache in co_opcache[n-1].
+ unsigned char *co_opcache_map;
+ _PyOpcache *co_opcache;
+ int co_opcache_flag; // used to determine when create a cache.
+ unsigned char co_opcache_size; // length of co_opcache.
+};
+
+/* Masks for co_flags above */
+#define CO_OPTIMIZED 0x0001
+#define CO_NEWLOCALS 0x0002
+#define CO_VARARGS 0x0004
+#define CO_VARKEYWORDS 0x0008
+#define CO_NESTED 0x0010
+#define CO_GENERATOR 0x0020
+/* The CO_NOFREE flag is set if there are no free or cell variables.
+ This information is redundant, but it allows a single flag test
+ to determine whether there is any extra work to be done when the
+ call frame it setup.
+*/
+#define CO_NOFREE 0x0040
+
+/* The CO_COROUTINE flag is set for coroutine functions (defined with
+ ``async def`` keywords) */
+#define CO_COROUTINE 0x0080
+#define CO_ITERABLE_COROUTINE 0x0100
+#define CO_ASYNC_GENERATOR 0x0200
+
+/* bpo-39562: These constant values are changed in Python 3.9
+ to prevent collision with compiler flags. CO_FUTURE_ and PyCF_
+ constants must be kept unique. PyCF_ constants can use bits from
+ 0x0100 to 0x10000. CO_FUTURE_ constants use bits starting at 0x20000. */
+#define CO_FUTURE_DIVISION 0x20000
+#define CO_FUTURE_ABSOLUTE_IMPORT 0x40000 /* do absolute imports by default */
+#define CO_FUTURE_WITH_STATEMENT 0x80000
+#define CO_FUTURE_PRINT_FUNCTION 0x100000
+#define CO_FUTURE_UNICODE_LITERALS 0x200000
+
+#define CO_FUTURE_BARRY_AS_BDFL 0x400000
+#define CO_FUTURE_GENERATOR_STOP 0x800000
+#define CO_FUTURE_ANNOTATIONS 0x1000000
+
+/* This value is found in the co_cell2arg array when the associated cell
+ variable does not correspond to an argument. */
+#define CO_CELL_NOT_AN_ARG (-1)
+
+/* This should be defined if a future statement modifies the syntax.
+ For example, when a keyword is added.
+*/
+#define PY_PARSER_REQUIRES_FUTURE_KEYWORD
+
+#define CO_MAXBLOCKS 20 /* Max static block nesting within a function */
+
+PyAPI_DATA(PyTypeObject) PyCode_Type;
+
+#define PyCode_Check(op) Py_IS_TYPE(op, &PyCode_Type)
+#define PyCode_GetNumFree(op) (PyTuple_GET_SIZE((op)->co_freevars))
+
+/* Public interface */
+PyAPI_FUNC(PyCodeObject *) PyCode_New(
+ int, int, int, int, int, PyObject *, PyObject *,
+ PyObject *, PyObject *, PyObject *, PyObject *,
+ PyObject *, PyObject *, int, PyObject *);
+
+PyAPI_FUNC(PyCodeObject *) PyCode_NewWithPosOnlyArgs(
+ int, int, int, int, int, int, PyObject *, PyObject *,
+ PyObject *, PyObject *, PyObject *, PyObject *,
+ PyObject *, PyObject *, int, PyObject *);
+ /* same as struct above */
+
+/* Creates a new empty code object with the specified source location. */
+PyAPI_FUNC(PyCodeObject *)
+PyCode_NewEmpty(const char *filename, const char *funcname, int firstlineno);
+
+/* Return the line number associated with the specified bytecode index
+ in this code object. If you just need the line number of a frame,
+ use PyFrame_GetLineNumber() instead. */
+PyAPI_FUNC(int) PyCode_Addr2Line(PyCodeObject *, int);
+
+/* for internal use only */
+typedef struct _addr_pair {
+ int ap_lower;
+ int ap_upper;
+} PyAddrPair;
+
+/* Update *bounds to describe the first and one-past-the-last instructions in the
+ same line as lasti. Return the number of that line.
+*/
+PyAPI_FUNC(int) _PyCode_CheckLineNumber(PyCodeObject* co,
+ int lasti, PyAddrPair *bounds);
+
+/* Create a comparable key used to compare constants taking in account the
+ * object type. It is used to make sure types are not coerced (e.g., float and
+ * complex) _and_ to distinguish 0.0 from -0.0 e.g. on IEEE platforms
+ *
+ * Return (type(obj), obj, ...): a tuple with variable size (at least 2 items)
+ * depending on the type and the value. The type is the first item to not
+ * compare bytes and str which can raise a BytesWarning exception. */
+PyAPI_FUNC(PyObject*) _PyCode_ConstantKey(PyObject *obj);
+
+PyAPI_FUNC(PyObject*) PyCode_Optimize(PyObject *code, PyObject* consts,
+ PyObject *names, PyObject *lnotab);
+
+
+PyAPI_FUNC(int) _PyCode_GetExtra(PyObject *code, Py_ssize_t index,
+ void **extra);
+PyAPI_FUNC(int) _PyCode_SetExtra(PyObject *code, Py_ssize_t index,
+ void *extra);
diff --git a/Include/cpython/dictobject.h b/Include/cpython/dictobject.h
index 64c012a012b793..e33a0d156fead2 100644
--- a/Include/cpython/dictobject.h
+++ b/Include/cpython/dictobject.h
@@ -62,8 +62,6 @@ PyObject *_PyDict_Pop_KnownHash(PyObject *, PyObject *, Py_hash_t, PyObject *);
PyObject *_PyDict_FromKeys(PyObject *, PyObject *, PyObject *);
#define _PyDict_HasSplitTable(d) ((d)->ma_values != NULL)
-PyAPI_FUNC(int) PyDict_ClearFreeList(void);
-
/* Like PyDict_Merge, but override can be 0, 1 or 2. If override is 0,
the first occurrence of a key wins, if override is 1, the last occurrence
of a key wins, if override is 2, a KeyError with conflicting key as
diff --git a/Include/cpython/fileutils.h b/Include/cpython/fileutils.h
new file mode 100644
index 00000000000000..e79d03e24f5771
--- /dev/null
+++ b/Include/cpython/fileutils.h
@@ -0,0 +1,165 @@
+#ifndef Py_CPYTHON_FILEUTILS_H
+# error "this header file must not be included directly"
+#endif
+
+typedef enum {
+ _Py_ERROR_UNKNOWN=0,
+ _Py_ERROR_STRICT,
+ _Py_ERROR_SURROGATEESCAPE,
+ _Py_ERROR_REPLACE,
+ _Py_ERROR_IGNORE,
+ _Py_ERROR_BACKSLASHREPLACE,
+ _Py_ERROR_SURROGATEPASS,
+ _Py_ERROR_XMLCHARREFREPLACE,
+ _Py_ERROR_OTHER
+} _Py_error_handler;
+
+PyAPI_FUNC(_Py_error_handler) _Py_GetErrorHandler(const char *errors);
+
+PyAPI_FUNC(int) _Py_DecodeLocaleEx(
+ const char *arg,
+ wchar_t **wstr,
+ size_t *wlen,
+ const char **reason,
+ int current_locale,
+ _Py_error_handler errors);
+
+PyAPI_FUNC(int) _Py_EncodeLocaleEx(
+ const wchar_t *text,
+ char **str,
+ size_t *error_pos,
+ const char **reason,
+ int current_locale,
+ _Py_error_handler errors);
+
+
+PyAPI_FUNC(PyObject *) _Py_device_encoding(int);
+
+#if defined(MS_WINDOWS) || defined(__APPLE__)
+ /* On Windows, the count parameter of read() is an int (bpo-9015, bpo-9611).
+ On macOS 10.13, read() and write() with more than INT_MAX bytes
+ fail with EINVAL (bpo-24658). */
+# define _PY_READ_MAX INT_MAX
+# define _PY_WRITE_MAX INT_MAX
+#else
+ /* write() should truncate the input to PY_SSIZE_T_MAX bytes,
+ but it's safer to do it ourself to have a portable behaviour */
+# define _PY_READ_MAX PY_SSIZE_T_MAX
+# define _PY_WRITE_MAX PY_SSIZE_T_MAX
+#endif
+
+#ifdef MS_WINDOWS
+struct _Py_stat_struct {
+ unsigned long st_dev;
+ uint64_t st_ino;
+ unsigned short st_mode;
+ int st_nlink;
+ int st_uid;
+ int st_gid;
+ unsigned long st_rdev;
+ __int64 st_size;
+ time_t st_atime;
+ int st_atime_nsec;
+ time_t st_mtime;
+ int st_mtime_nsec;
+ time_t st_ctime;
+ int st_ctime_nsec;
+ unsigned long st_file_attributes;
+ unsigned long st_reparse_tag;
+};
+#else
+# define _Py_stat_struct stat
+#endif
+
+PyAPI_FUNC(int) _Py_fstat(
+ int fd,
+ struct _Py_stat_struct *status);
+
+PyAPI_FUNC(int) _Py_fstat_noraise(
+ int fd,
+ struct _Py_stat_struct *status);
+
+PyAPI_FUNC(int) _Py_stat(
+ PyObject *path,
+ struct stat *status);
+
+PyAPI_FUNC(int) _Py_open(
+ const char *pathname,
+ int flags);
+
+PyAPI_FUNC(int) _Py_open_noraise(
+ const char *pathname,
+ int flags);
+
+PyAPI_FUNC(FILE *) _Py_wfopen(
+ const wchar_t *path,
+ const wchar_t *mode);
+
+PyAPI_FUNC(FILE*) _Py_fopen(
+ const char *pathname,
+ const char *mode);
+
+PyAPI_FUNC(FILE*) _Py_fopen_obj(
+ PyObject *path,
+ const char *mode);
+
+PyAPI_FUNC(Py_ssize_t) _Py_read(
+ int fd,
+ void *buf,
+ size_t count);
+
+PyAPI_FUNC(Py_ssize_t) _Py_write(
+ int fd,
+ const void *buf,
+ size_t count);
+
+PyAPI_FUNC(Py_ssize_t) _Py_write_noraise(
+ int fd,
+ const void *buf,
+ size_t count);
+
+#ifdef HAVE_READLINK
+PyAPI_FUNC(int) _Py_wreadlink(
+ const wchar_t *path,
+ wchar_t *buf,
+ /* Number of characters of 'buf' buffer
+ including the trailing NUL character */
+ size_t buflen);
+#endif
+
+#ifdef HAVE_REALPATH
+PyAPI_FUNC(wchar_t*) _Py_wrealpath(
+ const wchar_t *path,
+ wchar_t *resolved_path,
+ /* Number of characters of 'resolved_path' buffer
+ including the trailing NUL character */
+ size_t resolved_path_len);
+#endif
+
+#ifndef MS_WINDOWS
+PyAPI_FUNC(int) _Py_isabs(const wchar_t *path);
+#endif
+
+PyAPI_FUNC(int) _Py_abspath(const wchar_t *path, wchar_t **abspath_p);
+
+PyAPI_FUNC(wchar_t*) _Py_wgetcwd(
+ wchar_t *buf,
+ /* Number of characters of 'buf' buffer
+ including the trailing NUL character */
+ size_t buflen);
+
+PyAPI_FUNC(int) _Py_get_inheritable(int fd);
+
+PyAPI_FUNC(int) _Py_set_inheritable(int fd, int inheritable,
+ int *atomic_flag_works);
+
+PyAPI_FUNC(int) _Py_set_inheritable_async_safe(int fd, int inheritable,
+ int *atomic_flag_works);
+
+PyAPI_FUNC(int) _Py_dup(int fd);
+
+#ifndef MS_WINDOWS
+PyAPI_FUNC(int) _Py_get_blocking(int fd);
+
+PyAPI_FUNC(int) _Py_set_blocking(int fd, int blocking);
+#endif /* !MS_WINDOWS */
diff --git a/Include/cpython/frameobject.h b/Include/cpython/frameobject.h
index cf8c00c3528e0e..36a51baae8784b 100644
--- a/Include/cpython/frameobject.h
+++ b/Include/cpython/frameobject.h
@@ -14,7 +14,7 @@ typedef struct {
int b_level; /* value stack level to pop to */
} PyTryBlock;
-typedef struct _frame {
+struct _frame {
PyObject_VAR_HEAD
struct _frame *f_back; /* previous frame, or NULL */
PyCodeObject *f_code; /* code segment */
@@ -44,14 +44,14 @@ typedef struct _frame {
char f_executing; /* whether the frame is still executing */
PyTryBlock f_blockstack[CO_MAXBLOCKS]; /* for try and loop blocks */
PyObject *f_localsplus[1]; /* locals+stack, dynamically sized */
-} PyFrameObject;
+};
/* Standard object interface */
PyAPI_DATA(PyTypeObject) PyFrame_Type;
-#define PyFrame_Check(op) (Py_TYPE(op) == &PyFrame_Type)
+#define PyFrame_Check(op) Py_IS_TYPE(op, &PyFrame_Type)
PyAPI_FUNC(PyFrameObject *) PyFrame_New(PyThreadState *, PyCodeObject *,
PyObject *, PyObject *);
@@ -75,12 +75,9 @@ PyAPI_FUNC(void) PyFrame_LocalsToFast(PyFrameObject *, int);
PyAPI_FUNC(int) PyFrame_FastToLocalsWithError(PyFrameObject *f);
PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *);
-PyAPI_FUNC(int) PyFrame_ClearFreeList(void);
-
PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out);
-/* Return the line of code the frame is currently executing. */
-PyAPI_FUNC(int) PyFrame_GetLineNumber(PyFrameObject *);
+PyAPI_FUNC(PyFrameObject *) PyFrame_GetBack(PyFrameObject *frame);
#ifdef __cplusplus
}
diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h
index c5fa2b3857e7a2..df93a5539d48bd 100644
--- a/Include/cpython/initconfig.h
+++ b/Include/cpython/initconfig.h
@@ -147,6 +147,10 @@ typedef struct {
Set to 1 by -X faulthandler and PYTHONFAULTHANDLER. -1 means unset. */
int faulthandler;
+ /* Enable PEG parser?
+ 1 by default, set to 0 by -X oldparser and PYTHONOLDPARSER */
+ int _use_peg_parser;
+
/* Enable tracemalloc?
Set by -X tracemalloc=N and PYTHONTRACEMALLOC. -1 means unset */
int tracemalloc;
@@ -405,6 +409,10 @@ typedef struct {
/* If equal to 0, stop Python initialization before the "main" phase */
int _init_main;
+
+ /* If non-zero, disallow threads, subprocesses, and fork.
+ Default: 0. */
+ int _isolated_interpreter;
} PyConfig;
PyAPI_FUNC(void) PyConfig_InitPythonConfig(PyConfig *config);
diff --git a/Include/cpython/listobject.h b/Include/cpython/listobject.h
new file mode 100644
index 00000000000000..74fe3301a7ab71
--- /dev/null
+++ b/Include/cpython/listobject.h
@@ -0,0 +1,43 @@
+#ifndef Py_CPYTHON_LISTOBJECT_H
+# error "this header file must not be included directly"
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+ PyObject_VAR_HEAD
+ /* Vector of pointers to list elements. list[0] is ob_item[0], etc. */
+ PyObject **ob_item;
+
+ /* ob_item contains space for 'allocated' elements. The number
+ * currently in use is ob_size.
+ * Invariants:
+ * 0 <= ob_size <= allocated
+ * len(list) == ob_size
+ * ob_item == NULL implies ob_size == allocated == 0
+ * list.sort() temporarily sets allocated to -1 to detect mutations.
+ *
+ * Items must normally not be NULL, except during construction when
+ * the list is not yet visible outside the function that builds it.
+ */
+ Py_ssize_t allocated;
+} PyListObject;
+
+PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *);
+PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out);
+
+/* Macro, trading safety for speed */
+
+/* Cast argument to PyTupleObject* type. */
+#define _PyList_CAST(op) (assert(PyList_Check(op)), (PyListObject *)(op))
+
+#define PyList_GET_ITEM(op, i) (_PyList_CAST(op)->ob_item[i])
+#define PyList_SET_ITEM(op, i, v) (_PyList_CAST(op)->ob_item[i] = (v))
+#define PyList_GET_SIZE(op) Py_SIZE(_PyList_CAST(op))
+#define _PyList_ITEMS(op) (_PyList_CAST(op)->ob_item)
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/Include/cpython/methodobject.h b/Include/cpython/methodobject.h
new file mode 100644
index 00000000000000..7ecbfe3b5e2fe8
--- /dev/null
+++ b/Include/cpython/methodobject.h
@@ -0,0 +1,35 @@
+#ifndef Py_CPYTHON_METHODOBJECT_H
+# error "this header file must not be included directly"
+#endif
+
+PyAPI_DATA(PyTypeObject) PyCMethod_Type;
+
+#define PyCMethod_CheckExact(op) Py_IS_TYPE(op, &PyCMethod_Type)
+#define PyCMethod_Check(op) PyObject_TypeCheck(op, &PyCMethod_Type)
+
+/* Macros for direct access to these values. Type checks are *not*
+ done, so use with care. */
+#define PyCFunction_GET_FUNCTION(func) \
+ (((PyCFunctionObject *)func) -> m_ml -> ml_meth)
+#define PyCFunction_GET_SELF(func) \
+ (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \
+ NULL : ((PyCFunctionObject *)func) -> m_self)
+#define PyCFunction_GET_FLAGS(func) \
+ (((PyCFunctionObject *)func) -> m_ml -> ml_flags)
+#define PyCFunction_GET_CLASS(func) \
+ (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_METHOD ? \
+ ((PyCMethodObject *)func) -> mm_class : NULL)
+
+typedef struct {
+ PyObject_HEAD
+ PyMethodDef *m_ml; /* Description of the C function to call */
+ PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */
+ PyObject *m_module; /* The __module__ attribute, can be anything */
+ PyObject *m_weakreflist; /* List of weak references */
+ vectorcallfunc vectorcall;
+} PyCFunctionObject;
+
+typedef struct {
+ PyCFunctionObject func;
+ PyTypeObject *mm_class; /* Class that defines this method */
+} PyCMethodObject;
diff --git a/Include/cpython/object.h b/Include/cpython/object.h
index 0b5260eda7d8a2..444f832f5bd8d3 100644
--- a/Include/cpython/object.h
+++ b/Include/cpython/object.h
@@ -36,7 +36,7 @@ PyAPI_FUNC(Py_ssize_t) _Py_GetRefTotal(void);
PyId_foo is a static variable, either on block level or file level. On first
usage, the string "foo" is interned, and the structures are linked. On interpreter
- shutdown, all strings are released (through _PyUnicode_ClearStaticStrings).
+ shutdown, all strings are released.
Alternatively, _Py_static_string allows choosing the variable name.
_PyUnicode_FromId returns a borrowed reference to the interned string.
@@ -190,7 +190,7 @@ typedef struct {
* backwards-compatibility */
typedef Py_ssize_t printfunc;
-typedef struct _typeobject {
+struct _typeobject {
PyObject_VAR_HEAD
const char *tp_name; /* For printing, in format "." */
Py_ssize_t tp_basicsize, tp_itemsize; /* For allocation */
@@ -271,7 +271,7 @@ typedef struct _typeobject {
destructor tp_finalize;
vectorcallfunc tp_vectorcall;
-} PyTypeObject;
+};
/* The *real* layout of a type object when allocated on the heap */
typedef struct _heaptypeobject {
@@ -289,6 +289,7 @@ typedef struct _heaptypeobject {
PyBufferProcs as_buffer;
PyObject *ht_name, *ht_slots, *ht_qualname;
struct _dictkeysobject *ht_cached_keys;
+ PyObject *ht_module;
/* here are optional user slots, followed by the members. */
} PyHeapTypeObject;
@@ -385,11 +386,6 @@ PyAPI_DATA(PyTypeObject) _PyNotImplemented_Type;
*/
PyAPI_DATA(int) _Py_SwappedOp[];
-/* This is the old private API, invoked by the macros before 3.2.4.
- Kept for binary compatibility of extensions using the stable ABI. */
-PyAPI_FUNC(void) _PyTrash_deposit_object(PyObject*);
-PyAPI_FUNC(void) _PyTrash_destroy_chain(void);
-
PyAPI_FUNC(void)
_PyDebugAllocatorStats(FILE *out, const char *block_name, int num_blocks,
size_t sizeof_block);
@@ -507,10 +503,23 @@ partially-deallocated object. To check this, the tp_dealloc function must be
passed as second argument to Py_TRASHCAN_BEGIN().
*/
-/* The new thread-safe private API, invoked by the macros below. */
+/* This is the old private API, invoked by the macros before 3.2.4.
+ Kept for binary compatibility of extensions using the stable ABI. */
+PyAPI_FUNC(void) _PyTrash_deposit_object(PyObject*);
+PyAPI_FUNC(void) _PyTrash_destroy_chain(void);
+
+/* This is the old private API, invoked by the macros before 3.9.
+ Kept for binary compatibility of extensions using the stable ABI. */
PyAPI_FUNC(void) _PyTrash_thread_deposit_object(PyObject*);
PyAPI_FUNC(void) _PyTrash_thread_destroy_chain(void);
+/* Forward declarations for PyThreadState */
+struct _ts;
+
+/* Python 3.9 private API, invoked by the macros below. */
+PyAPI_FUNC(int) _PyTrash_begin(struct _ts *tstate, PyObject *op);
+PyAPI_FUNC(void) _PyTrash_end(struct _ts *tstate);
+
#define PyTrash_UNWIND_LEVEL 50
#define Py_TRASHCAN_BEGIN_CONDITION(op, cond) \
@@ -520,24 +529,19 @@ PyAPI_FUNC(void) _PyTrash_thread_destroy_chain(void);
* is run normally without involving the trashcan */ \
if (cond) { \
_tstate = PyThreadState_GET(); \
- if (_tstate->trash_delete_nesting >= PyTrash_UNWIND_LEVEL) { \
- /* Store the object (to be deallocated later) and jump past \
- * Py_TRASHCAN_END, skipping the body of the deallocator */ \
- _PyTrash_thread_deposit_object(_PyObject_CAST(op)); \
+ if (_PyTrash_begin(_tstate, _PyObject_CAST(op))) { \
break; \
} \
- ++_tstate->trash_delete_nesting; \
}
/* The body of the deallocator is here. */
#define Py_TRASHCAN_END \
if (_tstate) { \
- --_tstate->trash_delete_nesting; \
- if (_tstate->trash_delete_later && _tstate->trash_delete_nesting <= 0) \
- _PyTrash_thread_destroy_chain(); \
+ _PyTrash_end(_tstate); \
} \
} while (0);
-#define Py_TRASHCAN_BEGIN(op, dealloc) Py_TRASHCAN_BEGIN_CONDITION(op, \
+#define Py_TRASHCAN_BEGIN(op, dealloc) \
+ Py_TRASHCAN_BEGIN_CONDITION(op, \
Py_TYPE(op)->tp_dealloc == (destructor)(dealloc))
/* For backwards compatibility, these macros enable the trashcan
diff --git a/Include/cpython/objimpl.h b/Include/cpython/objimpl.h
index 3f148146f67a40..b835936db70112 100644
--- a/Include/cpython/objimpl.h
+++ b/Include/cpython/objimpl.h
@@ -6,6 +6,56 @@
extern "C" {
#endif
+#define _PyObject_SIZE(typeobj) ( (typeobj)->tp_basicsize )
+
+/* _PyObject_VAR_SIZE returns the number of bytes (as size_t) allocated for a
+ vrbl-size object with nitems items, exclusive of gc overhead (if any). The
+ value is rounded up to the closest multiple of sizeof(void *), in order to
+ ensure that pointer fields at the end of the object are correctly aligned
+ for the platform (this is of special importance for subclasses of, e.g.,
+ str or int, so that pointers can be stored after the embedded data).
+
+ Note that there's no memory wastage in doing this, as malloc has to
+ return (at worst) pointer-aligned memory anyway.
+*/
+#if ((SIZEOF_VOID_P - 1) & SIZEOF_VOID_P) != 0
+# error "_PyObject_VAR_SIZE requires SIZEOF_VOID_P be a power of 2"
+#endif
+
+#define _PyObject_VAR_SIZE(typeobj, nitems) \
+ _Py_SIZE_ROUND_UP((typeobj)->tp_basicsize + \
+ (nitems)*(typeobj)->tp_itemsize, \
+ SIZEOF_VOID_P)
+
+
+/* This example code implements an object constructor with a custom
+ allocator, where PyObject_New is inlined, and shows the important
+ distinction between two steps (at least):
+ 1) the actual allocation of the object storage;
+ 2) the initialization of the Python specific fields
+ in this storage with PyObject_{Init, InitVar}.
+
+ PyObject *
+ YourObject_New(...)
+ {
+ PyObject *op;
+
+ op = (PyObject *) Your_Allocator(_PyObject_SIZE(YourTypeStruct));
+ if (op == NULL)
+ return PyErr_NoMemory();
+
+ PyObject_Init(op, &YourTypeStruct);
+
+ op->ob_field = value;
+ ...
+ return op;
+ }
+
+ Note that in C++, the use of the new operator usually implies that
+ the 1st step is performed automatically for you, so in a C++ class
+ constructor you would start directly with PyObject_Init/InitVar. */
+
+
/* Inline functions trading binary compatibility for speed:
PyObject_INIT() is the fast version of PyObject_Init(), and
PyObject_INIT_VAR() is the fast version of PyObject_InitVar().
@@ -15,7 +65,7 @@ static inline PyObject*
_PyObject_INIT(PyObject *op, PyTypeObject *typeobj)
{
assert(op != NULL);
- Py_TYPE(op) = typeobj;
+ Py_SET_TYPE(op, typeobj);
if (PyType_GetFlags(typeobj) & Py_TPFLAGS_HEAPTYPE) {
Py_INCREF(typeobj);
}
@@ -30,7 +80,7 @@ static inline PyVarObject*
_PyObject_INIT_VAR(PyVarObject *op, PyTypeObject *typeobj, Py_ssize_t size)
{
assert(op != NULL);
- Py_SIZE(op) = size;
+ Py_SET_SIZE(op, size);
PyObject_INIT((PyObject *)op, typeobj);
return op;
}
@@ -70,66 +120,16 @@ PyAPI_FUNC(Py_ssize_t) _PyGC_CollectNoFail(void);
PyAPI_FUNC(Py_ssize_t) _PyGC_CollectIfEnabled(void);
-/* Test if an object has a GC head */
-#define PyObject_IS_GC(o) \
- (PyType_IS_GC(Py_TYPE(o)) \
- && (Py_TYPE(o)->tp_is_gc == NULL || Py_TYPE(o)->tp_is_gc(o)))
+/* Test if an object implements the garbage collector protocol */
+PyAPI_FUNC(int) PyObject_IS_GC(PyObject *obj);
-/* GC information is stored BEFORE the object structure. */
-typedef struct {
- // Pointer to next object in the list.
- // 0 means the object is not tracked
- uintptr_t _gc_next;
-
- // Pointer to previous object in the list.
- // Lowest two bits are used for flags documented later.
- uintptr_t _gc_prev;
-} PyGC_Head;
-
-#define _Py_AS_GC(o) ((PyGC_Head *)(o)-1)
-
-/* True if the object is currently tracked by the GC. */
-#define _PyObject_GC_IS_TRACKED(o) (_Py_AS_GC(o)->_gc_next != 0)
-
-/* True if the object may be tracked by the GC in the future, or already is.
- This can be useful to implement some optimizations. */
-#define _PyObject_GC_MAY_BE_TRACKED(obj) \
- (PyObject_IS_GC(obj) && \
- (!PyTuple_CheckExact(obj) || _PyObject_GC_IS_TRACKED(obj)))
-
-
-/* Bit flags for _gc_prev */
-/* Bit 0 is set when tp_finalize is called */
-#define _PyGC_PREV_MASK_FINALIZED (1)
-/* Bit 1 is set when the object is in generation which is GCed currently. */
-#define _PyGC_PREV_MASK_COLLECTING (2)
-/* The (N-2) most significant bits contain the real address. */
-#define _PyGC_PREV_SHIFT (2)
-#define _PyGC_PREV_MASK (((uintptr_t) -1) << _PyGC_PREV_SHIFT)
-
-// Lowest bit of _gc_next is used for flags only in GC.
-// But it is always 0 for normal code.
-#define _PyGCHead_NEXT(g) ((PyGC_Head*)(g)->_gc_next)
-#define _PyGCHead_SET_NEXT(g, p) ((g)->_gc_next = (uintptr_t)(p))
-
-// Lowest two bits of _gc_prev is used for _PyGC_PREV_MASK_* flags.
-#define _PyGCHead_PREV(g) ((PyGC_Head*)((g)->_gc_prev & _PyGC_PREV_MASK))
-#define _PyGCHead_SET_PREV(g, p) do { \
- assert(((uintptr_t)p & ~_PyGC_PREV_MASK) == 0); \
- (g)->_gc_prev = ((g)->_gc_prev & ~_PyGC_PREV_MASK) \
- | ((uintptr_t)(p)); \
- } while (0)
-
-#define _PyGCHead_FINALIZED(g) \
- (((g)->_gc_prev & _PyGC_PREV_MASK_FINALIZED) != 0)
-#define _PyGCHead_SET_FINALIZED(g) \
- ((g)->_gc_prev |= _PyGC_PREV_MASK_FINALIZED)
-
-#define _PyGC_FINALIZED(o) \
- _PyGCHead_FINALIZED(_Py_AS_GC(o))
-#define _PyGC_SET_FINALIZED(o) \
- _PyGCHead_SET_FINALIZED(_Py_AS_GC(o))
+/* Code built with Py_BUILD_CORE must include pycore_gc.h instead which
+ defines a different _PyGC_FINALIZED() macro. */
+#ifndef Py_BUILD_CORE
+ // Kept for backward compatibility with Python 3.8
+# define _PyGC_FINALIZED(o) PyObject_GC_IsFinalized(o)
+#endif
PyAPI_FUNC(PyObject *) _PyObject_GC_Malloc(size_t size);
PyAPI_FUNC(PyObject *) _PyObject_GC_Calloc(size_t size);
@@ -138,8 +138,7 @@ PyAPI_FUNC(PyObject *) _PyObject_GC_Calloc(size_t size);
/* Test if a type supports weak references */
#define PyType_SUPPORTS_WEAKREFS(t) ((t)->tp_weaklistoffset > 0)
-#define PyObject_GET_WEAKREFS_LISTPTR(o) \
- ((PyObject **) (((char *) (o)) + Py_TYPE(o)->tp_weaklistoffset))
+PyAPI_FUNC(PyObject **) PyObject_GET_WEAKREFS_LISTPTR(PyObject *op);
#ifdef __cplusplus
}
diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h
index f8480fb79e5575..dd3c2caa0cc043 100644
--- a/Include/cpython/pyerrors.h
+++ b/Include/cpython/pyerrors.h
@@ -75,7 +75,7 @@ typedef PyOSErrorObject PyWindowsErrorObject;
/* Error handling definitions */
PyAPI_FUNC(void) _PyErr_SetKeyError(PyObject *);
-_PyErr_StackItem *_PyErr_GetTopmostException(PyThreadState *tstate);
+PyAPI_FUNC(_PyErr_StackItem*) _PyErr_GetTopmostException(PyThreadState *tstate);
PyAPI_FUNC(void) _PyErr_GetExcInfo(PyThreadState *, PyObject **, PyObject **, PyObject **);
/* Context manipulation (PEP 3134) */
@@ -178,6 +178,17 @@ PyAPI_FUNC(void) _PyErr_WriteUnraisableMsg(
const char *err_msg,
PyObject *obj);
+PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalErrorFunc(
+ const char *func,
+ const char *message);
+
+PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalErrorFormat(
+ const char *func,
+ const char *format,
+ ...);
+
+#define Py_FatalError(message) _Py_FatalErrorFunc(__func__, message)
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h
index a01e9c94f12d75..eb523b82e182d8 100644
--- a/Include/cpython/pylifecycle.h
+++ b/Include/cpython/pylifecycle.h
@@ -65,6 +65,8 @@ PyAPI_FUNC(int) _Py_CoerceLegacyLocale(int warn);
PyAPI_FUNC(int) _Py_LegacyLocaleDetected(int warn);
PyAPI_FUNC(char *) _Py_SetLocaleFromEnv(int category);
+PyAPI_FUNC(PyThreadState *) _Py_NewInterpreter(int isolated_subinterpreter);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h
index d1792575c97378..f292da1d3c6c5e 100644
--- a/Include/cpython/pystate.h
+++ b/Include/cpython/pystate.h
@@ -16,7 +16,7 @@ PyAPI_FUNC(PyObject *) _PyInterpreterState_GetMainModule(PyInterpreterState *);
/* State unique per thread */
/* Py_tracefunc return -1 when raising an exception, or 0 for success. */
-typedef int (*Py_tracefunc)(PyObject *, struct _frame *, int, PyObject *);
+typedef int (*Py_tracefunc)(PyObject *, PyFrameObject *, int, PyObject *);
/* The following values are used for 'what' for tracefunc functions
*
@@ -55,7 +55,8 @@ struct _ts {
struct _ts *next;
PyInterpreterState *interp;
- struct _frame *frame;
+ /* Borrowed reference to the current frame (it can be NULL) */
+ PyFrameObject *frame;
int recursion_depth;
char overflowed; /* The stack has overflowed. Allow 50 more calls
to handle the runtime error. */
@@ -139,13 +140,8 @@ struct _ts {
};
-/* Get the current interpreter state.
-
- Issue a fatal error if there no current Python thread state or no current
- interpreter. It cannot return NULL.
-
- The caller must hold the GIL.*/
-PyAPI_FUNC(PyInterpreterState *) _PyInterpreterState_Get(void);
+// Alias for backward compatibility with Python 3.8
+#define _PyInterpreterState_Get PyInterpreterState_Get
PyAPI_FUNC(PyThreadState *) _PyThreadState_Prealloc(PyInterpreterState *);
@@ -153,6 +149,8 @@ PyAPI_FUNC(PyThreadState *) _PyThreadState_Prealloc(PyInterpreterState *);
* if it is NULL. */
PyAPI_FUNC(PyThreadState *) _PyThreadState_UncheckedGet(void);
+PyAPI_FUNC(PyObject *) _PyThreadState_GetDict(PyThreadState *tstate);
+
/* PyGILState */
/* Helper/diagnostic function - return 1 if the current thread
@@ -167,7 +165,7 @@ PyAPI_FUNC(int) PyGILState_Check(void);
This function doesn't check for error. Return NULL before _PyGILState_Init()
is called and after _PyGILState_Fini() is called.
- See also _PyInterpreterState_Get() and _PyInterpreterState_GET_UNSAFE(). */
+ See also _PyInterpreterState_Get() and _PyInterpreterState_GET(). */
PyAPI_FUNC(PyInterpreterState *) _PyGILState_GetInterpreterStateUnsafe(void);
/* The implementation of sys._current_frames() Returns a dict mapping
@@ -184,7 +182,22 @@ PyAPI_FUNC(PyThreadState *) PyInterpreterState_ThreadHead(PyInterpreterState *);
PyAPI_FUNC(PyThreadState *) PyThreadState_Next(PyThreadState *);
PyAPI_FUNC(void) PyThreadState_DeleteCurrent(void);
-typedef struct _frame *(*PyThreadFrameGetter)(PyThreadState *self_);
+/* Frame evaluation API */
+
+typedef PyObject* (*_PyFrameEvalFunction)(PyThreadState *tstate, PyFrameObject *, int);
+
+PyAPI_FUNC(_PyFrameEvalFunction) _PyInterpreterState_GetEvalFrameFunc(
+ PyInterpreterState *interp);
+PyAPI_FUNC(void) _PyInterpreterState_SetEvalFrameFunc(
+ PyInterpreterState *interp,
+ _PyFrameEvalFunction eval_frame);
+
+PyAPI_FUNC(const PyConfig*) _PyInterpreterState_GetConfig(PyInterpreterState *interp);
+
+// Get the configuration of the current interpreter.
+// The caller must hold the GIL.
+PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void);
+
/* cross-interpreter data */
diff --git a/Include/cpython/sysmodule.h b/Include/cpython/sysmodule.h
index 72d8ffed29fd6f..1802b5b300018e 100644
--- a/Include/cpython/sysmodule.h
+++ b/Include/cpython/sysmodule.h
@@ -13,7 +13,10 @@ PyAPI_FUNC(size_t) _PySys_GetSizeOf(PyObject *);
typedef int(*Py_AuditHookFunction)(const char *, PyObject *, void *);
-PyAPI_FUNC(int) PySys_Audit(const char*, const char *, ...);
+PyAPI_FUNC(int) PySys_Audit(
+ const char *event,
+ const char *argFormat,
+ ...);
PyAPI_FUNC(int) PySys_AddAuditHook(Py_AuditHookFunction, void*);
#ifdef __cplusplus
diff --git a/Include/cpython/traceback.h b/Include/cpython/traceback.h
index 746097daaf9400..837470c3ba2bca 100644
--- a/Include/cpython/traceback.h
+++ b/Include/cpython/traceback.h
@@ -9,7 +9,7 @@ extern "C" {
typedef struct _traceback {
PyObject_HEAD
struct _traceback *tb_next;
- struct _frame *tb_frame;
+ PyFrameObject *tb_frame;
int tb_lasti;
int tb_lineno;
} PyTracebackObject;
diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h
index 54a13e32ba22b5..4fd674ffea36ea 100644
--- a/Include/cpython/unicodeobject.h
+++ b/Include/cpython/unicodeobject.h
@@ -726,12 +726,6 @@ PyAPI_FUNC(int) _PyUnicode_FormatAdvancedWriter(
Py_ssize_t start,
Py_ssize_t end);
-/* --- wchar_t support for platforms which support it --------------------- */
-
-#ifdef HAVE_WCHAR_H
-PyAPI_FUNC(void*) _PyUnicode_AsKind(PyObject *s, unsigned int kind);
-#endif
-
/* --- Manage the default encoding ---------------------------------------- */
/* Returns a pointer to the default encoding (UTF-8) of the
@@ -746,12 +740,6 @@ PyAPI_FUNC(void*) _PyUnicode_AsKind(PyObject *s, unsigned int kind);
_PyUnicode_AsStringAndSize is a #define for PyUnicode_AsUTF8AndSize to
support the previous internal function with the same behaviour.
-
- *** This API is for interpreter INTERNAL USE ONLY and will likely
- *** be removed or changed in the future.
-
- *** If you need to access the Unicode object as UTF-8 bytes string,
- *** please use PyUnicode_AsUTF8String() instead.
*/
PyAPI_FUNC(const char *) PyUnicode_AsUTF8AndSize(
@@ -1227,13 +1215,13 @@ Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) PyUnicode_AsUnicodeCopy(
/* Return an interned Unicode object for an Identifier; may fail if there is no memory.*/
PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*);
-/* Clear all static strings. */
-PyAPI_FUNC(void) _PyUnicode_ClearStaticStrings(void);
/* Fast equality check when the inputs are known to be exact unicode types
and where the hash values are equal (i.e. a very probable match) */
PyAPI_FUNC(int) _PyUnicode_EQ(PyObject *, PyObject *);
+PyAPI_FUNC(Py_ssize_t) _PyUnicode_ScanIdentifier(PyObject *);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/datetime.h b/Include/datetime.h
index 00507cb85cc04a..5d9f2558f924d0 100644
--- a/Include/datetime.h
+++ b/Include/datetime.h
@@ -196,19 +196,19 @@ static PyDateTime_CAPI *PyDateTimeAPI = NULL;
/* Macros for type checking when not building the Python core. */
#define PyDate_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->DateType)
-#define PyDate_CheckExact(op) (Py_TYPE(op) == PyDateTimeAPI->DateType)
+#define PyDate_CheckExact(op) Py_IS_TYPE(op, PyDateTimeAPI->DateType)
#define PyDateTime_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->DateTimeType)
-#define PyDateTime_CheckExact(op) (Py_TYPE(op) == PyDateTimeAPI->DateTimeType)
+#define PyDateTime_CheckExact(op) Py_IS_TYPE(op, PyDateTimeAPI->DateTimeType)
#define PyTime_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->TimeType)
-#define PyTime_CheckExact(op) (Py_TYPE(op) == PyDateTimeAPI->TimeType)
+#define PyTime_CheckExact(op) Py_IS_TYPE(op, PyDateTimeAPI->TimeType)
#define PyDelta_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->DeltaType)
-#define PyDelta_CheckExact(op) (Py_TYPE(op) == PyDateTimeAPI->DeltaType)
+#define PyDelta_CheckExact(op) Py_IS_TYPE(op, PyDateTimeAPI->DeltaType)
#define PyTZInfo_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->TZInfoType)
-#define PyTZInfo_CheckExact(op) (Py_TYPE(op) == PyDateTimeAPI->TZInfoType)
+#define PyTZInfo_CheckExact(op) Py_IS_TYPE(op, PyDateTimeAPI->TZInfoType)
/* Macros for accessing constructors in a simplified fashion. */
diff --git a/Include/dictobject.h b/Include/dictobject.h
index b37573ad48c003..c88b0aa0a5d0f9 100644
--- a/Include/dictobject.h
+++ b/Include/dictobject.h
@@ -16,7 +16,7 @@ PyAPI_DATA(PyTypeObject) PyDict_Type;
#define PyDict_Check(op) \
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_DICT_SUBCLASS)
-#define PyDict_CheckExact(op) (Py_TYPE(op) == &PyDict_Type)
+#define PyDict_CheckExact(op) Py_IS_TYPE(op, &PyDict_Type)
PyAPI_FUNC(PyObject *) PyDict_New(void);
PyAPI_FUNC(PyObject *) PyDict_GetItem(PyObject *mp, PyObject *key);
diff --git a/Include/errcode.h b/Include/errcode.h
index b37cd261d5ec4d..790518b8b7730e 100644
--- a/Include/errcode.h
+++ b/Include/errcode.h
@@ -29,7 +29,6 @@ extern "C" {
#define E_EOFS 23 /* EOF in triple-quoted string */
#define E_EOLS 24 /* EOL in single-quoted string */
#define E_LINECONT 25 /* Unexpected characters after a line continuation */
-#define E_IDENTIFIER 26 /* Invalid characters in identifier */
#define E_BADSINGLE 27 /* Ill-formed single statement input */
#ifdef __cplusplus
diff --git a/Include/fileutils.h b/Include/fileutils.h
index a9655bbf9a5f05..12bd071c49c04a 100644
--- a/Include/fileutils.h
+++ b/Include/fileutils.h
@@ -18,173 +18,12 @@ PyAPI_FUNC(char*) _Py_EncodeLocaleRaw(
size_t *error_pos);
#endif
-
-#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03080000
-typedef enum {
- _Py_ERROR_UNKNOWN=0,
- _Py_ERROR_STRICT,
- _Py_ERROR_SURROGATEESCAPE,
- _Py_ERROR_REPLACE,
- _Py_ERROR_IGNORE,
- _Py_ERROR_BACKSLASHREPLACE,
- _Py_ERROR_SURROGATEPASS,
- _Py_ERROR_XMLCHARREFREPLACE,
- _Py_ERROR_OTHER
-} _Py_error_handler;
-
-PyAPI_FUNC(_Py_error_handler) _Py_GetErrorHandler(const char *errors);
-
-PyAPI_FUNC(int) _Py_DecodeLocaleEx(
- const char *arg,
- wchar_t **wstr,
- size_t *wlen,
- const char **reason,
- int current_locale,
- _Py_error_handler errors);
-
-PyAPI_FUNC(int) _Py_EncodeLocaleEx(
- const wchar_t *text,
- char **str,
- size_t *error_pos,
- const char **reason,
- int current_locale,
- _Py_error_handler errors);
-#endif
-
#ifndef Py_LIMITED_API
-PyAPI_FUNC(PyObject *) _Py_device_encoding(int);
-
-#if defined(MS_WINDOWS) || defined(__APPLE__)
- /* On Windows, the count parameter of read() is an int (bpo-9015, bpo-9611).
- On macOS 10.13, read() and write() with more than INT_MAX bytes
- fail with EINVAL (bpo-24658). */
-# define _PY_READ_MAX INT_MAX
-# define _PY_WRITE_MAX INT_MAX
-#else
- /* write() should truncate the input to PY_SSIZE_T_MAX bytes,
- but it's safer to do it ourself to have a portable behaviour */
-# define _PY_READ_MAX PY_SSIZE_T_MAX
-# define _PY_WRITE_MAX PY_SSIZE_T_MAX
+# define Py_CPYTHON_FILEUTILS_H
+# include "cpython/fileutils.h"
+# undef Py_CPYTHON_FILEUTILS_H
#endif
-#ifdef MS_WINDOWS
-struct _Py_stat_struct {
- unsigned long st_dev;
- uint64_t st_ino;
- unsigned short st_mode;
- int st_nlink;
- int st_uid;
- int st_gid;
- unsigned long st_rdev;
- __int64 st_size;
- time_t st_atime;
- int st_atime_nsec;
- time_t st_mtime;
- int st_mtime_nsec;
- time_t st_ctime;
- int st_ctime_nsec;
- unsigned long st_file_attributes;
- unsigned long st_reparse_tag;
-};
-#else
-# define _Py_stat_struct stat
-#endif
-
-PyAPI_FUNC(int) _Py_fstat(
- int fd,
- struct _Py_stat_struct *status);
-
-PyAPI_FUNC(int) _Py_fstat_noraise(
- int fd,
- struct _Py_stat_struct *status);
-
-PyAPI_FUNC(int) _Py_stat(
- PyObject *path,
- struct stat *status);
-
-PyAPI_FUNC(int) _Py_open(
- const char *pathname,
- int flags);
-
-PyAPI_FUNC(int) _Py_open_noraise(
- const char *pathname,
- int flags);
-
-PyAPI_FUNC(FILE *) _Py_wfopen(
- const wchar_t *path,
- const wchar_t *mode);
-
-PyAPI_FUNC(FILE*) _Py_fopen(
- const char *pathname,
- const char *mode);
-
-PyAPI_FUNC(FILE*) _Py_fopen_obj(
- PyObject *path,
- const char *mode);
-
-PyAPI_FUNC(Py_ssize_t) _Py_read(
- int fd,
- void *buf,
- size_t count);
-
-PyAPI_FUNC(Py_ssize_t) _Py_write(
- int fd,
- const void *buf,
- size_t count);
-
-PyAPI_FUNC(Py_ssize_t) _Py_write_noraise(
- int fd,
- const void *buf,
- size_t count);
-
-#ifdef HAVE_READLINK
-PyAPI_FUNC(int) _Py_wreadlink(
- const wchar_t *path,
- wchar_t *buf,
- /* Number of characters of 'buf' buffer
- including the trailing NUL character */
- size_t buflen);
-#endif
-
-#ifdef HAVE_REALPATH
-PyAPI_FUNC(wchar_t*) _Py_wrealpath(
- const wchar_t *path,
- wchar_t *resolved_path,
- /* Number of characters of 'resolved_path' buffer
- including the trailing NUL character */
- size_t resolved_path_len);
-#endif
-
-#ifndef MS_WINDOWS
-PyAPI_FUNC(int) _Py_isabs(const wchar_t *path);
-#endif
-
-PyAPI_FUNC(int) _Py_abspath(const wchar_t *path, wchar_t **abspath_p);
-
-PyAPI_FUNC(wchar_t*) _Py_wgetcwd(
- wchar_t *buf,
- /* Number of characters of 'buf' buffer
- including the trailing NUL character */
- size_t buflen);
-
-PyAPI_FUNC(int) _Py_get_inheritable(int fd);
-
-PyAPI_FUNC(int) _Py_set_inheritable(int fd, int inheritable,
- int *atomic_flag_works);
-
-PyAPI_FUNC(int) _Py_set_inheritable_async_safe(int fd, int inheritable,
- int *atomic_flag_works);
-
-PyAPI_FUNC(int) _Py_dup(int fd);
-
-#ifndef MS_WINDOWS
-PyAPI_FUNC(int) _Py_get_blocking(int fd);
-
-PyAPI_FUNC(int) _Py_set_blocking(int fd, int blocking);
-#endif /* !MS_WINDOWS */
-
-#endif /* Py_LIMITED_API */
-
#ifdef __cplusplus
}
#endif
diff --git a/Include/floatobject.h b/Include/floatobject.h
index 0fb9fc4e0fae75..e994aa8f29da48 100644
--- a/Include/floatobject.h
+++ b/Include/floatobject.h
@@ -21,7 +21,7 @@ typedef struct {
PyAPI_DATA(PyTypeObject) PyFloat_Type;
#define PyFloat_Check(op) PyObject_TypeCheck(op, &PyFloat_Type)
-#define PyFloat_CheckExact(op) (Py_TYPE(op) == &PyFloat_Type)
+#define PyFloat_CheckExact(op) Py_IS_TYPE(op, &PyFloat_Type)
#ifdef Py_NAN
#define Py_RETURN_NAN return PyFloat_FromDouble(Py_NAN)
@@ -100,9 +100,6 @@ PyAPI_FUNC(double) _PyFloat_Unpack2(const unsigned char *p, int le);
PyAPI_FUNC(double) _PyFloat_Unpack4(const unsigned char *p, int le);
PyAPI_FUNC(double) _PyFloat_Unpack8(const unsigned char *p, int le);
-/* free list api */
-PyAPI_FUNC(int) PyFloat_ClearFreeList(void);
-
PyAPI_FUNC(void) _PyFloat_DebugMallocStats(FILE* out);
/* Format the object based on the format_spec, as defined in PEP 3101
diff --git a/Include/frameobject.h b/Include/frameobject.h
index 1460e2210e3173..c118af1201a4c8 100644
--- a/Include/frameobject.h
+++ b/Include/frameobject.h
@@ -6,9 +6,7 @@
extern "C" {
#endif
-/* There are currently no frame related APIs in the stable ABI
- * (they're all in the full CPython-specific API)
- */
+#include "pyframe.h"
#ifndef Py_LIMITED_API
# define Py_CPYTHON_FRAMEOBJECT_H
diff --git a/Include/funcobject.h b/Include/funcobject.h
index c6dd67d6124d3e..c5cc9d261a314e 100644
--- a/Include/funcobject.h
+++ b/Include/funcobject.h
@@ -43,7 +43,7 @@ typedef struct {
PyAPI_DATA(PyTypeObject) PyFunction_Type;
-#define PyFunction_Check(op) (Py_TYPE(op) == &PyFunction_Type)
+#define PyFunction_Check(op) Py_IS_TYPE(op, &PyFunction_Type)
PyAPI_FUNC(PyObject *) PyFunction_New(PyObject *, PyObject *);
PyAPI_FUNC(PyObject *) PyFunction_NewWithQualName(PyObject *, PyObject *, PyObject *);
diff --git a/Include/genericaliasobject.h b/Include/genericaliasobject.h
new file mode 100644
index 00000000000000..cf002976b27cd7
--- /dev/null
+++ b/Include/genericaliasobject.h
@@ -0,0 +1,14 @@
+// Implementation of PEP 585: support list[int] etc.
+#ifndef Py_GENERICALIASOBJECT_H
+#define Py_GENERICALIASOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_FUNC(PyObject *) Py_GenericAlias(PyObject *, PyObject *);
+PyAPI_DATA(PyTypeObject) Py_GenericAliasType;
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_GENERICALIASOBJECT_H */
diff --git a/Include/genobject.h b/Include/genobject.h
index 5ee9a2831d12bf..8ffd15646f084e 100644
--- a/Include/genobject.h
+++ b/Include/genobject.h
@@ -10,14 +10,12 @@ extern "C" {
#include "pystate.h" /* _PyErr_StackItem */
-struct _frame; /* Avoid including frameobject.h */
-
/* _PyGenObject_HEAD defines the initial segment of generator
and coroutine objects. */
#define _PyGenObject_HEAD(prefix) \
PyObject_HEAD \
/* Note: gi_frame can be NULL if the generator is "finished" */ \
- struct _frame *prefix##_frame; \
+ PyFrameObject *prefix##_frame; \
/* True if generator is being executed. */ \
char prefix##_running; \
/* The code object backing the generator */ \
@@ -38,10 +36,10 @@ typedef struct {
PyAPI_DATA(PyTypeObject) PyGen_Type;
#define PyGen_Check(op) PyObject_TypeCheck(op, &PyGen_Type)
-#define PyGen_CheckExact(op) (Py_TYPE(op) == &PyGen_Type)
+#define PyGen_CheckExact(op) Py_IS_TYPE(op, &PyGen_Type)
-PyAPI_FUNC(PyObject *) PyGen_New(struct _frame *);
-PyAPI_FUNC(PyObject *) PyGen_NewWithQualName(struct _frame *,
+PyAPI_FUNC(PyObject *) PyGen_New(PyFrameObject *);
+PyAPI_FUNC(PyObject *) PyGen_NewWithQualName(PyFrameObject *,
PyObject *name, PyObject *qualname);
PyAPI_FUNC(int) _PyGen_SetStopIterationValue(PyObject *);
PyAPI_FUNC(int) _PyGen_FetchStopIterationValue(PyObject **);
@@ -58,9 +56,9 @@ typedef struct {
PyAPI_DATA(PyTypeObject) PyCoro_Type;
PyAPI_DATA(PyTypeObject) _PyCoroWrapper_Type;
-#define PyCoro_CheckExact(op) (Py_TYPE(op) == &PyCoro_Type)
+#define PyCoro_CheckExact(op) Py_IS_TYPE(op, &PyCoro_Type)
PyObject *_PyCoro_GetAwaitableIter(PyObject *o);
-PyAPI_FUNC(PyObject *) PyCoro_New(struct _frame *,
+PyAPI_FUNC(PyObject *) PyCoro_New(PyFrameObject *,
PyObject *name, PyObject *qualname);
/* Asynchronous Generators */
@@ -86,15 +84,13 @@ PyAPI_DATA(PyTypeObject) _PyAsyncGenASend_Type;
PyAPI_DATA(PyTypeObject) _PyAsyncGenWrappedValue_Type;
PyAPI_DATA(PyTypeObject) _PyAsyncGenAThrow_Type;
-PyAPI_FUNC(PyObject *) PyAsyncGen_New(struct _frame *,
+PyAPI_FUNC(PyObject *) PyAsyncGen_New(PyFrameObject *,
PyObject *name, PyObject *qualname);
-#define PyAsyncGen_CheckExact(op) (Py_TYPE(op) == &PyAsyncGen_Type)
+#define PyAsyncGen_CheckExact(op) Py_IS_TYPE(op, &PyAsyncGen_Type)
PyObject *_PyAsyncGenValueWrapperNew(PyObject *);
-int PyAsyncGen_ClearFreeLists(void);
-
#endif
#undef _PyGenObject_HEAD
diff --git a/Include/internal/pegen_interface.h b/Include/internal/pegen_interface.h
new file mode 100644
index 00000000000000..ee4c77ec006760
--- /dev/null
+++ b/Include/internal/pegen_interface.h
@@ -0,0 +1,46 @@
+#ifndef Py_PEGENINTERFACE
+#define Py_PEGENINTERFACE
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#include "Python.h"
+#include "Python-ast.h"
+
+PyAPI_FUNC(mod_ty) PyPegen_ASTFromString(
+ const char *str,
+ const char *filename,
+ int mode,
+ PyCompilerFlags *flags,
+ PyArena *arena);
+PyAPI_FUNC(mod_ty) PyPegen_ASTFromStringObject(
+ const char *str,
+ PyObject* filename,
+ int mode,
+ PyCompilerFlags *flags,
+ PyArena *arena);
+PyAPI_FUNC(mod_ty) PyPegen_ASTFromFileObject(
+ FILE *fp,
+ PyObject *filename_ob,
+ int mode,
+ const char *enc,
+ const char *ps1,
+ const char *ps2,
+ PyCompilerFlags *flags,
+ int *errcode,
+ PyArena *arena);
+PyAPI_FUNC(mod_ty) PyPegen_ASTFromFilename(
+ const char *filename,
+ int mode,
+ PyCompilerFlags *flags,
+ PyArena *arena);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PEGENINTERFACE*/
diff --git a/Include/internal/pycore_abstract.h b/Include/internal/pycore_abstract.h
new file mode 100644
index 00000000000000..b791bf24321991
--- /dev/null
+++ b/Include/internal/pycore_abstract.h
@@ -0,0 +1,22 @@
+#ifndef Py_INTERNAL_ABSTRACT_H
+#define Py_INTERNAL_ABSTRACT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+// Fast inlined version of PyIndex_Check()
+static inline int
+_PyIndex_Check(PyObject *obj)
+{
+ PyNumberMethods *tp_as_number = Py_TYPE(obj)->tp_as_number;
+ return (tp_as_number != NULL && tp_as_number->nb_index != NULL);
+}
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_ABSTRACT_H */
diff --git a/Include/bytes_methods.h b/Include/internal/pycore_bytes_methods.h
similarity index 97%
rename from Include/bytes_methods.h
rename to Include/internal/pycore_bytes_methods.h
index 8434a50a4bba71..11e8ab20e91367 100644
--- a/Include/bytes_methods.h
+++ b/Include/internal/pycore_bytes_methods.h
@@ -2,6 +2,10 @@
#ifndef Py_BYTES_CTYPE_H
#define Py_BYTES_CTYPE_H
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
/*
* The internal implementation behind PyBytes (bytes) and PyByteArray (bytearray)
* methods of the given names, they operate on ASCII byte strings.
diff --git a/Include/internal/pycore_byteswap.h b/Include/internal/pycore_byteswap.h
new file mode 100644
index 00000000000000..5e64704a004c82
--- /dev/null
+++ b/Include/internal/pycore_byteswap.h
@@ -0,0 +1,89 @@
+/* Bytes swap functions, reverse order of bytes:
+
+ - _Py_bswap16(uint16_t)
+ - _Py_bswap32(uint32_t)
+ - _Py_bswap64(uint64_t)
+*/
+
+#ifndef Py_INTERNAL_BSWAP_H
+#define Py_INTERNAL_BSWAP_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#if defined(__clang__) || \
+ (defined(__GNUC__) && \
+ ((__GNUC__ >= 5) || (__GNUC__ == 4) && (__GNUC_MINOR__ >= 8)))
+ /* __builtin_bswap16() is available since GCC 4.8,
+ __builtin_bswap32() is available since GCC 4.3,
+ __builtin_bswap64() is available since GCC 4.3. */
+# define _PY_HAVE_BUILTIN_BSWAP
+#endif
+
+#ifdef _MSC_VER
+ /* Get _byteswap_ushort(), _byteswap_ulong(), _byteswap_uint64() */
+# include <intrin.h>
+#endif
+
+static inline uint16_t
+_Py_bswap16(uint16_t word)
+{
+#ifdef _PY_HAVE_BUILTIN_BSWAP
+ return __builtin_bswap16(word);
+#elif defined(_MSC_VER)
+ Py_BUILD_ASSERT(sizeof(word) == sizeof(unsigned short));
+ return _byteswap_ushort(word);
+#else
+ // Portable implementation which doesn't rely on circular bit shift
+ return ( ((word & UINT16_C(0x00FF)) << 8)
+ | ((word & UINT16_C(0xFF00)) >> 8));
+#endif
+}
+
+static inline uint32_t
+_Py_bswap32(uint32_t word)
+{
+#ifdef _PY_HAVE_BUILTIN_BSWAP
+ return __builtin_bswap32(word);
+#elif defined(_MSC_VER)
+ Py_BUILD_ASSERT(sizeof(word) == sizeof(unsigned long));
+ return _byteswap_ulong(word);
+#else
+ // Portable implementation which doesn't rely on circular bit shift
+ return ( ((word & UINT32_C(0x000000FF)) << 24)
+ | ((word & UINT32_C(0x0000FF00)) << 8)
+ | ((word & UINT32_C(0x00FF0000)) >> 8)
+ | ((word & UINT32_C(0xFF000000)) >> 24));
+#endif
+}
+
+static inline uint64_t
+_Py_bswap64(uint64_t word)
+{
+#ifdef _PY_HAVE_BUILTIN_BSWAP
+ return __builtin_bswap64(word);
+#elif defined(_MSC_VER)
+ return _byteswap_uint64(word);
+#else
+ // Portable implementation which doesn't rely on circular bit shift
+ return ( ((word & UINT64_C(0x00000000000000FF)) << 56)
+ | ((word & UINT64_C(0x000000000000FF00)) << 40)
+ | ((word & UINT64_C(0x0000000000FF0000)) << 24)
+ | ((word & UINT64_C(0x00000000FF000000)) << 8)
+ | ((word & UINT64_C(0x000000FF00000000)) >> 8)
+ | ((word & UINT64_C(0x0000FF0000000000)) >> 24)
+ | ((word & UINT64_C(0x00FF000000000000)) >> 40)
+ | ((word & UINT64_C(0xFF00000000000000)) >> 56));
+#endif
+}
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_BSWAP_H */
+
diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h
index 2a7c235cfc264c..368990099089fe 100644
--- a/Include/internal/pycore_ceval.h
+++ b/Include/internal/pycore_ceval.h
@@ -11,25 +11,22 @@ extern "C" {
/* Forward declarations */
struct pyruntimestate;
struct _ceval_runtime_state;
-struct _frame;
-#include "pycore_pystate.h" /* PyInterpreterState.eval_frame */
+#include "pycore_interp.h" /* PyInterpreterState.eval_frame */
-PyAPI_FUNC(void) _Py_FinishPendingCalls(PyThreadState *tstate);
-PyAPI_FUNC(void) _PyEval_Initialize(struct _ceval_runtime_state *);
-PyAPI_FUNC(void) _PyEval_FiniThreads(
- struct _ceval_runtime_state *ceval);
-PyAPI_FUNC(void) _PyEval_SignalReceived(
- struct _ceval_runtime_state *ceval);
+extern void _Py_FinishPendingCalls(PyThreadState *tstate);
+extern void _PyEval_InitRuntimeState(struct _ceval_runtime_state *);
+extern int _PyEval_InitState(struct _ceval_state *ceval);
+extern void _PyEval_FiniState(struct _ceval_state *ceval);
+PyAPI_FUNC(void) _PyEval_SignalReceived(PyInterpreterState *interp);
PyAPI_FUNC(int) _PyEval_AddPendingCall(
- PyThreadState *tstate,
- struct _ceval_runtime_state *ceval,
+ PyInterpreterState *interp,
int (*func)(void *),
void *arg);
-PyAPI_FUNC(void) _PyEval_SignalAsyncExc(
- struct _ceval_runtime_state *ceval);
-PyAPI_FUNC(void) _PyEval_ReInitThreads(
- struct pyruntimestate *runtime);
+PyAPI_FUNC(void) _PyEval_SignalAsyncExc(PyThreadState *tstate);
+#ifdef HAVE_FORK
+extern void _PyEval_ReInitThreads(struct pyruntimestate *runtime);
+#endif
PyAPI_FUNC(void) _PyEval_SetCoroutineOriginTrackingDepth(
PyThreadState *tstate,
int new_depth);
@@ -38,9 +35,9 @@ PyAPI_FUNC(void) _PyEval_SetCoroutineOriginTrackingDepth(
void _PyEval_Fini(void);
static inline PyObject*
-_PyEval_EvalFrame(PyThreadState *tstate, struct _frame *f, int throwflag)
+_PyEval_EvalFrame(PyThreadState *tstate, PyFrameObject *f, int throwflag)
{
- return tstate->interp->eval_frame(f, throwflag);
+ return tstate->interp->eval_frame(tstate, f, throwflag);
}
extern PyObject *_PyEval_EvalCode(
@@ -53,6 +50,78 @@ extern PyObject *_PyEval_EvalCode(
PyObject *kwdefs, PyObject *closure,
PyObject *name, PyObject *qualname);
+#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS
+extern int _PyEval_ThreadsInitialized(PyInterpreterState *interp);
+#else
+extern int _PyEval_ThreadsInitialized(struct pyruntimestate *runtime);
+#endif
+extern PyStatus _PyEval_InitGIL(PyThreadState *tstate);
+extern void _PyEval_FiniGIL(PyThreadState *tstate);
+
+extern void _PyEval_ReleaseLock(PyThreadState *tstate);
+
+
+/* --- _Py_EnterRecursiveCall() ----------------------------------------- */
+
+PyAPI_DATA(int) _Py_CheckRecursionLimit;
+
+#ifdef USE_STACKCHECK
+/* With USE_STACKCHECK macro defined, trigger stack checks in
+ _Py_CheckRecursiveCall() on every 64th call to Py_EnterRecursiveCall. */
+static inline int _Py_MakeRecCheck(PyThreadState *tstate) {
+ return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit
+ || ++tstate->stackcheck_counter > 64);
+}
+#else
+static inline int _Py_MakeRecCheck(PyThreadState *tstate) {
+ return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit);
+}
+#endif
+
+PyAPI_FUNC(int) _Py_CheckRecursiveCall(
+ PyThreadState *tstate,
+ const char *where);
+
+static inline int _Py_EnterRecursiveCall(PyThreadState *tstate,
+ const char *where) {
+ return (_Py_MakeRecCheck(tstate) && _Py_CheckRecursiveCall(tstate, where));
+}
+
+static inline int _Py_EnterRecursiveCall_inline(const char *where) {
+ PyThreadState *tstate = PyThreadState_GET();
+ return _Py_EnterRecursiveCall(tstate, where);
+}
+
+#define Py_EnterRecursiveCall(where) _Py_EnterRecursiveCall_inline(where)
+
+/* Compute the "lower-water mark" for a recursion limit. When
+ * Py_LeaveRecursiveCall() is called with a recursion depth below this mark,
+ * the overflowed flag is reset to 0. */
+static inline int _Py_RecursionLimitLowerWaterMark(int limit) {
+ if (limit > 200) {
+ return (limit - 50);
+ }
+ else {
+ return (3 * (limit >> 2));
+ }
+}
+
+static inline void _Py_LeaveRecursiveCall(PyThreadState *tstate) {
+ tstate->recursion_depth--;
+ int limit = tstate->interp->ceval.recursion_limit;
+ if (tstate->recursion_depth < _Py_RecursionLimitLowerWaterMark(limit)) {
+ tstate->overflowed = 0;
+ }
+}
+
+static inline void _Py_LeaveRecursiveCall_inline(void) {
+ PyThreadState *tstate = PyThreadState_GET();
+ _Py_LeaveRecursiveCall(tstate);
+}
+
+#define Py_LeaveRecursiveCall() _Py_LeaveRecursiveCall_inline()
+
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/dtoa.h b/Include/internal/pycore_dtoa.h
similarity index 66%
rename from Include/dtoa.h
rename to Include/internal/pycore_dtoa.h
index 9bfb6251db831c..3faf8cf6b2eefc 100644
--- a/Include/dtoa.h
+++ b/Include/internal/pycore_dtoa.h
@@ -1,9 +1,15 @@
-#ifndef Py_LIMITED_API
#ifndef PY_NO_SHORT_FLOAT_REPR
#ifdef __cplusplus
extern "C" {
#endif
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+/* These functions are used by modules compiled as C extension like math:
+ they must be exported. */
+
PyAPI_FUNC(double) _Py_dg_strtod(const char *str, char **ptr);
PyAPI_FUNC(char *) _Py_dg_dtoa(double d, int mode, int ndigits,
int *decpt, int *sign, char **rve);
@@ -11,9 +17,7 @@ PyAPI_FUNC(void) _Py_dg_freedtoa(char *s);
PyAPI_FUNC(double) _Py_dg_stdnan(int sign);
PyAPI_FUNC(double) _Py_dg_infinity(int sign);
-
#ifdef __cplusplus
}
#endif
-#endif
-#endif
+#endif /* !PY_NO_SHORT_FLOAT_REPR */
diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h
new file mode 100644
index 00000000000000..0511eea779a7e7
--- /dev/null
+++ b/Include/internal/pycore_gc.h
@@ -0,0 +1,179 @@
+#ifndef Py_INTERNAL_GC_H
+#define Py_INTERNAL_GC_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+/* GC information is stored BEFORE the object structure. */
+typedef struct {
+ // Pointer to next object in the list.
+ // 0 means the object is not tracked
+ uintptr_t _gc_next;
+
+ // Pointer to previous object in the list.
+ // Lowest two bits are used for flags documented later.
+ uintptr_t _gc_prev;
+} PyGC_Head;
+
+#define _Py_AS_GC(o) ((PyGC_Head *)(o)-1)
+
+/* True if the object is currently tracked by the GC. */
+#define _PyObject_GC_IS_TRACKED(o) (_Py_AS_GC(o)->_gc_next != 0)
+
+/* True if the object may be tracked by the GC in the future, or already is.
+ This can be useful to implement some optimizations. */
+#define _PyObject_GC_MAY_BE_TRACKED(obj) \
+ (PyObject_IS_GC(obj) && \
+ (!PyTuple_CheckExact(obj) || _PyObject_GC_IS_TRACKED(obj)))
+
+
+/* Bit flags for _gc_prev */
+/* Bit 0 is set when tp_finalize is called */
+#define _PyGC_PREV_MASK_FINALIZED (1)
+/* Bit 1 is set when the object is in generation which is GCed currently. */
+#define _PyGC_PREV_MASK_COLLECTING (2)
+/* The (N-2) most significant bits contain the real address. */
+#define _PyGC_PREV_SHIFT (2)
+#define _PyGC_PREV_MASK (((uintptr_t) -1) << _PyGC_PREV_SHIFT)
+
+// Lowest bit of _gc_next is used for flags only in GC.
+// But it is always 0 for normal code.
+#define _PyGCHead_NEXT(g) ((PyGC_Head*)(g)->_gc_next)
+#define _PyGCHead_SET_NEXT(g, p) ((g)->_gc_next = (uintptr_t)(p))
+
+// Lowest two bits of _gc_prev is used for _PyGC_PREV_MASK_* flags.
+#define _PyGCHead_PREV(g) ((PyGC_Head*)((g)->_gc_prev & _PyGC_PREV_MASK))
+#define _PyGCHead_SET_PREV(g, p) do { \
+ assert(((uintptr_t)p & ~_PyGC_PREV_MASK) == 0); \
+ (g)->_gc_prev = ((g)->_gc_prev & ~_PyGC_PREV_MASK) \
+ | ((uintptr_t)(p)); \
+ } while (0)
+
+#define _PyGCHead_FINALIZED(g) \
+ (((g)->_gc_prev & _PyGC_PREV_MASK_FINALIZED) != 0)
+#define _PyGCHead_SET_FINALIZED(g) \
+ ((g)->_gc_prev |= _PyGC_PREV_MASK_FINALIZED)
+
+#define _PyGC_FINALIZED(o) \
+ _PyGCHead_FINALIZED(_Py_AS_GC(o))
+#define _PyGC_SET_FINALIZED(o) \
+ _PyGCHead_SET_FINALIZED(_Py_AS_GC(o))
+
+
+/* GC runtime state */
+
+/* If we change this, we need to change the default value in the
+ signature of gc.collect. */
+#define NUM_GENERATIONS 3
+/*
+ NOTE: about untracking of mutable objects.
+
+ Certain types of container cannot participate in a reference cycle, and
+ so do not need to be tracked by the garbage collector. Untracking these
+ objects reduces the cost of garbage collections. However, determining
+ which objects may be untracked is not free, and the costs must be
+ weighed against the benefits for garbage collection.
+
+ There are two possible strategies for when to untrack a container:
+
+ i) When the container is created.
+ ii) When the container is examined by the garbage collector.
+
+ Tuples containing only immutable objects (integers, strings etc, and
+ recursively, tuples of immutable objects) do not need to be tracked.
+ The interpreter creates a large number of tuples, many of which will
+ not survive until garbage collection. It is therefore not worthwhile
+ to untrack eligible tuples at creation time.
+
+ Instead, all tuples except the empty tuple are tracked when created.
+ During garbage collection it is determined whether any surviving tuples
+ can be untracked. A tuple can be untracked if all of its contents are
+ already not tracked. Tuples are examined for untracking in all garbage
+ collection cycles. It may take more than one cycle to untrack a tuple.
+
+ Dictionaries containing only immutable objects also do not need to be
+ tracked. Dictionaries are untracked when created. If a tracked item is
+ inserted into a dictionary (either as a key or value), the dictionary
+ becomes tracked. During a full garbage collection (all generations),
+ the collector will untrack any dictionaries whose contents are not
+ tracked.
+
+ The module provides the python function is_tracked(obj), which returns
+ the CURRENT tracking status of the object. Subsequent garbage
+ collections may change the tracking status of the object.
+
+ Untracking of certain containers was introduced in issue #4688, and
+ the algorithm was refined in response to issue #14775.
+*/
+
+struct gc_generation {
+ PyGC_Head head;
+ int threshold; /* collection threshold */
+ int count; /* count of allocations or collections of younger
+ generations */
+};
+
+/* Running stats per generation */
+struct gc_generation_stats {
+ /* total number of collections */
+ Py_ssize_t collections;
+ /* total number of collected objects */
+ Py_ssize_t collected;
+ /* total number of uncollectable objects (put into gc.garbage) */
+ Py_ssize_t uncollectable;
+};
+
+struct _gc_runtime_state {
+ /* List of objects that still need to be cleaned up, singly linked
+ * via their gc headers' gc_prev pointers. */
+ PyObject *trash_delete_later;
+ /* Current call-stack depth of tp_dealloc calls. */
+ int trash_delete_nesting;
+
+ int enabled;
+ int debug;
+ /* linked lists of container objects */
+ struct gc_generation generations[NUM_GENERATIONS];
+ PyGC_Head *generation0;
+ /* a permanent generation which won't be collected */
+ struct gc_generation permanent_generation;
+ struct gc_generation_stats generation_stats[NUM_GENERATIONS];
+ /* true if we are currently running the collector */
+ int collecting;
+ /* list of uncollectable objects */
+ PyObject *garbage;
+ /* a list of callbacks to be invoked when collection is performed */
+ PyObject *callbacks;
+ /* This is the number of objects that survived the last full
+ collection. It approximates the number of long lived objects
+ tracked by the GC.
+
+ (by "full collection", we mean a collection of the oldest
+ generation). */
+ Py_ssize_t long_lived_total;
+ /* This is the number of objects that survived all "non-full"
+ collections, and are awaiting to undergo a full collection for
+ the first time. */
+ Py_ssize_t long_lived_pending;
+};
+
+PyAPI_FUNC(void) _PyGC_InitState(struct _gc_runtime_state *);
+
+
+// Functions to clear types free lists
+extern void _PyFrame_ClearFreeList(void);
+extern void _PyTuple_ClearFreeList(void);
+extern void _PyFloat_ClearFreeList(void);
+extern void _PyList_ClearFreeList(void);
+extern void _PyDict_ClearFreeList(void);
+extern void _PyAsyncGen_ClearFreeLists(void);
+extern void _PyContext_ClearFreeList(void);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_GC_H */
diff --git a/Include/internal/pycore_hamt.h b/Include/internal/pycore_hamt.h
index e65aef5e21a954..aaf655909551af 100644
--- a/Include/internal/pycore_hamt.h
+++ b/Include/internal/pycore_hamt.h
@@ -8,7 +8,7 @@
#define _Py_HAMT_MAX_TREE_DEPTH 7
-#define PyHamt_Check(o) (Py_TYPE(o) == &_PyHamt_Type)
+#define PyHamt_Check(o) Py_IS_TYPE(o, &_PyHamt_Type)
/* Abstract tree node. */
diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h
new file mode 100644
index 00000000000000..18757abc28c195
--- /dev/null
+++ b/Include/internal/pycore_hashtable.h
@@ -0,0 +1,148 @@
+#ifndef Py_INTERNAL_HASHTABLE_H
+#define Py_INTERNAL_HASHTABLE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+/* Single linked list */
+
+typedef struct _Py_slist_item_s {
+ struct _Py_slist_item_s *next;
+} _Py_slist_item_t;
+
+typedef struct {
+ _Py_slist_item_t *head;
+} _Py_slist_t;
+
+#define _Py_SLIST_ITEM_NEXT(ITEM) (((_Py_slist_item_t *)ITEM)->next)
+
+#define _Py_SLIST_HEAD(SLIST) (((_Py_slist_t *)SLIST)->head)
+
+
+/* _Py_hashtable: table entry */
+
+typedef struct {
+ /* used by _Py_hashtable_t.buckets to link entries */
+ _Py_slist_item_t _Py_slist_item;
+
+ Py_uhash_t key_hash;
+ void *key;
+ void *value;
+} _Py_hashtable_entry_t;
+
+
+/* _Py_hashtable: prototypes */
+
+/* Forward declaration */
+struct _Py_hashtable_t;
+typedef struct _Py_hashtable_t _Py_hashtable_t;
+
+typedef Py_uhash_t (*_Py_hashtable_hash_func) (const void *key);
+typedef int (*_Py_hashtable_compare_func) (const void *key1, const void *key2);
+typedef void (*_Py_hashtable_destroy_func) (void *key);
+typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht,
+ const void *key);
+
+typedef struct {
+ // Allocate a memory block
+ void* (*malloc) (size_t size);
+
+ // Release a memory block
+ void (*free) (void *ptr);
+} _Py_hashtable_allocator_t;
+
+
+/* _Py_hashtable: table */
+struct _Py_hashtable_t {
+ size_t nentries; // Total number of entries in the table
+ size_t nbuckets;
+ _Py_slist_t *buckets;
+
+ _Py_hashtable_get_entry_func get_entry_func;
+ _Py_hashtable_hash_func hash_func;
+ _Py_hashtable_compare_func compare_func;
+ _Py_hashtable_destroy_func key_destroy_func;
+ _Py_hashtable_destroy_func value_destroy_func;
+ _Py_hashtable_allocator_t alloc;
+};
+
+/* Hash a pointer (void*) */
+PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr(const void *key);
+
+/* Comparison using memcmp() */
+PyAPI_FUNC(int) _Py_hashtable_compare_direct(
+ const void *key1,
+ const void *key2);
+
+PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new(
+ _Py_hashtable_hash_func hash_func,
+ _Py_hashtable_compare_func compare_func);
+
+PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full(
+ _Py_hashtable_hash_func hash_func,
+ _Py_hashtable_compare_func compare_func,
+ _Py_hashtable_destroy_func key_destroy_func,
+ _Py_hashtable_destroy_func value_destroy_func,
+ _Py_hashtable_allocator_t *allocator);
+
+PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht);
+
+PyAPI_FUNC(void) _Py_hashtable_clear(_Py_hashtable_t *ht);
+
+typedef int (*_Py_hashtable_foreach_func) (_Py_hashtable_t *ht,
+ const void *key, const void *value,
+ void *user_data);
+
+/* Call func() on each entry of the hashtable.
+ Iteration stops if func() result is non-zero, in this case it's the result
+ of the call. Otherwise, the function returns 0. */
+PyAPI_FUNC(int) _Py_hashtable_foreach(
+ _Py_hashtable_t *ht,
+ _Py_hashtable_foreach_func func,
+ void *user_data);
+
+PyAPI_FUNC(size_t) _Py_hashtable_size(const _Py_hashtable_t *ht);
+
+/* Add a new entry to the hash. The key must not be present in the hash table.
+ Return 0 on success, -1 on memory error. */
+PyAPI_FUNC(int) _Py_hashtable_set(
+ _Py_hashtable_t *ht,
+ const void *key,
+ void *value);
+
+
+/* Get an entry.
+ Return NULL if the key does not exist. */
+static inline _Py_hashtable_entry_t *
+_Py_hashtable_get_entry(_Py_hashtable_t *ht, const void *key)
+{
+ return ht->get_entry_func(ht, key);
+}
+
+
+/* Get value from an entry.
+ Return NULL if the entry is not found.
+
+ Use _Py_hashtable_get_entry() to distinguish entry value equal to NULL
+ and entry not found. */
+PyAPI_FUNC(void*) _Py_hashtable_get(_Py_hashtable_t *ht, const void *key);
+
+
+/* Remove a key and its associated value without calling key and value destroy
+ functions.
+
+ Return the removed value if the key was found.
+ Return NULL if the key was not found. */
+PyAPI_FUNC(void*) _Py_hashtable_steal(
+ _Py_hashtable_t *ht,
+ const void *key);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_HASHTABLE_H */
diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h
index 5d3203e5b97fdf..b011ea4425112b 100644
--- a/Include/internal/pycore_import.h
+++ b/Include/internal/pycore_import.h
@@ -10,7 +10,9 @@ PyAPI_FUNC(PyObject *) _PyImport_FindBuiltin(
const char *name /* UTF-8 encoded string */
);
+#ifdef HAVE_FORK
extern void _PyImport_ReInitLock(void);
+#endif
extern void _PyImport_Cleanup(PyThreadState *tstate);
#ifdef __cplusplus
diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h
new file mode 100644
index 00000000000000..f04ea330d04571
--- /dev/null
+++ b/Include/internal/pycore_interp.h
@@ -0,0 +1,195 @@
+#ifndef Py_INTERNAL_INTERP_H
+#define Py_INTERNAL_INTERP_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#include "pycore_atomic.h" /* _Py_atomic_address */
+#include "pycore_gil.h" /* struct _gil_runtime_state */
+#include "pycore_gc.h" /* struct _gc_runtime_state */
+#include "pycore_warnings.h" /* struct _warnings_runtime_state */
+
+/* ceval state */
+
+struct _pending_calls {
+ PyThread_type_lock lock;
+ /* Request for running pending calls. */
+ _Py_atomic_int calls_to_do;
+ /* Request for looking at the `async_exc` field of the current
+ thread state.
+ Guarded by the GIL. */
+ int async_exc;
+#define NPENDINGCALLS 32
+ struct {
+ int (*func)(void *);
+ void *arg;
+ } calls[NPENDINGCALLS];
+ int first;
+ int last;
+};
+
+struct _ceval_state {
+ int recursion_limit;
+ /* Records whether tracing is on for any thread. Counts the number
+ of threads for which tstate->c_tracefunc is non-NULL, so if the
+ value is 0, we know we don't have to check this thread's
+ c_tracefunc. This speeds up the if statement in
+ _PyEval_EvalFrameDefault() after fast_next_opcode. */
+ int tracing_possible;
+ /* This single variable consolidates all requests to break out of
+ the fast path in the eval loop. */
+ _Py_atomic_int eval_breaker;
+ /* Request for dropping the GIL */
+ _Py_atomic_int gil_drop_request;
+ struct _pending_calls pending;
+#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS
+ struct _gil_runtime_state gil;
+#endif
+};
+
+/* fs_codec.encoding is initialized to NULL.
+ Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */
+struct _Py_unicode_fs_codec {
+ char *encoding; // Filesystem encoding (encoded to UTF-8)
+ int utf8; // encoding=="utf-8"?
+ char *errors; // Filesystem errors (encoded to UTF-8)
+ _Py_error_handler error_handler;
+};
+
+struct _Py_unicode_state {
+ struct _Py_unicode_fs_codec fs_codec;
+};
+
+
+/* interpreter state */
+
+#define _PY_NSMALLPOSINTS 257
+#define _PY_NSMALLNEGINTS 5
+
+// The PyInterpreterState typedef is in Include/pystate.h.
+struct _is {
+
+ struct _is *next;
+ struct _ts *tstate_head;
+
+ /* Reference to the _PyRuntime global variable. This field exists
+ to not have to pass runtime in addition to tstate to a function.
+ Get runtime from tstate: tstate->interp->runtime. */
+ struct pyruntimestate *runtime;
+
+ int64_t id;
+ int64_t id_refcount;
+ int requires_idref;
+ PyThread_type_lock id_mutex;
+
+ int finalizing;
+
+ struct _ceval_state ceval;
+ struct _gc_runtime_state gc;
+
+ PyObject *modules;
+ PyObject *modules_by_index;
+ PyObject *sysdict;
+ PyObject *builtins;
+ PyObject *importlib;
+
+ /* Used in Modules/_threadmodule.c. */
+ long num_threads;
+ /* Support for runtime thread stack size tuning.
+ A value of 0 means using the platform's default stack size
+ or the size specified by the THREAD_STACK_SIZE macro. */
+ /* Used in Python/thread.c. */
+ size_t pythread_stacksize;
+
+ PyObject *codec_search_path;
+ PyObject *codec_search_cache;
+ PyObject *codec_error_registry;
+ int codecs_initialized;
+
+ struct _Py_unicode_state unicode;
+
+ PyConfig config;
+#ifdef HAVE_DLOPEN
+ int dlopenflags;
+#endif
+
+ PyObject *dict; /* Stores per-interpreter state */
+
+ PyObject *builtins_copy;
+ PyObject *import_func;
+ /* Initialized to PyEval_EvalFrameDefault(). */
+ _PyFrameEvalFunction eval_frame;
+
+ Py_ssize_t co_extra_user_count;
+ freefunc co_extra_freefuncs[MAX_CO_EXTRA_USERS];
+
+#ifdef HAVE_FORK
+ PyObject *before_forkers;
+ PyObject *after_forkers_parent;
+ PyObject *after_forkers_child;
+#endif
+ /* AtExit module */
+ void (*pyexitfunc)(PyObject *);
+ PyObject *pyexitmodule;
+
+ uint64_t tstate_next_unique_id;
+
+ struct _warnings_runtime_state warnings;
+
+ PyObject *audit_hooks;
+
+ struct {
+ struct {
+ int level;
+ int atbol;
+ } listnode;
+ } parser;
+
+#if _PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS > 0
+ /* Small integers are preallocated in this array so that they
+ can be shared.
+ The integers that are preallocated are those in the range
+ -_PY_NSMALLNEGINTS (inclusive) to _PY_NSMALLPOSINTS (not inclusive).
+ */
+ PyLongObject* small_ints[_PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS];
+#endif
+};
+
+/* Used by _PyImport_Cleanup() */
+extern void _PyInterpreterState_ClearModules(PyInterpreterState *interp);
+
+extern PyStatus _PyInterpreterState_SetConfig(
+ PyInterpreterState *interp,
+ const PyConfig *config);
+
+
+
+/* cross-interpreter data registry */
+
+/* For now we use a global registry of shareable classes. An
+ alternative would be to add a tp_* slot for a class's
+ crossinterpdatafunc. It would be simpler and more efficient. */
+
+struct _xidregitem;
+
+struct _xidregitem {
+ PyTypeObject *cls;
+ crossinterpdatafunc getdata;
+ struct _xidregitem *next;
+};
+
+PyAPI_FUNC(struct _is*) _PyInterpreterState_LookUpID(int64_t);
+
+PyAPI_FUNC(int) _PyInterpreterState_IDInitref(struct _is *);
+PyAPI_FUNC(void) _PyInterpreterState_IDIncref(struct _is *);
+PyAPI_FUNC(void) _PyInterpreterState_IDDecref(struct _is *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_INTERP_H */
+
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index 10a5746997ec72..32e86d06db5b43 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -8,7 +8,9 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
-#include "pycore_pystate.h" /* _PyRuntime.gc */
+#include "pycore_gc.h" // _PyObject_GC_IS_TRACKED()
+#include "pycore_interp.h" // PyInterpreterState.gc
+#include "pycore_pystate.h" // _PyThreadState_GET()
PyAPI_FUNC(int) _PyType_CheckConsistency(PyTypeObject *type);
PyAPI_FUNC(int) _PyDict_CheckConsistency(PyObject *mp, int check_content);
@@ -87,6 +89,31 @@ extern void _Py_PrintReferences(FILE *);
extern void _Py_PrintReferenceAddresses(FILE *);
#endif
+static inline PyObject **
+_PyObject_GET_WEAKREFS_LISTPTR(PyObject *op)
+{
+ Py_ssize_t offset = Py_TYPE(op)->tp_weaklistoffset;
+ return (PyObject **)((char *)op + offset);
+}
+
+// Fast inlined version of PyType_HasFeature()
+static inline int
+_PyType_HasFeature(PyTypeObject *type, unsigned long feature) {
+ return ((type->tp_flags & feature) != 0);
+}
+
+// Fast inlined version of PyObject_IS_GC()
+static inline int
+_PyObject_IS_GC(PyObject *obj)
+{
+ return (PyType_IS_GC(Py_TYPE(obj))
+ && (Py_TYPE(obj)->tp_is_gc == NULL
+ || Py_TYPE(obj)->tp_is_gc(obj)));
+}
+
+// Fast inlined version of PyType_IS_GC()
+#define _PyType_IS_GC(t) _PyType_HasFeature((t), Py_TPFLAGS_HAVE_GC)
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_pyerrors.h b/Include/internal/pycore_pyerrors.h
index f3aa3e8b98abe7..2cf1160afc0149 100644
--- a/Include/internal/pycore_pyerrors.h
+++ b/Include/internal/pycore_pyerrors.h
@@ -14,6 +14,20 @@ static inline PyObject* _PyErr_Occurred(PyThreadState *tstate)
return tstate->curexc_type;
}
+static inline void _PyErr_ClearExcState(_PyErr_StackItem *exc_state)
+{
+ PyObject *t, *v, *tb;
+ t = exc_state->exc_type;
+ v = exc_state->exc_value;
+ tb = exc_state->exc_traceback;
+ exc_state->exc_type = NULL;
+ exc_state->exc_value = NULL;
+ exc_state->exc_traceback = NULL;
+ Py_XDECREF(t);
+ Py_XDECREF(v);
+ Py_XDECREF(tb);
+}
+
PyAPI_FUNC(void) _PyErr_Fetch(
PyThreadState *tstate,
@@ -36,6 +50,9 @@ PyAPI_FUNC(void) _PyErr_SetObject(
PyObject *type,
PyObject *value);
+PyAPI_FUNC(void) _PyErr_ChainStackItem(
+ _PyErr_StackItem *exc_info);
+
PyAPI_FUNC(void) _PyErr_Clear(PyThreadState *tstate);
PyAPI_FUNC(void) _PyErr_SetNone(PyThreadState *tstate, PyObject *exception);
@@ -65,6 +82,8 @@ PyAPI_FUNC(PyObject *) _PyErr_FormatFromCauseTstate(
const char *format,
...);
+PyAPI_FUNC(int) _PyErr_CheckSignalsTstate(PyThreadState *tstate);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h
index 2dd6149a6b3d35..77ea3f27454da0 100644
--- a/Include/internal/pycore_pylifecycle.h
+++ b/Include/internal/pycore_pylifecycle.h
@@ -83,7 +83,7 @@ extern void _PyHash_Fini(void);
extern void _PyTraceMalloc_Fini(void);
extern void _PyWarnings_Fini(PyInterpreterState *interp);
-extern void _PyGILState_Init(PyThreadState *tstate);
+extern PyStatus _PyGILState_Init(PyThreadState *tstate);
extern void _PyGILState_Fini(PyThreadState *tstate);
PyAPI_FUNC(void) _PyGC_DumpShutdownStats(PyThreadState *tstate);
@@ -104,7 +104,7 @@ PyAPI_FUNC(void) _PyErr_Print(PyThreadState *tstate);
PyAPI_FUNC(void) _PyErr_Display(PyObject *file, PyObject *exception,
PyObject *value, PyObject *tb);
-PyAPI_FUNC(void) _PyThreadState_DeleteCurrent(struct pyruntimestate *runtime);
+PyAPI_FUNC(void) _PyThreadState_DeleteCurrent(PyThreadState *tstate);
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h
index db153e0bd2d710..3d925e2250d252 100644
--- a/Include/internal/pycore_pymem.h
+++ b/Include/internal/pycore_pymem.h
@@ -8,107 +8,7 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
-#include "pymem.h" /* PyMemAllocatorName */
-
-
-/* GC runtime state */
-
-/* If we change this, we need to change the default value in the
- signature of gc.collect. */
-#define NUM_GENERATIONS 3
-/*
- NOTE: about untracking of mutable objects.
-
- Certain types of container cannot participate in a reference cycle, and
- so do not need to be tracked by the garbage collector. Untracking these
- objects reduces the cost of garbage collections. However, determining
- which objects may be untracked is not free, and the costs must be
- weighed against the benefits for garbage collection.
-
- There are two possible strategies for when to untrack a container:
-
- i) When the container is created.
- ii) When the container is examined by the garbage collector.
-
- Tuples containing only immutable objects (integers, strings etc, and
- recursively, tuples of immutable objects) do not need to be tracked.
- The interpreter creates a large number of tuples, many of which will
- not survive until garbage collection. It is therefore not worthwhile
- to untrack eligible tuples at creation time.
-
- Instead, all tuples except the empty tuple are tracked when created.
- During garbage collection it is determined whether any surviving tuples
- can be untracked. A tuple can be untracked if all of its contents are
- already not tracked. Tuples are examined for untracking in all garbage
- collection cycles. It may take more than one cycle to untrack a tuple.
-
- Dictionaries containing only immutable objects also do not need to be
- tracked. Dictionaries are untracked when created. If a tracked item is
- inserted into a dictionary (either as a key or value), the dictionary
- becomes tracked. During a full garbage collection (all generations),
- the collector will untrack any dictionaries whose contents are not
- tracked.
-
- The module provides the python function is_tracked(obj), which returns
- the CURRENT tracking status of the object. Subsequent garbage
- collections may change the tracking status of the object.
-
- Untracking of certain containers was introduced in issue #4688, and
- the algorithm was refined in response to issue #14775.
-*/
-
-struct gc_generation {
- PyGC_Head head;
- int threshold; /* collection threshold */
- int count; /* count of allocations or collections of younger
- generations */
-};
-
-/* Running stats per generation */
-struct gc_generation_stats {
- /* total number of collections */
- Py_ssize_t collections;
- /* total number of collected objects */
- Py_ssize_t collected;
- /* total number of uncollectable objects (put into gc.garbage) */
- Py_ssize_t uncollectable;
-};
-
-struct _gc_runtime_state {
- /* List of objects that still need to be cleaned up, singly linked
- * via their gc headers' gc_prev pointers. */
- PyObject *trash_delete_later;
- /* Current call-stack depth of tp_dealloc calls. */
- int trash_delete_nesting;
-
- int enabled;
- int debug;
- /* linked lists of container objects */
- struct gc_generation generations[NUM_GENERATIONS];
- PyGC_Head *generation0;
- /* a permanent generation which won't be collected */
- struct gc_generation permanent_generation;
- struct gc_generation_stats generation_stats[NUM_GENERATIONS];
- /* true if we are currently running the collector */
- int collecting;
- /* list of uncollectable objects */
- PyObject *garbage;
- /* a list of callbacks to be invoked when collection is performed */
- PyObject *callbacks;
- /* This is the number of objects that survived the last full
- collection. It approximates the number of long lived objects
- tracked by the GC.
-
- (by "full collection", we mean a collection of the oldest
- generation). */
- Py_ssize_t long_lived_total;
- /* This is the number of objects that survived all "non-full"
- collections, and are awaiting to undergo a full collection for
- the first time. */
- Py_ssize_t long_lived_pending;
-};
-
-PyAPI_FUNC(void) _PyGC_InitState(struct _gc_runtime_state *);
+#include "pymem.h" // PyMemAllocatorName
/* Set the memory allocator of the specified domain to the default.
@@ -188,17 +88,12 @@ struct _PyTraceMalloc_Config {
/* limit of the number of frames in a traceback, 1 by default.
Variable protected by the GIL. */
int max_nframe;
-
- /* use domain in trace key?
- Variable protected by the GIL. */
- int use_domain;
};
#define _PyTraceMalloc_Config_INIT \
{.initialized = TRACEMALLOC_NOT_INITIALIZED, \
.tracing = 0, \
- .max_nframe = 1, \
- .use_domain = 0}
+ .max_nframe = 1}
PyAPI_DATA(struct _PyTraceMalloc_Config) _Py_tracemalloc_config;
diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h
index 405efb9f460605..d96ba31207001a 100644
--- a/Include/internal/pycore_pystate.h
+++ b/Include/internal/pycore_pystate.h
@@ -8,288 +8,60 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
-#include "pycore_gil.h" /* struct _gil_runtime_state */
-#include "pycore_pymem.h" /* struct _gc_runtime_state */
-#include "pycore_warnings.h" /* struct _warnings_runtime_state */
-
-
-/* ceval state */
-
-struct _pending_calls {
- int finishing;
- PyThread_type_lock lock;
- /* Request for running pending calls. */
- _Py_atomic_int calls_to_do;
- /* Request for looking at the `async_exc` field of the current
- thread state.
- Guarded by the GIL. */
- int async_exc;
-#define NPENDINGCALLS 32
- struct {
- int (*func)(void *);
- void *arg;
- } calls[NPENDINGCALLS];
- int first;
- int last;
-};
-
-struct _ceval_runtime_state {
- int recursion_limit;
- /* Records whether tracing is on for any thread. Counts the number
- of threads for which tstate->c_tracefunc is non-NULL, so if the
- value is 0, we know we don't have to check this thread's
- c_tracefunc. This speeds up the if statement in
- PyEval_EvalFrameEx() after fast_next_opcode. */
- int tracing_possible;
- /* This single variable consolidates all requests to break out of
- the fast path in the eval loop. */
- _Py_atomic_int eval_breaker;
- /* Request for dropping the GIL */
- _Py_atomic_int gil_drop_request;
- struct _pending_calls pending;
- /* Request for checking signals. */
- _Py_atomic_int signals_pending;
- struct _gil_runtime_state gil;
-};
-
-/* interpreter state */
-
-typedef PyObject* (*_PyFrameEvalFunction)(struct _frame *, int);
-
-#define _PY_NSMALLPOSINTS 257
-#define _PY_NSMALLNEGINTS 5
-
-// The PyInterpreterState typedef is in Include/pystate.h.
-struct _is {
-
- struct _is *next;
- struct _ts *tstate_head;
-
- /* Reference to the _PyRuntime global variable. This field exists
- to not have to pass runtime in addition to tstate to a function.
- Get runtime from tstate: tstate->interp->runtime. */
- struct pyruntimestate *runtime;
-
- int64_t id;
- int64_t id_refcount;
- int requires_idref;
- PyThread_type_lock id_mutex;
-
- int finalizing;
-
- struct _gc_runtime_state gc;
-
- PyObject *modules;
- PyObject *modules_by_index;
- PyObject *sysdict;
- PyObject *builtins;
- PyObject *importlib;
-
- /* Used in Modules/_threadmodule.c. */
- long num_threads;
- /* Support for runtime thread stack size tuning.
- A value of 0 means using the platform's default stack size
- or the size specified by the THREAD_STACK_SIZE macro. */
- /* Used in Python/thread.c. */
- size_t pythread_stacksize;
-
- PyObject *codec_search_path;
- PyObject *codec_search_cache;
- PyObject *codec_error_registry;
- int codecs_initialized;
-
- /* fs_codec.encoding is initialized to NULL.
- Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */
- struct {
- char *encoding; /* Filesystem encoding (encoded to UTF-8) */
- int utf8; /* encoding=="utf-8"? */
- char *errors; /* Filesystem errors (encoded to UTF-8) */
- _Py_error_handler error_handler;
- } fs_codec;
-
- PyConfig config;
-#ifdef HAVE_DLOPEN
- int dlopenflags;
-#endif
-
- PyObject *dict; /* Stores per-interpreter state */
-
- PyObject *builtins_copy;
- PyObject *import_func;
- /* Initialized to PyEval_EvalFrameDefault(). */
- _PyFrameEvalFunction eval_frame;
-
- Py_ssize_t co_extra_user_count;
- freefunc co_extra_freefuncs[MAX_CO_EXTRA_USERS];
-
-#ifdef HAVE_FORK
- PyObject *before_forkers;
- PyObject *after_forkers_parent;
- PyObject *after_forkers_child;
-#endif
- /* AtExit module */
- void (*pyexitfunc)(PyObject *);
- PyObject *pyexitmodule;
-
- uint64_t tstate_next_unique_id;
-
- struct _warnings_runtime_state warnings;
-
- PyObject *audit_hooks;
-
- struct {
- struct {
- int level;
- int atbol;
- } listnode;
- } parser;
-
-#if _PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS > 0
- /* Small integers are preallocated in this array so that they
- can be shared.
- The integers that are preallocated are those in the range
- -_PY_NSMALLNEGINTS (inclusive) to _PY_NSMALLPOSINTS (not inclusive).
- */
- PyLongObject* small_ints[_PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS];
-#endif
-};
-
-PyAPI_FUNC(struct _is*) _PyInterpreterState_LookUpID(PY_INT64_T);
-
-PyAPI_FUNC(int) _PyInterpreterState_IDInitref(struct _is *);
-PyAPI_FUNC(void) _PyInterpreterState_IDIncref(struct _is *);
-PyAPI_FUNC(void) _PyInterpreterState_IDDecref(struct _is *);
-
-
-/* cross-interpreter data registry */
-
-/* For now we use a global registry of shareable classes. An
- alternative would be to add a tp_* slot for a class's
- crossinterpdatafunc. It would be simpler and more efficient. */
-
-struct _xidregitem;
-
-struct _xidregitem {
- PyTypeObject *cls;
- crossinterpdatafunc getdata;
- struct _xidregitem *next;
-};
-
-/* runtime audit hook state */
-
-typedef struct _Py_AuditHookEntry {
- struct _Py_AuditHookEntry *next;
- Py_AuditHookFunction hookCFunction;
- void *userData;
-} _Py_AuditHookEntry;
-
-/* GIL state */
-
-struct _gilstate_runtime_state {
- int check_enabled;
- /* Assuming the current thread holds the GIL, this is the
- PyThreadState for the current thread. */
- _Py_atomic_address tstate_current;
- PyThreadFrameGetter getframe;
- /* The single PyInterpreterState used by this process'
- GILState implementation
- */
- /* TODO: Given interp_main, it may be possible to kill this ref */
- PyInterpreterState *autoInterpreterState;
- Py_tss_t autoTSSkey;
-};
-
-/* hook for PyEval_GetFrame(), requested for Psyco */
-#define _PyThreadState_GetFrame _PyRuntime.gilstate.getframe
-
-/* Issue #26558: Flag to disable PyGILState_Check().
- If set to non-zero, PyGILState_Check() always return 1. */
-#define _PyGILState_check_enabled _PyRuntime.gilstate.check_enabled
-
-
-/* Full Python runtime state */
-
-typedef struct pyruntimestate {
- /* Is running Py_PreInitialize()? */
- int preinitializing;
-
- /* Is Python preinitialized? Set to 1 by Py_PreInitialize() */
- int preinitialized;
+#include "pycore_runtime.h" /* PyRuntimeState */
- /* Is Python core initialized? Set to 1 by _Py_InitializeCore() */
- int core_initialized;
- /* Is Python fully initialized? Set to 1 by Py_Initialize() */
- int initialized;
-
- /* Set by Py_FinalizeEx(). Only reset to NULL if Py_Initialize()
- is called again. */
- PyThreadState *finalizing;
-
- struct pyinterpreters {
- PyThread_type_lock mutex;
- PyInterpreterState *head;
- PyInterpreterState *main;
- /* _next_interp_id is an auto-numbered sequence of small
- integers. It gets initialized in _PyInterpreterState_Init(),
- which is called in Py_Initialize(), and used in
- PyInterpreterState_New(). A negative interpreter ID
- indicates an error occurred. The main interpreter will
- always have an ID of 0. Overflow results in a RuntimeError.
- If that becomes a problem later then we can adjust, e.g. by
- using a Python int. */
- int64_t next_id;
- } interpreters;
- // XXX Remove this field once we have a tp_* slot.
- struct _xidregistry {
- PyThread_type_lock mutex;
- struct _xidregitem *head;
- } xidregistry;
-
- unsigned long main_thread;
-
-#define NEXITFUNCS 32
- void (*exitfuncs[NEXITFUNCS])(void);
- int nexitfuncs;
-
- struct _ceval_runtime_state ceval;
- struct _gilstate_runtime_state gilstate;
-
- PyPreConfig preconfig;
-
- Py_OpenCodeHookFunction open_code_hook;
- void *open_code_userdata;
- _Py_AuditHookEntry *audit_hook_head;
-
- // XXX Consolidate globals found via the check-c-globals script.
-} _PyRuntimeState;
+/* Check if the current thread is the main thread.
+ Use _Py_IsMainInterpreter() to check if it's the main interpreter. */
+static inline int
+_Py_IsMainThread(void)
+{
+ unsigned long thread = PyThread_get_thread_ident();
+ return (thread == _PyRuntime.main_thread);
+}
-#define _PyRuntimeState_INIT \
- {.preinitialized = 0, .core_initialized = 0, .initialized = 0}
-/* Note: _PyRuntimeState_INIT sets other fields to 0/NULL */
-PyAPI_DATA(_PyRuntimeState) _PyRuntime;
-PyAPI_FUNC(PyStatus) _PyRuntimeState_Init(_PyRuntimeState *runtime);
-PyAPI_FUNC(void) _PyRuntimeState_Fini(_PyRuntimeState *runtime);
-PyAPI_FUNC(void) _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime);
+static inline int
+_Py_IsMainInterpreter(PyThreadState* tstate)
+{
+ /* Use directly _PyRuntime rather than tstate->interp->runtime, since
+ this function is used in performance critical code path (ceval) */
+ return (tstate->interp == _PyRuntime.interpreters.main);
+}
-/* Initialize _PyRuntimeState.
- Return NULL on success, or return an error message on failure. */
-PyAPI_FUNC(PyStatus) _PyRuntime_Initialize(void);
-PyAPI_FUNC(void) _PyRuntime_Finalize(void);
+/* Only handle signals on the main thread of the main interpreter. */
+static inline int
+_Py_ThreadCanHandleSignals(PyInterpreterState *interp)
+{
+ return (_Py_IsMainThread() && interp == _PyRuntime.interpreters.main);
+}
-#define _Py_CURRENTLY_FINALIZING(runtime, tstate) \
- (runtime->finalizing == tstate)
-PyAPI_FUNC(int) _Py_IsMainInterpreter(PyThreadState* tstate);
+/* Only execute pending calls on the main thread. */
+static inline int
+_Py_ThreadCanHandlePendingCalls(void)
+{
+ return _Py_IsMainThread();
+}
/* Variable and macro for in-line access to current thread
and interpreter state */
-#define _PyRuntimeState_GetThreadState(runtime) \
- ((PyThreadState*)_Py_atomic_load_relaxed(&(runtime)->gilstate.tstate_current))
+#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS
+PyAPI_FUNC(PyThreadState*) _PyThreadState_GetTSS(void);
+#endif
+
+static inline PyThreadState*
+_PyRuntimeState_GetThreadState(_PyRuntimeState *runtime)
+{
+#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS
+ return _PyThreadState_GetTSS();
+#else
+ return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->gilstate.tstate_current);
+#endif
+}
/* Get the current Python thread state.
@@ -300,7 +72,15 @@ PyAPI_FUNC(int) _Py_IsMainInterpreter(PyThreadState* tstate);
The caller must hold the GIL.
See also PyThreadState_Get() and PyThreadState_GET(). */
-#define _PyThreadState_GET() _PyRuntimeState_GetThreadState(&_PyRuntime)
+static inline PyThreadState*
+_PyThreadState_GET(void)
+{
+#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS
+ return _PyThreadState_GetTSS();
+#else
+ return _PyRuntimeState_GetThreadState(&_PyRuntime);
+#endif
+}
/* Redefine PyThreadState_GET() as an alias to _PyThreadState_GET() */
#undef PyThreadState_GET
@@ -314,7 +94,11 @@ PyAPI_FUNC(int) _Py_IsMainInterpreter(PyThreadState* tstate);
See also _PyInterpreterState_Get()
and _PyGILState_GetInterpreterStateUnsafe(). */
-#define _PyInterpreterState_GET_UNSAFE() (_PyThreadState_GET()->interp)
+static inline PyInterpreterState* _PyInterpreterState_GET(void) {
+ PyThreadState *tstate = _PyThreadState_GET();
+ assert(tstate != NULL);
+ return tstate->interp;
+}
/* Other */
@@ -332,9 +116,6 @@ PyAPI_FUNC(PyThreadState *) _PyThreadState_Swap(
PyAPI_FUNC(PyStatus) _PyInterpreterState_Enable(_PyRuntimeState *runtime);
PyAPI_FUNC(void) _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime);
-/* Used by _PyImport_Cleanup() */
-extern void _PyInterpreterState_ClearModules(PyInterpreterState *interp);
-
PyAPI_FUNC(void) _PyGILState_Reinit(_PyRuntimeState *runtime);
diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h
new file mode 100644
index 00000000000000..ebdc12b23a9ca6
--- /dev/null
+++ b/Include/internal/pycore_runtime.h
@@ -0,0 +1,146 @@
+#ifndef Py_INTERNAL_RUNTIME_H
+#define Py_INTERNAL_RUNTIME_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+#include "pycore_atomic.h" /* _Py_atomic_address */
+#include "pycore_gil.h" // struct _gil_runtime_state
+
+/* ceval state */
+
+struct _ceval_runtime_state {
+ /* Request for checking signals. It is shared by all interpreters (see
+ bpo-40513). Any thread of any interpreter can receive a signal, but only
+ the main thread of the main interpreter can handle signals: see
+ _Py_ThreadCanHandleSignals(). */
+ _Py_atomic_int signals_pending;
+#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS
+ struct _gil_runtime_state gil;
+#endif
+};
+
+/* GIL state */
+
+struct _gilstate_runtime_state {
+ /* bpo-26558: Flag to disable PyGILState_Check().
+ If set to non-zero, PyGILState_Check() always return 1. */
+ int check_enabled;
+ /* Assuming the current thread holds the GIL, this is the
+ PyThreadState for the current thread. */
+ _Py_atomic_address tstate_current;
+ /* The single PyInterpreterState used by this process'
+ GILState implementation
+ */
+ /* TODO: Given interp_main, it may be possible to kill this ref */
+ PyInterpreterState *autoInterpreterState;
+ Py_tss_t autoTSSkey;
+};
+
+/* Runtime audit hook state */
+
+typedef struct _Py_AuditHookEntry {
+ struct _Py_AuditHookEntry *next;
+ Py_AuditHookFunction hookCFunction;
+ void *userData;
+} _Py_AuditHookEntry;
+
+/* Full Python runtime state */
+
+typedef struct pyruntimestate {
+ /* Is running Py_PreInitialize()? */
+ int preinitializing;
+
+ /* Is Python preinitialized? Set to 1 by Py_PreInitialize() */
+ int preinitialized;
+
+ /* Is Python core initialized? Set to 1 by _Py_InitializeCore() */
+ int core_initialized;
+
+ /* Is Python fully initialized? Set to 1 by Py_Initialize() */
+ int initialized;
+
+ /* Set by Py_FinalizeEx(). Only reset to NULL if Py_Initialize()
+ is called again.
+
+ Use _PyRuntimeState_GetFinalizing() and _PyRuntimeState_SetFinalizing()
+ to access it, don't access it directly. */
+ _Py_atomic_address _finalizing;
+
+ struct pyinterpreters {
+ PyThread_type_lock mutex;
+ PyInterpreterState *head;
+ PyInterpreterState *main;
+ /* _next_interp_id is an auto-numbered sequence of small
+ integers. It gets initialized in _PyInterpreterState_Init(),
+ which is called in Py_Initialize(), and used in
+ PyInterpreterState_New(). A negative interpreter ID
+ indicates an error occurred. The main interpreter will
+ always have an ID of 0. Overflow results in a RuntimeError.
+ If that becomes a problem later then we can adjust, e.g. by
+ using a Python int. */
+ int64_t next_id;
+ } interpreters;
+ // XXX Remove this field once we have a tp_* slot.
+ struct _xidregistry {
+ PyThread_type_lock mutex;
+ struct _xidregitem *head;
+ } xidregistry;
+
+ unsigned long main_thread;
+
+#define NEXITFUNCS 32
+ void (*exitfuncs[NEXITFUNCS])(void);
+ int nexitfuncs;
+
+ struct _ceval_runtime_state ceval;
+ struct _gilstate_runtime_state gilstate;
+
+ PyPreConfig preconfig;
+
+ Py_OpenCodeHookFunction open_code_hook;
+ void *open_code_userdata;
+ _Py_AuditHookEntry *audit_hook_head;
+
+ // XXX Consolidate globals found via the check-c-globals script.
+} _PyRuntimeState;
+
+#define _PyRuntimeState_INIT \
+ {.preinitialized = 0, .core_initialized = 0, .initialized = 0}
+/* Note: _PyRuntimeState_INIT sets other fields to 0/NULL */
+
+
+PyAPI_DATA(_PyRuntimeState) _PyRuntime;
+
+PyAPI_FUNC(PyStatus) _PyRuntimeState_Init(_PyRuntimeState *runtime);
+PyAPI_FUNC(void) _PyRuntimeState_Fini(_PyRuntimeState *runtime);
+
+#ifdef HAVE_FORK
+PyAPI_FUNC(void) _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime);
+#endif
+
+/* Initialize _PyRuntimeState.
+ Return NULL on success, or return an error message on failure. */
+PyAPI_FUNC(PyStatus) _PyRuntime_Initialize(void);
+
+PyAPI_FUNC(void) _PyRuntime_Finalize(void);
+
+
+static inline PyThreadState*
+_PyRuntimeState_GetFinalizing(_PyRuntimeState *runtime) {
+ return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->_finalizing);
+}
+
+static inline void
+_PyRuntimeState_SetFinalizing(_PyRuntimeState *runtime, PyThreadState *tstate) {
+ _Py_atomic_store_relaxed(&runtime->_finalizing, (uintptr_t)tstate);
+}
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_RUNTIME_H */
diff --git a/Include/internal/pycore_sysmodule.h b/Include/internal/pycore_sysmodule.h
new file mode 100644
index 00000000000000..738a7746a03842
--- /dev/null
+++ b/Include/internal/pycore_sysmodule.h
@@ -0,0 +1,24 @@
+#ifndef Py_INTERNAL_SYSMODULE_H
+#define Py_INTERNAL_SYSMODULE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+PyAPI_FUNC(int) _PySys_Audit(
+ PyThreadState *tstate,
+ const char *event,
+ const char *argFormat,
+ ...);
+
+/* We want minimal exposure of this function, so use extern rather than
+ PyAPI_FUNC() to not export the symbol. */
+extern void _PySys_ClearAuditHooks(PyThreadState *tstate);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTERNAL_SYSMODULE_H */
diff --git a/Include/internal/pycore_traceback.h b/Include/internal/pycore_traceback.h
index 99443d7ba2706f..1f092411a72ba5 100644
--- a/Include/internal/pycore_traceback.h
+++ b/Include/internal/pycore_traceback.h
@@ -89,7 +89,7 @@ PyAPI_FUNC(void) _Py_DumpHexadecimal(
PyAPI_FUNC(PyObject*) _PyTraceBack_FromFrame(
PyObject *tb_next,
- struct _frame *frame);
+ PyFrameObject *frame);
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_warnings.h b/Include/internal/pycore_warnings.h
index 9a72526279b872..cafe305edb0e02 100644
--- a/Include/internal/pycore_warnings.h
+++ b/Include/internal/pycore_warnings.h
@@ -17,6 +17,8 @@ struct _warnings_runtime_state {
long filters_version;
};
+extern PyStatus _PyWarnings_InitState(PyThreadState *tstate);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/iterobject.h b/Include/iterobject.h
index eec2ee271eb67d..51139bf1874088 100644
--- a/Include/iterobject.h
+++ b/Include/iterobject.h
@@ -8,12 +8,12 @@ extern "C" {
PyAPI_DATA(PyTypeObject) PySeqIter_Type;
PyAPI_DATA(PyTypeObject) PyCallIter_Type;
-#define PySeqIter_Check(op) (Py_TYPE(op) == &PySeqIter_Type)
+#define PySeqIter_Check(op) Py_IS_TYPE(op, &PySeqIter_Type)
PyAPI_FUNC(PyObject *) PySeqIter_New(PyObject *);
-#define PyCallIter_Check(op) (Py_TYPE(op) == &PyCallIter_Type)
+#define PyCallIter_Check(op) Py_IS_TYPE(op, &PyCallIter_Type)
PyAPI_FUNC(PyObject *) PyCallIter_New(PyObject *, PyObject *);
diff --git a/Include/listobject.h b/Include/listobject.h
index baf94152792ed4..2a8a25525d1d7b 100644
--- a/Include/listobject.h
+++ b/Include/listobject.h
@@ -1,16 +1,14 @@
+/* List object interface
-/* List object interface */
+ Another generally useful object type is a list of object pointers.
+ This is a mutable type: the list items can be changed, and items can be
+ added or removed. Out-of-range indices or non-list objects are ignored.
-/*
-Another generally useful object type is a list of object pointers.
-This is a mutable type: the list items can be changed, and items can be
-added or removed. Out-of-range indices or non-list objects are ignored.
-
-*** WARNING *** PyList_SetItem does not increment the new item's reference
-count, but does decrement the reference count of the item it replaces,
-if not nil. It does *decrement* the reference count if it is *not*
-inserted in the list. Similarly, PyList_GetItem does not increment the
-returned item's reference count.
+ WARNING: PyList_SetItem does not increment the new item's reference count,
+ but does decrement the reference count of the item it replaces, if not nil.
+ It does *decrement* the reference count if it is *not* inserted in the list.
+ Similarly, PyList_GetItem does not increment the returned item's reference
+ count.
*/
#ifndef Py_LISTOBJECT_H
@@ -19,59 +17,33 @@ returned item's reference count.
extern "C" {
#endif
-#ifndef Py_LIMITED_API
-typedef struct {
- PyObject_VAR_HEAD
- /* Vector of pointers to list elements. list[0] is ob_item[0], etc. */
- PyObject **ob_item;
-
- /* ob_item contains space for 'allocated' elements. The number
- * currently in use is ob_size.
- * Invariants:
- * 0 <= ob_size <= allocated
- * len(list) == ob_size
- * ob_item == NULL implies ob_size == allocated == 0
- * list.sort() temporarily sets allocated to -1 to detect mutations.
- *
- * Items must normally not be NULL, except during construction when
- * the list is not yet visible outside the function that builds it.
- */
- Py_ssize_t allocated;
-} PyListObject;
-#endif
-
PyAPI_DATA(PyTypeObject) PyList_Type;
PyAPI_DATA(PyTypeObject) PyListIter_Type;
PyAPI_DATA(PyTypeObject) PyListRevIter_Type;
#define PyList_Check(op) \
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_LIST_SUBCLASS)
-#define PyList_CheckExact(op) (Py_TYPE(op) == &PyList_Type)
+#define PyList_CheckExact(op) Py_IS_TYPE(op, &PyList_Type)
PyAPI_FUNC(PyObject *) PyList_New(Py_ssize_t size);
PyAPI_FUNC(Py_ssize_t) PyList_Size(PyObject *);
+
PyAPI_FUNC(PyObject *) PyList_GetItem(PyObject *, Py_ssize_t);
PyAPI_FUNC(int) PyList_SetItem(PyObject *, Py_ssize_t, PyObject *);
PyAPI_FUNC(int) PyList_Insert(PyObject *, Py_ssize_t, PyObject *);
PyAPI_FUNC(int) PyList_Append(PyObject *, PyObject *);
+
PyAPI_FUNC(PyObject *) PyList_GetSlice(PyObject *, Py_ssize_t, Py_ssize_t);
PyAPI_FUNC(int) PyList_SetSlice(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *);
+
PyAPI_FUNC(int) PyList_Sort(PyObject *);
PyAPI_FUNC(int) PyList_Reverse(PyObject *);
PyAPI_FUNC(PyObject *) PyList_AsTuple(PyObject *);
-#ifndef Py_LIMITED_API
-PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *);
-
-PyAPI_FUNC(int) PyList_ClearFreeList(void);
-PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out);
-#endif
-/* Macro, trading safety for speed */
#ifndef Py_LIMITED_API
-#define PyList_GET_ITEM(op, i) (((PyListObject *)(op))->ob_item[i])
-#define PyList_SET_ITEM(op, i, v) (((PyListObject *)(op))->ob_item[i] = (v))
-#define PyList_GET_SIZE(op) (assert(PyList_Check(op)),Py_SIZE(op))
-#define _PyList_ITEMS(op) (((PyListObject *)(op))->ob_item)
+# define Py_CPYTHON_LISTOBJECT_H
+# include "cpython/listobject.h"
+# undef Py_CPYTHON_LISTOBJECT_H
#endif
#ifdef __cplusplus
diff --git a/Include/longobject.h b/Include/longobject.h
index 87b4d017d3234e..dad08c23f82113 100644
--- a/Include/longobject.h
+++ b/Include/longobject.h
@@ -13,7 +13,7 @@ PyAPI_DATA(PyTypeObject) PyLong_Type;
#define PyLong_Check(op) \
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_LONG_SUBCLASS)
-#define PyLong_CheckExact(op) (Py_TYPE(op) == &PyLong_Type)
+#define PyLong_CheckExact(op) Py_IS_TYPE(op, &PyLong_Type)
PyAPI_FUNC(PyObject *) PyLong_FromLong(long);
PyAPI_FUNC(PyObject *) PyLong_FromUnsignedLong(unsigned long);
@@ -173,23 +173,6 @@ PyAPI_FUNC(int) _PyLong_AsByteArray(PyLongObject* v,
unsigned char* bytes, size_t n,
int little_endian, int is_signed);
-/* _PyLong_FromNbInt: Convert the given object to a PyLongObject
- using the nb_int slot, if available. Raise TypeError if either the
- nb_int slot is not available or the result of the call to nb_int
- returns something not of type int.
-*/
-PyAPI_FUNC(PyObject *) _PyLong_FromNbInt(PyObject *);
-
-/* Convert the given object to a PyLongObject using the nb_index or
- nb_int slots, if available (the latter is deprecated).
- Raise TypeError if either nb_index and nb_int slots are not
- available or the result of the call to nb_index or nb_int
- returns something not of type int.
- Should be replaced with PyNumber_Index after the end of the
- deprecation period.
-*/
-PyAPI_FUNC(PyObject *) _PyLong_FromNbIndexOrNbInt(PyObject *);
-
/* _PyLong_Format: Convert the long to a string object with given base,
appending a base prefix of 0[box] if base is 2, 8 or 16. */
PyAPI_FUNC(PyObject *) _PyLong_Format(PyObject *obj, int base);
diff --git a/Include/memoryobject.h b/Include/memoryobject.h
index 990a716f220399..306028f4b225d8 100644
--- a/Include/memoryobject.h
+++ b/Include/memoryobject.h
@@ -11,7 +11,7 @@ PyAPI_DATA(PyTypeObject) _PyManagedBuffer_Type;
#endif
PyAPI_DATA(PyTypeObject) PyMemoryView_Type;
-#define PyMemoryView_Check(op) (Py_TYPE(op) == &PyMemoryView_Type)
+#define PyMemoryView_Check(op) Py_IS_TYPE(op, &PyMemoryView_Type)
#ifndef Py_LIMITED_API
/* Get a pointer to the memoryview's private copy of the exporter's buffer. */
diff --git a/Include/methodobject.h b/Include/methodobject.h
index d9f8d4f80c2cd8..12e049b4043ba5 100644
--- a/Include/methodobject.h
+++ b/Include/methodobject.h
@@ -13,7 +13,8 @@ extern "C" {
PyAPI_DATA(PyTypeObject) PyCFunction_Type;
-#define PyCFunction_Check(op) (Py_TYPE(op) == &PyCFunction_Type)
+#define PyCFunction_CheckExact(op) Py_IS_TYPE(op, &PyCFunction_Type)
+#define PyCFunction_Check(op) PyObject_TypeCheck(op, &PyCFunction_Type)
typedef PyObject *(*PyCFunction)(PyObject *, PyObject *);
typedef PyObject *(*_PyCFunctionFast) (PyObject *, PyObject *const *, Py_ssize_t);
@@ -22,21 +23,13 @@ typedef PyObject *(*PyCFunctionWithKeywords)(PyObject *, PyObject *,
typedef PyObject *(*_PyCFunctionFastWithKeywords) (PyObject *,
PyObject *const *, Py_ssize_t,
PyObject *);
+typedef PyObject *(*PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *,
+ size_t, PyObject *);
+
PyAPI_FUNC(PyCFunction) PyCFunction_GetFunction(PyObject *);
PyAPI_FUNC(PyObject *) PyCFunction_GetSelf(PyObject *);
PyAPI_FUNC(int) PyCFunction_GetFlags(PyObject *);
-/* Macros for direct access to these values. Type checks are *not*
- done, so use with care. */
-#ifndef Py_LIMITED_API
-#define PyCFunction_GET_FUNCTION(func) \
- (((PyCFunctionObject *)func) -> m_ml -> ml_meth)
-#define PyCFunction_GET_SELF(func) \
- (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \
- NULL : ((PyCFunctionObject *)func) -> m_self)
-#define PyCFunction_GET_FLAGS(func) \
- (((PyCFunctionObject *)func) -> m_ml -> ml_flags)
-#endif
Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyCFunction_Call(PyObject *, PyObject *, PyObject *);
struct PyMethodDef {
@@ -52,6 +45,13 @@ typedef struct PyMethodDef PyMethodDef;
PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *,
PyObject *);
+#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000
+#define PyCFunction_NewEx(ML, SELF, MOD) PyCMethod_New((ML), (SELF), (MOD), NULL)
+PyAPI_FUNC(PyObject *) PyCMethod_New(PyMethodDef *, PyObject *,
+ PyObject *, PyTypeObject *);
+#endif
+
+
/* Flag passed to newmethodobject */
/* #define METH_OLDARGS 0x0000 -- unsupported now */
#define METH_VARARGS 0x0001
@@ -84,15 +84,24 @@ PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *,
#define METH_STACKLESS 0x0000
#endif
+/* METH_METHOD means the function stores an
+ * additional reference to the class that defines it;
+ * both self and class are passed to it.
+ * It uses PyCMethodObject instead of PyCFunctionObject.
+ * May not be combined with METH_NOARGS, METH_O, METH_CLASS or METH_STATIC.
+ */
+
+#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000
+#define METH_METHOD 0x0200
+#endif
+
+
#ifndef Py_LIMITED_API
-typedef struct {
- PyObject_HEAD
- PyMethodDef *m_ml; /* Description of the C function to call */
- PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */
- PyObject *m_module; /* The __module__ attribute, can be anything */
- PyObject *m_weakreflist; /* List of weak references */
- vectorcallfunc vectorcall;
-} PyCFunctionObject;
+
+#define Py_CPYTHON_METHODOBJECT_H
+#include "cpython/methodobject.h"
+#undef Py_CPYTHON_METHODOBJECT_H
+
#endif
#ifdef __cplusplus
diff --git a/Include/modsupport.h b/Include/modsupport.h
index f90ede4831e32b..4c4aab65bac103 100644
--- a/Include/modsupport.h
+++ b/Include/modsupport.h
@@ -60,9 +60,12 @@ PyAPI_FUNC(int) _PyArg_UnpackStack(
...);
PyAPI_FUNC(int) _PyArg_NoKeywords(const char *funcname, PyObject *kwargs);
+PyAPI_FUNC(int) _PyArg_NoKwnames(const char *funcname, PyObject *kwnames);
PyAPI_FUNC(int) _PyArg_NoPositional(const char *funcname, PyObject *args);
#define _PyArg_NoKeywords(funcname, kwargs) \
((kwargs) == NULL || _PyArg_NoKeywords((funcname), (kwargs)))
+#define _PyArg_NoKwnames(funcname, kwnames) \
+ ((kwnames) == NULL || _PyArg_NoKwnames((funcname), (kwnames)))
#define _PyArg_NoPositional(funcname, args) \
((args) == NULL || _PyArg_NoPositional((funcname), (args)))
@@ -136,6 +139,10 @@ void _PyArg_Fini(void);
PyAPI_FUNC(int) PyModule_AddObject(PyObject *, const char *, PyObject *);
PyAPI_FUNC(int) PyModule_AddIntConstant(PyObject *, const char *, long);
PyAPI_FUNC(int) PyModule_AddStringConstant(PyObject *, const char *, const char *);
+#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000
+/* New in 3.9 */
+PyAPI_FUNC(int) PyModule_AddType(PyObject *module, PyTypeObject *type);
+#endif /* Py_LIMITED_API */
#define PyModule_AddIntMacro(m, c) PyModule_AddIntConstant(m, #c, c)
#define PyModule_AddStringMacro(m, c) PyModule_AddStringConstant(m, #c, c)
diff --git a/Include/moduleobject.h b/Include/moduleobject.h
index e246fd2faf9184..cf9ad40c0a17a0 100644
--- a/Include/moduleobject.h
+++ b/Include/moduleobject.h
@@ -10,7 +10,7 @@ extern "C" {
PyAPI_DATA(PyTypeObject) PyModule_Type;
#define PyModule_Check(op) PyObject_TypeCheck(op, &PyModule_Type)
-#define PyModule_CheckExact(op) (Py_TYPE(op) == &PyModule_Type)
+#define PyModule_CheckExact(op) Py_IS_TYPE(op, &PyModule_Type)
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000
PyAPI_FUNC(PyObject *) PyModule_NewObject(
diff --git a/Include/node.h b/Include/node.h
index 2b390740973873..ca24f289085920 100644
--- a/Include/node.h
+++ b/Include/node.h
@@ -31,7 +31,6 @@ PyAPI_FUNC(Py_ssize_t) _PyNode_SizeOf(node *n);
#define NCH(n) ((n)->n_nchildren)
#define CHILD(n, i) (&(n)->n_child[i])
-#define RCHILD(n, i) (CHILD(n, NCH(n) + i))
#define TYPE(n) ((n)->n_type)
#define STR(n) ((n)->n_str)
#define LINENO(n) ((n)->n_lineno)
diff --git a/Include/object.h b/Include/object.h
index e7e9c1b8ed752d..537567040f9871 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -27,7 +27,7 @@ of data it contains. An object's type is fixed when it is created.
Types themselves are represented as objects; an object contains a
pointer to the corresponding type object. The type itself has a type
pointer pointing to the object representing the type 'type', which
-contains a pointer to itself!).
+contains a pointer to itself!.
Objects do not float around in memory; once allocated an object keeps
the same size and address. Objects that must hold variable-size data
@@ -110,6 +110,7 @@ typedef struct _object {
/* Cast argument to PyObject* type. */
#define _PyObject_CAST(op) ((PyObject*)(op))
+#define _PyObject_CAST_CONST(op) ((const PyObject*)(op))
typedef struct {
PyObject ob_base;
@@ -118,10 +119,50 @@ typedef struct {
/* Cast argument to PyVarObject* type. */
#define _PyVarObject_CAST(op) ((PyVarObject*)(op))
+#define _PyVarObject_CAST_CONST(op) ((const PyVarObject*)(op))
+
+
+static inline Py_ssize_t _Py_REFCNT(const PyObject *ob) {
+ return ob->ob_refcnt;
+}
+#define Py_REFCNT(ob) _Py_REFCNT(_PyObject_CAST_CONST(ob))
+
+
+static inline Py_ssize_t _Py_SIZE(const PyVarObject *ob) {
+ return ob->ob_size;
+}
+#define Py_SIZE(ob) _Py_SIZE(_PyVarObject_CAST_CONST(ob))
+
+
+static inline PyTypeObject* _Py_TYPE(const PyObject *ob) {
+ return ob->ob_type;
+}
+#define Py_TYPE(ob) _Py_TYPE(_PyObject_CAST_CONST(ob))
+
+
+static inline int _Py_IS_TYPE(const PyObject *ob, const PyTypeObject *type) {
+ return ob->ob_type == type;
+}
+#define Py_IS_TYPE(ob, type) _Py_IS_TYPE(_PyObject_CAST_CONST(ob), type)
+
+
+static inline void _Py_SET_REFCNT(PyObject *ob, Py_ssize_t refcnt) {
+ ob->ob_refcnt = refcnt;
+}
+#define Py_SET_REFCNT(ob, refcnt) _Py_SET_REFCNT(_PyObject_CAST(ob), refcnt)
+
+
+static inline void _Py_SET_TYPE(PyObject *ob, PyTypeObject *type) {
+ ob->ob_type = type;
+}
+#define Py_SET_TYPE(ob, type) _Py_SET_TYPE(_PyObject_CAST(ob), type)
+
+
+static inline void _Py_SET_SIZE(PyVarObject *ob, Py_ssize_t size) {
+ ob->ob_size = size;
+}
+#define Py_SET_SIZE(ob, size) _Py_SET_SIZE(_PyVarObject_CAST(ob), size)
-#define Py_REFCNT(ob) (_PyObject_CAST(ob)->ob_refcnt)
-#define Py_TYPE(ob) (_PyObject_CAST(ob)->ob_type)
-#define Py_SIZE(ob) (_PyVarObject_CAST(ob)->ob_size)
/*
Type objects contain a string containing the type name (to help somewhat
@@ -191,11 +232,16 @@ PyAPI_FUNC(PyObject*) PyType_FromSpecWithBases(PyType_Spec*, PyObject*);
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03040000
PyAPI_FUNC(void*) PyType_GetSlot(PyTypeObject*, int);
#endif
+#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000
+PyAPI_FUNC(PyObject*) PyType_FromModuleAndSpec(PyObject *, PyType_Spec *, PyObject *);
+PyAPI_FUNC(PyObject *) PyType_GetModule(struct _typeobject *);
+PyAPI_FUNC(void *) PyType_GetModuleState(struct _typeobject *);
+#endif
/* Generic type check */
PyAPI_FUNC(int) PyType_IsSubtype(PyTypeObject *, PyTypeObject *);
#define PyObject_TypeCheck(ob, tp) \
- (Py_TYPE(ob) == (tp) || PyType_IsSubtype(Py_TYPE(ob), (tp)))
+ (Py_IS_TYPE(ob, tp) || PyType_IsSubtype(Py_TYPE(ob), (tp)))
PyAPI_DATA(PyTypeObject) PyType_Type; /* built-in 'type' */
PyAPI_DATA(PyTypeObject) PyBaseObject_Type; /* built-in 'object' */
@@ -279,7 +325,9 @@ given type object has a specified feature.
/* Set if the type implements the vectorcall protocol (PEP 590) */
#ifndef Py_LIMITED_API
-#define _Py_TPFLAGS_HAVE_VECTORCALL (1UL << 11)
+#define Py_TPFLAGS_HAVE_VECTORCALL (1UL << 11)
+// Backwards compatibility alias for API that was provisional in Python 3.8
+#define _Py_TPFLAGS_HAVE_VECTORCALL Py_TPFLAGS_HAVE_VECTORCALL
#endif
/* Set if the type is 'ready' -- fully initialized */
@@ -590,11 +638,7 @@ times.
static inline int
PyType_HasFeature(PyTypeObject *type, unsigned long feature) {
-#ifdef Py_LIMITED_API
return ((PyType_GetFlags(type) & feature) != 0);
-#else
- return ((type->tp_flags & feature) != 0);
-#endif
}
#define PyType_FastSubclass(type, flag) PyType_HasFeature(type, flag)
@@ -605,7 +649,7 @@ static inline int _PyType_Check(PyObject *op) {
#define PyType_Check(op) _PyType_Check(_PyObject_CAST(op))
static inline int _PyType_CheckExact(PyObject *op) {
- return (Py_TYPE(op) == &PyType_Type);
+ return Py_IS_TYPE(op, &PyType_Type);
}
#define PyType_CheckExact(op) _PyType_CheckExact(_PyObject_CAST(op))
diff --git a/Include/objimpl.h b/Include/objimpl.h
index 45919251f43a51..030d7eee29723e 100644
--- a/Include/objimpl.h
+++ b/Include/objimpl.h
@@ -122,12 +122,18 @@ PyAPI_FUNC(PyVarObject *) PyObject_InitVar(PyVarObject *,
PyAPI_FUNC(PyObject *) _PyObject_New(PyTypeObject *);
PyAPI_FUNC(PyVarObject *) _PyObject_NewVar(PyTypeObject *, Py_ssize_t);
-#define PyObject_New(type, typeobj) \
- ( (type *) _PyObject_New(typeobj) )
+#define PyObject_New(type, typeobj) ((type *)_PyObject_New(typeobj))
+
+// Alias to PyObject_New(). In Python 3.8, PyObject_NEW() called directly
+// PyObject_MALLOC() with _PyObject_SIZE().
+#define PyObject_NEW(type, typeobj) PyObject_New(type, typeobj)
+
#define PyObject_NewVar(type, typeobj, n) \
( (type *) _PyObject_NewVar((typeobj), (n)) )
-#define _PyObject_SIZE(typeobj) ( (typeobj)->tp_basicsize )
+// Alias to PyObject_NewVar(). In Python 3.8, PyObject_NEW_VAR() called directly
+// PyObject_MALLOC() with _PyObject_VAR_SIZE().
+#define PyObject_NEW_VAR(type, typeobj, n) PyObject_NewVar(type, typeobj, n)
#ifdef Py_LIMITED_API
@@ -143,64 +149,6 @@ PyAPI_FUNC(PyVarObject *) _PyObject_NewVar(PyTypeObject *, Py_ssize_t);
#endif
-/* _PyObject_VAR_SIZE returns the number of bytes (as size_t) allocated for a
- vrbl-size object with nitems items, exclusive of gc overhead (if any). The
- value is rounded up to the closest multiple of sizeof(void *), in order to
- ensure that pointer fields at the end of the object are correctly aligned
- for the platform (this is of special importance for subclasses of, e.g.,
- str or int, so that pointers can be stored after the embedded data).
-
- Note that there's no memory wastage in doing this, as malloc has to
- return (at worst) pointer-aligned memory anyway.
-*/
-#if ((SIZEOF_VOID_P - 1) & SIZEOF_VOID_P) != 0
-# error "_PyObject_VAR_SIZE requires SIZEOF_VOID_P be a power of 2"
-#endif
-
-#define _PyObject_VAR_SIZE(typeobj, nitems) \
- _Py_SIZE_ROUND_UP((typeobj)->tp_basicsize + \
- (nitems)*(typeobj)->tp_itemsize, \
- SIZEOF_VOID_P)
-
-#define PyObject_NEW(type, typeobj) \
-( (type *) PyObject_Init( \
- (PyObject *) PyObject_MALLOC( _PyObject_SIZE(typeobj) ), (typeobj)) )
-
-#define PyObject_NEW_VAR(type, typeobj, n) \
-( (type *) PyObject_InitVar( \
- (PyVarObject *) PyObject_MALLOC(_PyObject_VAR_SIZE((typeobj),(n)) ),\
- (typeobj), (n)) )
-
-/* This example code implements an object constructor with a custom
- allocator, where PyObject_New is inlined, and shows the important
- distinction between two steps (at least):
- 1) the actual allocation of the object storage;
- 2) the initialization of the Python specific fields
- in this storage with PyObject_{Init, InitVar}.
-
- PyObject *
- YourObject_New(...)
- {
- PyObject *op;
-
- op = (PyObject *) Your_Allocator(_PyObject_SIZE(YourTypeStruct));
- if (op == NULL)
- return PyErr_NoMemory();
-
- PyObject_Init(op, &YourTypeStruct);
-
- op->ob_field = value;
- ...
- return op;
- }
-
- Note that in C++, the use of the new operator usually implies that
- the 1st step is performed automatically for you, so in a C++ class
- constructor you would start directly with PyObject_Init/InitVar
-*/
-
-
-
/*
* Garbage Collection Support
* ==========================
@@ -238,6 +186,8 @@ PyAPI_FUNC(void) PyObject_GC_Del(void *);
#define PyObject_GC_NewVar(type, typeobj, n) \
( (type *) _PyObject_GC_NewVar((typeobj), (n)) )
+PyAPI_FUNC(int) PyObject_GC_IsTracked(PyObject *);
+PyAPI_FUNC(int) PyObject_GC_IsFinalized(PyObject *);
/* Utility macro to help write tp_traverse functions.
* To use this macro, the tp_traverse function must name its arguments
diff --git a/Include/odictobject.h b/Include/odictobject.h
index 35aff8a29a6e34..e070413017d801 100644
--- a/Include/odictobject.h
+++ b/Include/odictobject.h
@@ -19,7 +19,7 @@ PyAPI_DATA(PyTypeObject) PyODictItems_Type;
PyAPI_DATA(PyTypeObject) PyODictValues_Type;
#define PyODict_Check(op) PyObject_TypeCheck(op, &PyODict_Type)
-#define PyODict_CheckExact(op) (Py_TYPE(op) == &PyODict_Type)
+#define PyODict_CheckExact(op) Py_IS_TYPE(op, &PyODict_Type)
#define PyODict_SIZE(op) PyDict_GET_SIZE((op))
PyAPI_FUNC(PyObject *) PyODict_New(void);
diff --git a/Include/patchlevel.h b/Include/patchlevel.h
index a62e175d966517..8578b6597f6023 100644
--- a/Include/patchlevel.h
+++ b/Include/patchlevel.h
@@ -17,13 +17,13 @@
/* Version parsed out into numeric values */
/*--start constants--*/
#define PY_MAJOR_VERSION 3
-#define PY_MINOR_VERSION 9
+#define PY_MINOR_VERSION 10
#define PY_MICRO_VERSION 0
#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_ALPHA
-#define PY_RELEASE_SERIAL 3
+#define PY_RELEASE_SERIAL 0
/* Version as a string */
-#define PY_VERSION "3.9.0a3+"
+#define PY_VERSION "3.10.0a0"
/*--end constants--*/
/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
diff --git a/Include/picklebufobject.h b/Include/picklebufobject.h
index f07e900bf26dac..0df2561dceaea0 100644
--- a/Include/picklebufobject.h
+++ b/Include/picklebufobject.h
@@ -12,7 +12,7 @@ extern "C" {
PyAPI_DATA(PyTypeObject) PyPickleBuffer_Type;
-#define PyPickleBuffer_Check(op) (Py_TYPE(op) == &PyPickleBuffer_Type)
+#define PyPickleBuffer_Check(op) Py_IS_TYPE(op, &PyPickleBuffer_Type)
/* Create a PickleBuffer redirecting to the given buffer-enabled object */
PyAPI_FUNC(PyObject *) PyPickleBuffer_FromObject(PyObject *);
diff --git a/Include/py_curses.h b/Include/py_curses.h
index 2702b37ea7cfe9..b70252d9d7605e 100644
--- a/Include/py_curses.h
+++ b/Include/py_curses.h
@@ -64,7 +64,7 @@ typedef struct {
char *encoding;
} PyCursesWindowObject;
-#define PyCursesWindow_Check(v) (Py_TYPE(v) == &PyCursesWindow_Type)
+#define PyCursesWindow_Check(v) Py_IS_TYPE(v, &PyCursesWindow_Type)
#define PyCurses_CAPSULE_NAME "_curses._C_API"
@@ -97,4 +97,3 @@ static const char catchall_NULL[] = "curses function returned NULL";
#endif /* !defined(Py_CURSES_H) */
-
diff --git a/Include/pycapsule.h b/Include/pycapsule.h
index d9ecda7a4b6e4a..fb5d503fea73f1 100644
--- a/Include/pycapsule.h
+++ b/Include/pycapsule.h
@@ -22,7 +22,7 @@ PyAPI_DATA(PyTypeObject) PyCapsule_Type;
typedef void (*PyCapsule_Destructor)(PyObject *);
-#define PyCapsule_CheckExact(op) (Py_TYPE(op) == &PyCapsule_Type)
+#define PyCapsule_CheckExact(op) Py_IS_TYPE(op, &PyCapsule_Type)
PyAPI_FUNC(PyObject *) PyCapsule_New(
diff --git a/Include/pydebug.h b/Include/pydebug.h
index bd4aafe3b49f83..78bcb118be4659 100644
--- a/Include/pydebug.h
+++ b/Include/pydebug.h
@@ -5,8 +5,6 @@
extern "C" {
#endif
-/* These global variable are defined in pylifecycle.c */
-/* XXX (ncoghlan): move these declarations to pylifecycle.h? */
PyAPI_DATA(int) Py_DebugFlag;
PyAPI_DATA(int) Py_VerboseFlag;
PyAPI_DATA(int) Py_QuietFlag;
diff --git a/Include/pyerrors.h b/Include/pyerrors.h
index 5125a51ec1aa17..399bb7c3a6fac0 100644
--- a/Include/pyerrors.h
+++ b/Include/pyerrors.h
@@ -21,7 +21,11 @@ PyAPI_FUNC(void) PyErr_GetExcInfo(PyObject **, PyObject **, PyObject **);
PyAPI_FUNC(void) PyErr_SetExcInfo(PyObject *, PyObject *, PyObject *);
#endif
-/* Defined in Python/pylifecycle.c */
+/* Defined in Python/pylifecycle.c
+
+ The Py_FatalError() function is replaced with a macro which logs
+ automatically the name of the current function, unless the Py_LIMITED_API
+ macro is defined. */
PyAPI_FUNC(void) _Py_NO_RETURN Py_FatalError(const char *message);
#if defined(Py_DEBUG) || defined(Py_LIMITED_API)
@@ -54,11 +58,11 @@ PyAPI_FUNC(void) PyException_SetContext(PyObject *, PyObject *);
PyType_FastSubclass((PyTypeObject*)(x), Py_TPFLAGS_BASE_EXC_SUBCLASS))
#define PyExceptionInstance_Check(x) \
- PyType_FastSubclass((x)->ob_type, Py_TPFLAGS_BASE_EXC_SUBCLASS)
+ PyType_FastSubclass(Py_TYPE(x), Py_TPFLAGS_BASE_EXC_SUBCLASS)
PyAPI_FUNC(const char *) PyExceptionClass_Name(PyObject *);
-#define PyExceptionInstance_Class(x) ((PyObject*)((x)->ob_type))
+#define PyExceptionInstance_Class(x) ((PyObject*)Py_TYPE(x))
/* Predefined exceptions */
diff --git a/Include/pyframe.h b/Include/pyframe.h
new file mode 100644
index 00000000000000..3816224201c7e4
--- /dev/null
+++ b/Include/pyframe.h
@@ -0,0 +1,22 @@
+/* Limited C API of PyFrame API
+ *
+ * Include "frameobject.h" to get the PyFrameObject structure.
+ */
+
+#ifndef Py_PYFRAME_H
+#define Py_PYFRAME_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct _frame PyFrameObject;
+
+/* Return the line of code the frame is currently executing. */
+PyAPI_FUNC(int) PyFrame_GetLineNumber(PyFrameObject *);
+
+PyAPI_FUNC(PyCodeObject *) PyFrame_GetCode(PyFrameObject *frame);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PYFRAME_H */
diff --git a/Include/pyhash.h b/Include/pyhash.h
index 2f398589cee7ef..4437b870332bde 100644
--- a/Include/pyhash.h
+++ b/Include/pyhash.h
@@ -9,6 +9,8 @@ extern "C" {
#ifndef Py_LIMITED_API
PyAPI_FUNC(Py_hash_t) _Py_HashDouble(double);
PyAPI_FUNC(Py_hash_t) _Py_HashPointer(const void*);
+// Similar to _Py_HashPointer(), but don't replace -1 with -2
+PyAPI_FUNC(Py_hash_t) _Py_HashPointerRaw(const void*);
PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void*, Py_ssize_t);
#endif
diff --git a/Include/pymacro.h b/Include/pymacro.h
index c080fb164e353a..856cae774d61c7 100644
--- a/Include/pymacro.h
+++ b/Include/pymacro.h
@@ -101,7 +101,7 @@
#endif
#if defined(RANDALL_WAS_HERE)
-#define Py_UNREACHABLE() \
+# define Py_UNREACHABLE() \
Py_FatalError( \
"If you're seeing this, the code is in what I thought was\n" \
"an unreachable state.\n\n" \
@@ -113,13 +113,17 @@
"I'm so sorry.\n" \
"https://xkcd.com/2200")
#elif defined(Py_DEBUG)
-#define Py_UNREACHABLE() \
+# define Py_UNREACHABLE() \
Py_FatalError( \
"We've reached an unreachable state. Anything is possible.\n" \
"The limits were in our heads all along. Follow your dreams.\n" \
"https://xkcd.com/2200")
+#elif defined(__GNUC__) || defined(__clang__) || defined(__INTEL_COMPILER)
+# define Py_UNREACHABLE() __builtin_unreachable()
+#elif defined(_MSC_VER)
+# define Py_UNREACHABLE() __assume(0)
#else
-#define Py_UNREACHABLE() \
+# define Py_UNREACHABLE() \
Py_FatalError("Unreachable C code path reached")
#endif
diff --git a/Include/pyport.h b/Include/pyport.h
index 64c73f012e7a71..63d3b81de5d232 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -768,11 +768,11 @@ extern char * _getpty(int *, int, mode_t, int);
*/
#ifdef WORDS_BIGENDIAN
-#define PY_BIG_ENDIAN 1
-#define PY_LITTLE_ENDIAN 0
+# define PY_BIG_ENDIAN 1
+# define PY_LITTLE_ENDIAN 0
#else
-#define PY_BIG_ENDIAN 0
-#define PY_LITTLE_ENDIAN 1
+# define PY_BIG_ENDIAN 0
+# define PY_LITTLE_ENDIAN 1
#endif
#ifdef Py_BUILD_CORE
@@ -832,6 +832,7 @@ extern _invalid_parameter_handler _Py_silent_invalid_parameter_handler;
PyAPI_FUNC(void) _Py_NO_RETURN PyThread_exit_thread(void); */
#if defined(__clang__) || \
+ defined(__xlc__) || \
(defined(__GNUC__) && \
((__GNUC__ >= 3) || \
(__GNUC__ == 2) && (__GNUC_MINOR__ >= 5)))
diff --git a/Include/pystate.h b/Include/pystate.h
index 1cb2305696f292..bae440778b261a 100644
--- a/Include/pystate.h
+++ b/Include/pystate.h
@@ -7,27 +7,35 @@
extern "C" {
#endif
-#include "pythread.h"
-
/* This limitation is for performance and simplicity. If needed it can be
removed (with effort). */
#define MAX_CO_EXTRA_USERS 255
/* Forward declarations for PyFrameObject, PyThreadState
and PyInterpreterState */
-struct _frame;
struct _ts;
struct _is;
/* struct _ts is defined in cpython/pystate.h */
typedef struct _ts PyThreadState;
-/* struct _is is defined in internal/pycore_pystate.h */
+/* struct _is is defined in internal/pycore_interp.h */
typedef struct _is PyInterpreterState;
PyAPI_FUNC(PyInterpreterState *) PyInterpreterState_New(void);
PyAPI_FUNC(void) PyInterpreterState_Clear(PyInterpreterState *);
PyAPI_FUNC(void) PyInterpreterState_Delete(PyInterpreterState *);
+#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000
+/* New in 3.9 */
+/* Get the current interpreter state.
+
+ Issue a fatal error if there is no current Python thread state or no current
+ interpreter. It cannot return NULL.
+
+ The caller must hold the GIL. */
+PyAPI_FUNC(PyInterpreterState *) PyInterpreterState_Get(void);
+#endif
+
#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03080000
/* New in 3.8 */
PyAPI_FUNC(PyObject *) PyInterpreterState_GetDict(PyInterpreterState *);
@@ -76,6 +84,13 @@ PyAPI_FUNC(PyThreadState *) PyThreadState_Swap(PyThreadState *);
PyAPI_FUNC(PyObject *) PyThreadState_GetDict(void);
PyAPI_FUNC(int) PyThreadState_SetAsyncExc(unsigned long, PyObject *);
+#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000
+/* New in 3.9 */
+PyAPI_FUNC(PyInterpreterState*) PyThreadState_GetInterpreter(PyThreadState *tstate);
+PyAPI_FUNC(PyFrameObject*) PyThreadState_GetFrame(PyThreadState *tstate);
+PyAPI_FUNC(uint64_t) PyThreadState_GetID(PyThreadState *tstate);
+#endif
+
typedef
enum {PyGILState_LOCKED, PyGILState_UNLOCKED}
PyGILState_STATE;
diff --git a/Include/pythread.h b/Include/pythread.h
index 1cf83b7a36d148..bb9d86412218ad 100644
--- a/Include/pythread.h
+++ b/Include/pythread.h
@@ -36,6 +36,15 @@ PyAPI_FUNC(int) PyThread_acquire_lock(PyThread_type_lock, int);
#define WAIT_LOCK 1
#define NOWAIT_LOCK 0
+#ifndef Py_LIMITED_API
+#ifdef HAVE_FORK
+/* Private function to reinitialize a lock at fork in the child process.
+ Reset the lock to the unlocked state.
+ Return 0 on success, return -1 on error. */
+PyAPI_FUNC(int) _PyThread_at_fork_reinit(PyThread_type_lock *lock);
+#endif /* HAVE_FORK */
+#endif /* !Py_LIMITED_API */
+
/* PY_TIMEOUT_T is the integral type used to specify timeouts when waiting
on a lock (see PyThread_acquire_lock_timed() below).
PY_TIMEOUT_MAX is the highest usable value (in microseconds) of that
diff --git a/Include/rangeobject.h b/Include/rangeobject.h
index 7e4dc28894b042..d6af8473f9e8d3 100644
--- a/Include/rangeobject.h
+++ b/Include/rangeobject.h
@@ -19,7 +19,7 @@ PyAPI_DATA(PyTypeObject) PyRange_Type;
PyAPI_DATA(PyTypeObject) PyRangeIter_Type;
PyAPI_DATA(PyTypeObject) PyLongRangeIter_Type;
-#define PyRange_Check(op) (Py_TYPE(op) == &PyRange_Type)
+#define PyRange_Check(op) Py_IS_TYPE(op, &PyRange_Type)
#ifdef __cplusplus
}
diff --git a/Include/setobject.h b/Include/setobject.h
index fc0ea83925f92f..119619ebe72994 100644
--- a/Include/setobject.h
+++ b/Include/setobject.h
@@ -70,7 +70,6 @@ PyAPI_DATA(PyObject *) _PySet_Dummy;
PyAPI_FUNC(int) _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key, Py_hash_t *hash);
PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable);
-PyAPI_FUNC(int) PySet_ClearFreeList(void);
#endif /* Section excluded by Py_LIMITED_API */
@@ -88,18 +87,18 @@ PyAPI_FUNC(int) PySet_Discard(PyObject *set, PyObject *key);
PyAPI_FUNC(PyObject *) PySet_Pop(PyObject *set);
PyAPI_FUNC(Py_ssize_t) PySet_Size(PyObject *anyset);
-#define PyFrozenSet_CheckExact(ob) (Py_TYPE(ob) == &PyFrozenSet_Type)
+#define PyFrozenSet_CheckExact(ob) Py_IS_TYPE(ob, &PyFrozenSet_Type)
#define PyAnySet_CheckExact(ob) \
- (Py_TYPE(ob) == &PySet_Type || Py_TYPE(ob) == &PyFrozenSet_Type)
+ (Py_IS_TYPE(ob, &PySet_Type) || Py_IS_TYPE(ob, &PyFrozenSet_Type))
#define PyAnySet_Check(ob) \
- (Py_TYPE(ob) == &PySet_Type || Py_TYPE(ob) == &PyFrozenSet_Type || \
+ (Py_IS_TYPE(ob, &PySet_Type) || Py_IS_TYPE(ob, &PyFrozenSet_Type) || \
PyType_IsSubtype(Py_TYPE(ob), &PySet_Type) || \
PyType_IsSubtype(Py_TYPE(ob), &PyFrozenSet_Type))
#define PySet_Check(ob) \
- (Py_TYPE(ob) == &PySet_Type || \
+ (Py_IS_TYPE(ob, &PySet_Type) || \
PyType_IsSubtype(Py_TYPE(ob), &PySet_Type))
#define PyFrozenSet_Check(ob) \
- (Py_TYPE(ob) == &PyFrozenSet_Type || \
+ (Py_IS_TYPE(ob, &PyFrozenSet_Type) || \
PyType_IsSubtype(Py_TYPE(ob), &PyFrozenSet_Type))
#ifdef __cplusplus
diff --git a/Include/sliceobject.h b/Include/sliceobject.h
index aae6f3cc7945e1..2c889508b4b495 100644
--- a/Include/sliceobject.h
+++ b/Include/sliceobject.h
@@ -28,7 +28,7 @@ typedef struct {
PyAPI_DATA(PyTypeObject) PySlice_Type;
PyAPI_DATA(PyTypeObject) PyEllipsis_Type;
-#define PySlice_Check(op) (Py_TYPE(op) == &PySlice_Type)
+#define PySlice_Check(op) Py_IS_TYPE(op, &PySlice_Type)
PyAPI_FUNC(PyObject *) PySlice_New(PyObject* start, PyObject* stop,
PyObject* step);
diff --git a/Include/symtable.h b/Include/symtable.h
index 5dcfa7e2c2bb68..abd19a7923e1ba 100644
--- a/Include/symtable.h
+++ b/Include/symtable.h
@@ -69,7 +69,7 @@ typedef struct _symtable_entry {
PyAPI_DATA(PyTypeObject) PySTEntry_Type;
-#define PySTEntry_Check(op) (Py_TYPE(op) == &PySTEntry_Type)
+#define PySTEntry_Check(op) Py_IS_TYPE(op, &PySTEntry_Type)
PyAPI_FUNC(int) PyST_GetScope(PySTEntryObject *, PyObject *);
diff --git a/Include/token.h b/Include/token.h
index e08708baf196e8..9b8a3aae074674 100644
--- a/Include/token.h
+++ b/Include/token.h
@@ -78,6 +78,10 @@ extern "C" {
#define ISTERMINAL(x) ((x) < NT_OFFSET)
#define ISNONTERMINAL(x) ((x) >= NT_OFFSET)
#define ISEOF(x) ((x) == ENDMARKER)
+#define ISWHITESPACE(x) ((x) == ENDMARKER || \
+ (x) == NEWLINE || \
+ (x) == INDENT || \
+ (x) == DEDENT)
PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
diff --git a/Include/traceback.h b/Include/traceback.h
index b451927fafa3a8..781e5a6eec4edd 100644
--- a/Include/traceback.h
+++ b/Include/traceback.h
@@ -4,16 +4,14 @@
extern "C" {
#endif
-struct _frame;
-
/* Traceback interface */
-PyAPI_FUNC(int) PyTraceBack_Here(struct _frame *);
+PyAPI_FUNC(int) PyTraceBack_Here(PyFrameObject *);
PyAPI_FUNC(int) PyTraceBack_Print(PyObject *, PyObject *);
/* Reveal traceback type so we can typecheck traceback objects */
PyAPI_DATA(PyTypeObject) PyTraceBack_Type;
-#define PyTraceBack_Check(v) (Py_TYPE(v) == &PyTraceBack_Type)
+#define PyTraceBack_Check(v) Py_IS_TYPE(v, &PyTraceBack_Type)
#ifndef Py_LIMITED_API
diff --git a/Include/tupleobject.h b/Include/tupleobject.h
index 590902de9d0215..e796a320192c20 100644
--- a/Include/tupleobject.h
+++ b/Include/tupleobject.h
@@ -25,7 +25,7 @@ PyAPI_DATA(PyTypeObject) PyTupleIter_Type;
#define PyTuple_Check(op) \
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_TUPLE_SUBCLASS)
-#define PyTuple_CheckExact(op) (Py_TYPE(op) == &PyTuple_Type)
+#define PyTuple_CheckExact(op) Py_IS_TYPE(op, &PyTuple_Type)
PyAPI_FUNC(PyObject *) PyTuple_New(Py_ssize_t size);
PyAPI_FUNC(Py_ssize_t) PyTuple_Size(PyObject *);
@@ -34,8 +34,6 @@ PyAPI_FUNC(int) PyTuple_SetItem(PyObject *, Py_ssize_t, PyObject *);
PyAPI_FUNC(PyObject *) PyTuple_GetSlice(PyObject *, Py_ssize_t, Py_ssize_t);
PyAPI_FUNC(PyObject *) PyTuple_Pack(Py_ssize_t, ...);
-PyAPI_FUNC(int) PyTuple_ClearFreeList(void);
-
#ifndef Py_LIMITED_API
# define Py_CPYTHON_TUPLEOBJECT_H
# include "cpython/tupleobject.h"
diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h
index 4dea4942181225..500ce242e9f0e8 100644
--- a/Include/unicodeobject.h
+++ b/Include/unicodeobject.h
@@ -113,7 +113,7 @@ PyAPI_DATA(PyTypeObject) PyUnicodeIter_Type;
#define PyUnicode_Check(op) \
PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_UNICODE_SUBCLASS)
-#define PyUnicode_CheckExact(op) (Py_TYPE(op) == &PyUnicode_Type)
+#define PyUnicode_CheckExact(op) Py_IS_TYPE(op, &PyUnicode_Type)
/* --- Constants ---------------------------------------------------------- */
diff --git a/Include/weakrefobject.h b/Include/weakrefobject.h
index 17051568f3a6e9..ac4b4821c8a147 100644
--- a/Include/weakrefobject.h
+++ b/Include/weakrefobject.h
@@ -46,10 +46,10 @@ PyAPI_DATA(PyTypeObject) _PyWeakref_CallableProxyType;
#define PyWeakref_CheckRef(op) PyObject_TypeCheck(op, &_PyWeakref_RefType)
#define PyWeakref_CheckRefExact(op) \
- (Py_TYPE(op) == &_PyWeakref_RefType)
+ Py_IS_TYPE(op, &_PyWeakref_RefType)
#define PyWeakref_CheckProxy(op) \
- ((Py_TYPE(op) == &_PyWeakref_ProxyType) || \
- (Py_TYPE(op) == &_PyWeakref_CallableProxyType))
+ (Py_IS_TYPE(op, &_PyWeakref_ProxyType) || \
+ Py_IS_TYPE(op, &_PyWeakref_CallableProxyType))
#define PyWeakref_Check(op) \
(PyWeakref_CheckRef(op) || PyWeakref_CheckProxy(op))
diff --git a/Lib/__future__.py b/Lib/__future__.py
index e1135685d846ce..d7cb8ac5f49745 100644
--- a/Lib/__future__.py
+++ b/Lib/__future__.py
@@ -68,14 +68,14 @@
# this module.
CO_NESTED = 0x0010 # nested_scopes
CO_GENERATOR_ALLOWED = 0 # generators (obsolete, was 0x1000)
-CO_FUTURE_DIVISION = 0x2000 # division
-CO_FUTURE_ABSOLUTE_IMPORT = 0x4000 # perform absolute imports by default
-CO_FUTURE_WITH_STATEMENT = 0x8000 # with statement
-CO_FUTURE_PRINT_FUNCTION = 0x10000 # print function
-CO_FUTURE_UNICODE_LITERALS = 0x20000 # unicode string literals
-CO_FUTURE_BARRY_AS_BDFL = 0x40000
-CO_FUTURE_GENERATOR_STOP = 0x80000 # StopIteration becomes RuntimeError in generators
-CO_FUTURE_ANNOTATIONS = 0x100000 # annotations become strings at runtime
+CO_FUTURE_DIVISION = 0x20000 # division
+CO_FUTURE_ABSOLUTE_IMPORT = 0x40000 # perform absolute imports by default
+CO_FUTURE_WITH_STATEMENT = 0x80000 # with statement
+CO_FUTURE_PRINT_FUNCTION = 0x100000 # print function
+CO_FUTURE_UNICODE_LITERALS = 0x200000 # unicode string literals
+CO_FUTURE_BARRY_AS_BDFL = 0x400000
+CO_FUTURE_GENERATOR_STOP = 0x800000 # StopIteration becomes RuntimeError in generators
+CO_FUTURE_ANNOTATIONS = 0x1000000 # annotations become strings at runtime
class _Feature:
def __init__(self, optionalRelease, mandatoryRelease, compiler_flag):
diff --git a/Lib/_aix_support.py b/Lib/_aix_support.py
index 2c5cd3297de30d..45504934063df8 100644
--- a/Lib/_aix_support.py
+++ b/Lib/_aix_support.py
@@ -1,35 +1,20 @@
"""Shared AIX support functions."""
import sys
-from sysconfig import get_config_var
+import sysconfig
-# subprocess is not necessarily available early in the build process
-# if not available, the config_vars are also definitely not available
-# supply substitutes to bootstrap the build
try:
import subprocess
- _have_subprocess = True
- _tmp_bd = get_config_var("AIX_BUILDDATE")
- _bgt = get_config_var("BUILD_GNU_TYPE")
except ImportError: # pragma: no cover
- _have_subprocess = False
- _tmp_bd = None
- _bgt = "powerpc-ibm-aix6.1.7.0"
-
-# if get_config_var("AIX_BUILDDATE") was unknown, provide a substitute,
-# impossible builddate to specify 'unknown'
-_MISSING_BD = 9898
-try:
- _bd = int(_tmp_bd)
-except TypeError:
- _bd = _MISSING_BD
-
-# Infer the ABI bitwidth from maxsize (assuming 64 bit as the default)
-_sz = 32 if sys.maxsize == (2**31-1) else 64
+ # _aix_support is used in distutils by setup.py to build C extensions,
+ # before subprocess dependencies like _posixsubprocess are available.
+ import _bootsubprocess as subprocess
def _aix_tag(vrtl, bd):
# type: (List[int], int) -> str
+ # Infer the ABI bitwidth from maxsize (assuming 64 bit as the default)
+ _sz = 32 if sys.maxsize == (2**31-1) else 64
# vrtl[version, release, technology_level]
return "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(vrtl[0], vrtl[1], vrtl[2], bd, _sz)
@@ -48,17 +33,12 @@ def _aix_bosmp64():
The fileset bos.mp64 is the AIX kernel. It's VRMF and builddate
reflect the current ABI levels of the runtime environment.
"""
- if _have_subprocess:
- # We expect all AIX systems to have lslpp installed in this location
- out = subprocess.check_output(["/usr/bin/lslpp", "-Lqc", "bos.mp64"])
- out = out.decode("utf-8").strip().split(":") # type: ignore
- # Use str() and int() to help mypy see types
- return str(out[2]), int(out[-1])
- else:
- from os import uname
-
- osname, host, release, version, machine = uname()
- return "{}.{}.0.0".format(version, release), _MISSING_BD
+ # We expect all AIX systems to have lslpp installed in this location
+ out = subprocess.check_output(["/usr/bin/lslpp", "-Lqc", "bos.mp64"])
+ out = out.decode("utf-8")
+ out = out.strip().split(":") # type: ignore
+ # Use str() and int() to help mypy see types
+ return (str(out[2]), int(out[-1]))
def aix_platform():
@@ -87,8 +67,10 @@ def aix_platform():
# extract vrtl from the BUILD_GNU_TYPE as an int
def _aix_bgt():
# type: () -> List[int]
- assert _bgt
- return _aix_vrtl(vrmf=_bgt)
+ gnu_type = sysconfig.get_config_var("BUILD_GNU_TYPE")
+ if not gnu_type:
+ raise ValueError("BUILD_GNU_TYPE is not defined")
+ return _aix_vrtl(vrmf=gnu_type)
def aix_buildtag():
@@ -96,4 +78,12 @@ def aix_buildtag():
"""
Return the platform_tag of the system Python was built on.
"""
- return _aix_tag(_aix_bgt(), _bd)
+ # AIX_BUILDDATE is defined by configure with:
+ # lslpp -Lcq bos.mp64 | awk -F: '{ print $NF }'
+ build_date = sysconfig.get_config_var("AIX_BUILDDATE")
+ try:
+ build_date = int(build_date)
+ except (ValueError, TypeError):
+ raise ValueError(f"AIX_BUILDDATE is not defined or invalid: "
+ f"{build_date!r}")
+ return _aix_tag(_aix_bgt(), build_date)
diff --git a/Lib/_bootsubprocess.py b/Lib/_bootsubprocess.py
new file mode 100644
index 00000000000000..014782f616c823
--- /dev/null
+++ b/Lib/_bootsubprocess.py
@@ -0,0 +1,97 @@
+"""
+Basic subprocess implementation for POSIX which only uses os functions. Only
+implement features required by setup.py to build C extension modules when
+subprocess is unavailable. setup.py is not used on Windows.
+"""
+import os
+
+
+# distutils.spawn used by distutils.command.build_ext
+# calls subprocess.Popen().wait()
+class Popen:
+ def __init__(self, cmd, env=None):
+ self._cmd = cmd
+ self._env = env
+ self.returncode = None
+
+ def wait(self):
+ pid = os.fork()
+ if pid == 0:
+ # Child process
+ try:
+ if self._env is not None:
+ os.execve(self._cmd[0], self._cmd, self._env)
+ else:
+ os.execv(self._cmd[0], self._cmd)
+ finally:
+ os._exit(1)
+ else:
+ # Parent process
+ _, status = os.waitpid(pid, 0)
+ self.returncode = os.waitstatus_to_exitcode(status)
+
+ return self.returncode
+
+
+def _check_cmd(cmd):
+ # Use regex [a-zA-Z0-9./-]+: reject empty string, space, etc.
+ safe_chars = []
+ for first, last in (("a", "z"), ("A", "Z"), ("0", "9")):
+ for ch in range(ord(first), ord(last) + 1):
+ safe_chars.append(chr(ch))
+ safe_chars.append("./-")
+ safe_chars = ''.join(safe_chars)
+
+ if isinstance(cmd, (tuple, list)):
+ check_strs = cmd
+ elif isinstance(cmd, str):
+ check_strs = [cmd]
+ else:
+ return False
+
+ for arg in check_strs:
+ if not isinstance(arg, str):
+ return False
+ if not arg:
+ # reject empty string
+ return False
+ for ch in arg:
+ if ch not in safe_chars:
+ return False
+
+ return True
+
+
+# _aix_support used by distutils.util calls subprocess.check_output()
+def check_output(cmd, **kwargs):
+ if kwargs:
+ raise NotImplementedError(repr(kwargs))
+
+ if not _check_cmd(cmd):
+ raise ValueError(f"unsupported command: {cmd!r}")
+
+ tmp_filename = "check_output.tmp"
+ if not isinstance(cmd, str):
+ cmd = " ".join(cmd)
+ cmd = f"{cmd} >{tmp_filename}"
+
+ try:
+ # system() spawns a shell
+ status = os.system(cmd)
+ exitcode = os.waitstatus_to_exitcode(status)
+ if exitcode:
+ raise ValueError(f"Command {cmd!r} returned non-zero "
+ f"exit status {exitcode!r}")
+
+ try:
+ with open(tmp_filename, "rb") as fp:
+ stdout = fp.read()
+ except FileNotFoundError:
+ stdout = b''
+ finally:
+ try:
+ os.unlink(tmp_filename)
+ except OSError:
+ pass
+
+ return stdout
diff --git a/Lib/_collections_abc.py b/Lib/_collections_abc.py
index 2b2ddba170e1b1..36cd9930003475 100644
--- a/Lib/_collections_abc.py
+++ b/Lib/_collections_abc.py
@@ -9,6 +9,8 @@
from abc import ABCMeta, abstractmethod
import sys
+GenericAlias = type(list[int])
+
__all__ = ["Awaitable", "Coroutine",
"AsyncIterable", "AsyncIterator", "AsyncGenerator",
"Hashable", "Iterable", "Iterator", "Generator", "Reversible",
@@ -110,6 +112,8 @@ def __subclasshook__(cls, C):
return _check_methods(C, "__await__")
return NotImplemented
+ __class_getitem__ = classmethod(GenericAlias)
+
class Coroutine(Awaitable):
@@ -169,6 +173,8 @@ def __subclasshook__(cls, C):
return _check_methods(C, "__aiter__")
return NotImplemented
+ __class_getitem__ = classmethod(GenericAlias)
+
class AsyncIterator(AsyncIterable):
@@ -255,6 +261,8 @@ def __subclasshook__(cls, C):
return _check_methods(C, "__iter__")
return NotImplemented
+ __class_getitem__ = classmethod(GenericAlias)
+
class Iterator(Iterable):
@@ -274,6 +282,7 @@ def __subclasshook__(cls, C):
return _check_methods(C, '__iter__', '__next__')
return NotImplemented
+
Iterator.register(bytes_iterator)
Iterator.register(bytearray_iterator)
#Iterator.register(callable_iterator)
@@ -353,6 +362,7 @@ def __subclasshook__(cls, C):
'send', 'throw', 'close')
return NotImplemented
+
Generator.register(generator)
@@ -385,6 +395,9 @@ def __subclasshook__(cls, C):
return _check_methods(C, "__contains__")
return NotImplemented
+ __class_getitem__ = classmethod(GenericAlias)
+
+
class Collection(Sized, Iterable, Container):
__slots__ = ()
@@ -395,6 +408,7 @@ def __subclasshook__(cls, C):
return _check_methods(C, "__len__", "__iter__", "__contains__")
return NotImplemented
+
class Callable(metaclass=ABCMeta):
__slots__ = ()
@@ -409,6 +423,8 @@ def __subclasshook__(cls, C):
return _check_methods(C, "__call__")
return NotImplemented
+ __class_getitem__ = classmethod(GenericAlias)
+
### SETS ###
@@ -550,6 +566,7 @@ def _hash(self):
h = 590923713
return h
+
Set.register(frozenset)
@@ -632,6 +649,7 @@ def __isub__(self, it):
self.discard(value)
return self
+
MutableSet.register(set)
@@ -688,6 +706,7 @@ def __eq__(self, other):
__reversed__ = None
+
Mapping.register(mappingproxy)
@@ -704,6 +723,8 @@ def __len__(self):
def __repr__(self):
return '{0.__class__.__name__}({0._mapping!r})'.format(self)
+ __class_getitem__ = classmethod(GenericAlias)
+
class KeysView(MappingView, Set):
@@ -719,6 +740,7 @@ def __contains__(self, key):
def __iter__(self):
yield from self._mapping
+
KeysView.register(dict_keys)
@@ -743,6 +765,7 @@ def __iter__(self):
for key in self._mapping:
yield (key, self._mapping[key])
+
ItemsView.register(dict_items)
@@ -761,6 +784,7 @@ def __iter__(self):
for key in self._mapping:
yield self._mapping[key]
+
ValuesView.register(dict_values)
@@ -847,6 +871,7 @@ def setdefault(self, key, default=None):
self[key] = default
return default
+
MutableMapping.register(dict)
@@ -914,6 +939,7 @@ def count(self, value):
'S.count(value) -> integer -- return number of occurrences of value'
return sum(1 for v in self if v is value or v == value)
+
Sequence.register(tuple)
Sequence.register(str)
Sequence.register(range)
@@ -1000,5 +1026,6 @@ def __iadd__(self, values):
self.extend(values)
return self
+
MutableSequence.register(list)
MutableSequence.register(bytearray) # Multiply inheriting, see ByteString
diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py
index db6674ea293fb0..e9efce7d7ed5bd 100644
--- a/Lib/_osx_support.py
+++ b/Lib/_osx_support.py
@@ -211,7 +211,7 @@ def _remove_universal_flags(_config_vars):
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
- flags = re.sub('-isysroot [^ \t]*', ' ', flags)
+ flags = re.sub(r'-isysroot\s*\S+', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
@@ -287,7 +287,7 @@ def _check_for_unavailable_sdk(_config_vars):
# to /usr and /System/Library by either a standalone CLT
# package or the CLT component within Xcode.
cflags = _config_vars.get('CFLAGS', '')
- m = re.search(r'-isysroot\s+(\S+)', cflags)
+ m = re.search(r'-isysroot\s*(\S+)', cflags)
if m is not None:
sdk = m.group(1)
if not os.path.exists(sdk):
@@ -295,7 +295,7 @@ def _check_for_unavailable_sdk(_config_vars):
# Do not alter a config var explicitly overridden by env var
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
- flags = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ', flags)
+ flags = re.sub(r'-isysroot\s*\S+(?:\s|$)', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
@@ -320,7 +320,7 @@ def compiler_fixup(compiler_so, cc_args):
stripArch = stripSysroot = True
else:
stripArch = '-arch' in cc_args
- stripSysroot = '-isysroot' in cc_args
+ stripSysroot = any(arg for arg in cc_args if arg.startswith('-isysroot'))
if stripArch or 'ARCHFLAGS' in os.environ:
while True:
@@ -338,23 +338,34 @@ def compiler_fixup(compiler_so, cc_args):
if stripSysroot:
while True:
- try:
- index = compiler_so.index('-isysroot')
+ indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
+ if not indices:
+ break
+ index = indices[0]
+ if compiler_so[index] == '-isysroot':
# Strip this argument and the next one:
del compiler_so[index:index+2]
- except ValueError:
- break
+ else:
+ # It's '-isysroot/some/path' in one arg
+ del compiler_so[index:index+1]
# Check if the SDK that is used during compilation actually exists,
# the universal build requires the usage of a universal SDK and not all
# users have that installed by default.
sysroot = None
- if '-isysroot' in cc_args:
- idx = cc_args.index('-isysroot')
- sysroot = cc_args[idx+1]
- elif '-isysroot' in compiler_so:
- idx = compiler_so.index('-isysroot')
- sysroot = compiler_so[idx+1]
+ argvar = cc_args
+ indices = [i for i,x in enumerate(cc_args) if x.startswith('-isysroot')]
+ if not indices:
+ argvar = compiler_so
+ indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
+
+ for idx in indices:
+ if argvar[idx] == '-isysroot':
+ sysroot = argvar[idx+1]
+ break
+ else:
+ sysroot = argvar[idx][len('-isysroot'):]
+ break
if sysroot and not os.path.isdir(sysroot):
from distutils import log
diff --git a/Lib/_pydecimal.py b/Lib/_pydecimal.py
index c14d8ca86a1181..ab989e5206a9e9 100644
--- a/Lib/_pydecimal.py
+++ b/Lib/_pydecimal.py
@@ -140,8 +140,11 @@
# Limits for the C version for compatibility
'MAX_PREC', 'MAX_EMAX', 'MIN_EMIN', 'MIN_ETINY',
- # C version: compile time choice that enables the thread local context
- 'HAVE_THREADS'
+ # C version: compile time choice that enables the thread local context (deprecated, now always true)
+ 'HAVE_THREADS',
+
+ # C version: compile time choice that enables the coroutine local context
+ 'HAVE_CONTEXTVAR'
]
__xname__ = __name__ # sys.modules lookup (--without-threads)
@@ -172,6 +175,7 @@
# Compatibility with the C version
HAVE_THREADS = True
+HAVE_CONTEXTVAR = True
if sys.maxsize == 2**63-1:
MAX_PREC = 999999999999999999
MAX_EMAX = 999999999999999999
diff --git a/Lib/_pyio.py b/Lib/_pyio.py
index 4c2414672ed56c..4804ed27cd14d6 100644
--- a/Lib/_pyio.py
+++ b/Lib/_pyio.py
@@ -71,6 +71,7 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
'b' binary mode
't' text mode (default)
'+' open a disk file for updating (reading and writing)
+ 'U' universal newline mode (deprecated)
========= ===============================================================
The default mode is 'rt' (open for reading text). For binary random
@@ -86,6 +87,10 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
returned as strings, the bytes having been first decoded using a
platform-dependent encoding or using the specified encoding if given.
+ 'U' mode is deprecated and will raise an exception in future versions
+ of Python. It has no effect in Python 3. Use newline to control
+ universal newlines mode.
+
buffering is an optional integer used to set the buffering policy.
Pass 0 to switch buffering off (only allowed in binary mode), 1 to select
line buffering (only usable in text mode), and an integer > 1 to indicate
@@ -171,7 +176,7 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
if errors is not None and not isinstance(errors, str):
raise TypeError("invalid errors: %r" % errors)
modes = set(mode)
- if modes - set("axrwb+t") or len(mode) > len(modes):
+ if modes - set("axrwb+tU") or len(mode) > len(modes):
raise ValueError("invalid mode: %r" % mode)
creating = "x" in modes
reading = "r" in modes
@@ -180,6 +185,13 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
updating = "+" in modes
text = "t" in modes
binary = "b" in modes
+ if "U" in modes:
+ if creating or writing or appending or updating:
+ raise ValueError("mode U cannot be combined with 'x', 'w', 'a', or '+'")
+ import warnings
+ warnings.warn("'U' mode is deprecated",
+ DeprecationWarning, 2)
+ reading = True
if text and binary:
raise ValueError("can't have text and binary mode at once")
if creating + reading + writing + appending > 1:
@@ -792,6 +804,9 @@ def tell(self):
return pos
def truncate(self, pos=None):
+ self._checkClosed()
+ self._checkWritable()
+
# Flush the stream. We're mixing buffered I/O with lower-level I/O,
# and a flush may be necessary to synch both views of the current
# file state.
diff --git a/Lib/_weakrefset.py b/Lib/_weakrefset.py
index 7a84823622ee7c..b267780f0ced73 100644
--- a/Lib/_weakrefset.py
+++ b/Lib/_weakrefset.py
@@ -3,6 +3,7 @@
# by abc.py to load everything else at startup.
from _weakref import ref
+from types import GenericAlias
__all__ = ['WeakSet']
@@ -197,3 +198,5 @@ def isdisjoint(self, other):
def __repr__(self):
return repr(self.data)
+
+ __class_getitem__ = classmethod(GenericAlias)
diff --git a/Lib/antigravity.py b/Lib/antigravity.py
index c6f174ca6d8736..6dc520733577af 100644
--- a/Lib/antigravity.py
+++ b/Lib/antigravity.py
@@ -12,6 +12,6 @@ def geohash(latitude, longitude, datedow):
'''
# https://xkcd.com/426/
- h = hashlib.md5(datedow).hexdigest()
+ h = hashlib.md5(datedow, usedforsecurity=False).hexdigest()
p, q = [('%f' % float.fromhex('0.' + x)) for x in (h[:16], h[16:32])]
print('%d%s %d%s' % (latitude, p[1:], longitude, q[1:]))
diff --git a/Lib/argparse.py b/Lib/argparse.py
index 5d3ce2ad709f03..2677ef63e9e541 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -129,7 +129,7 @@ def __repr__(self):
return '%s(%s)' % (type_name, ', '.join(arg_strings))
def _get_kwargs(self):
- return sorted(self.__dict__.items())
+ return list(self.__dict__.items())
def _get_args(self):
return []
@@ -2199,24 +2199,23 @@ def _parse_optional(self, arg_string):
action = self._option_string_actions[option_string]
return action, option_string, explicit_arg
- if self.allow_abbrev or not arg_string.startswith('--'):
- # search through all possible prefixes of the option string
- # and all actions in the parser for possible interpretations
- option_tuples = self._get_option_tuples(arg_string)
-
- # if multiple actions match, the option string was ambiguous
- if len(option_tuples) > 1:
- options = ', '.join([option_string
- for action, option_string, explicit_arg in option_tuples])
- args = {'option': arg_string, 'matches': options}
- msg = _('ambiguous option: %(option)s could match %(matches)s')
- self.error(msg % args)
-
- # if exactly one action matched, this segmentation is good,
- # so return the parsed action
- elif len(option_tuples) == 1:
- option_tuple, = option_tuples
- return option_tuple
+ # search through all possible prefixes of the option string
+ # and all actions in the parser for possible interpretations
+ option_tuples = self._get_option_tuples(arg_string)
+
+ # if multiple actions match, the option string was ambiguous
+ if len(option_tuples) > 1:
+ options = ', '.join([option_string
+ for action, option_string, explicit_arg in option_tuples])
+ args = {'option': arg_string, 'matches': options}
+ msg = _('ambiguous option: %(option)s could match %(matches)s')
+ self.error(msg % args)
+
+ # if exactly one action matched, this segmentation is good,
+ # so return the parsed action
+ elif len(option_tuples) == 1:
+ option_tuple, = option_tuples
+ return option_tuple
# if it was not found as an option, but it looks like a negative
# number, it was meant to be positional
@@ -2240,16 +2239,17 @@ def _get_option_tuples(self, option_string):
# split at the '='
chars = self.prefix_chars
if option_string[0] in chars and option_string[1] in chars:
- if '=' in option_string:
- option_prefix, explicit_arg = option_string.split('=', 1)
- else:
- option_prefix = option_string
- explicit_arg = None
- for option_string in self._option_string_actions:
- if option_string.startswith(option_prefix):
- action = self._option_string_actions[option_string]
- tup = action, option_string, explicit_arg
- result.append(tup)
+ if self.allow_abbrev:
+ if '=' in option_string:
+ option_prefix, explicit_arg = option_string.split('=', 1)
+ else:
+ option_prefix = option_string
+ explicit_arg = None
+ for option_string in self._option_string_actions:
+ if option_string.startswith(option_prefix):
+ action = self._option_string_actions[option_string]
+ tup = action, option_string, explicit_arg
+ result.append(tup)
# single character options can be concatenated with their arguments
# but multiple character options always have to have their argument
diff --git a/Lib/ast.py b/Lib/ast.py
index 495c0d618f12cd..6a5b39e270b9b5 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -27,6 +27,7 @@
import sys
from _ast import *
from contextlib import contextmanager, nullcontext
+from enum import IntEnum, auto
def parse(source, filename='', mode='exec', *,
@@ -61,11 +62,12 @@ def literal_eval(node_or_string):
node_or_string = parse(node_or_string, mode='eval')
if isinstance(node_or_string, Expression):
node_or_string = node_or_string.body
+ def _raise_malformed_node(node):
+ raise ValueError(f'malformed node or string: {node!r}')
def _convert_num(node):
- if isinstance(node, Constant):
- if type(node.value) in (int, float, complex):
- return node.value
- raise ValueError('malformed node or string: ' + repr(node))
+ if not isinstance(node, Constant) or type(node.value) not in (int, float, complex):
+ _raise_malformed_node(node)
+ return node.value
def _convert_signed_num(node):
if isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)):
operand = _convert_num(node.operand)
@@ -87,6 +89,8 @@ def _convert(node):
node.func.id == 'set' and node.args == node.keywords == []):
return set()
elif isinstance(node, Dict):
+ if len(node.keys) != len(node.values):
+ _raise_malformed_node(node)
return dict(zip(map(_convert, node.keys),
map(_convert, node.values)))
elif isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)):
@@ -122,31 +126,36 @@ def _format(node, level=0):
prefix = ''
sep = ', '
if isinstance(node, AST):
+ cls = type(node)
args = []
allsimple = True
keywords = annotate_fields
- for field in node._fields:
+ for name in node._fields:
try:
- value = getattr(node, field)
+ value = getattr(node, name)
except AttributeError:
keywords = True
+ continue
+ if value is None and getattr(cls, name, ...) is None:
+ keywords = True
+ continue
+ value, simple = _format(value, level)
+ allsimple = allsimple and simple
+ if keywords:
+ args.append('%s=%s' % (name, value))
else:
- value, simple = _format(value, level)
- allsimple = allsimple and simple
- if keywords:
- args.append('%s=%s' % (field, value))
- else:
- args.append(value)
+ args.append(value)
if include_attributes and node._attributes:
- for attr in node._attributes:
+ for name in node._attributes:
try:
- value = getattr(node, attr)
+ value = getattr(node, name)
except AttributeError:
- pass
- else:
- value, simple = _format(value, level)
- allsimple = allsimple and simple
- args.append('%s=%s' % (attr, value))
+ continue
+ if value is None and getattr(cls, name, ...) is None:
+ continue
+ value, simple = _format(value, level)
+ allsimple = allsimple and simple
+ args.append('%s=%s' % (name, value))
if allsimple and len(args) <= 3:
return '%s(%s)' % (node.__class__.__name__, ', '.join(args)), not args
return '%s(%s%s)' % (node.__class__.__name__, prefix, sep.join(args)), False
@@ -169,9 +178,10 @@ def copy_location(new_node, old_node):
attributes) from *old_node* to *new_node* if possible, and return *new_node*.
"""
for attr in 'lineno', 'col_offset', 'end_lineno', 'end_col_offset':
- if attr in old_node._attributes and attr in new_node._attributes \
- and hasattr(old_node, attr):
- setattr(new_node, attr, getattr(old_node, attr))
+ if attr in old_node._attributes and attr in new_node._attributes:
+ value = getattr(old_node, attr, None)
+ if value is not None:
+ setattr(new_node, attr, value)
return new_node
@@ -190,7 +200,7 @@ def _fix(node, lineno, col_offset, end_lineno, end_col_offset):
else:
lineno = node.lineno
if 'end_lineno' in node._attributes:
- if not hasattr(node, 'end_lineno'):
+ if getattr(node, 'end_lineno', None) is None:
node.end_lineno = end_lineno
else:
end_lineno = node.end_lineno
@@ -200,7 +210,7 @@ def _fix(node, lineno, col_offset, end_lineno, end_col_offset):
else:
col_offset = node.col_offset
if 'end_col_offset' in node._attributes:
- if not hasattr(node, 'end_col_offset'):
+ if getattr(node, 'end_col_offset', None) is None:
node.end_col_offset = end_col_offset
else:
end_col_offset = node.end_col_offset
@@ -302,7 +312,7 @@ def _splitlines_no_ff(source):
def _pad_whitespace(source):
- """Replace all chars except '\f\t' in a line with spaces."""
+ r"""Replace all chars except '\f\t' in a line with spaces."""
result = ''
for c in source:
if c in '\f\t':
@@ -322,6 +332,8 @@ def get_source_segment(source, node, *, padded=False):
be padded with spaces to match its original position.
"""
try:
+ if node.end_lineno is None or node.end_col_offset is None:
+ return None
lineno = node.lineno - 1
end_lineno = node.end_lineno - 1
col_offset = node.col_offset
@@ -436,11 +448,11 @@ class NodeTransformer(NodeVisitor):
class RewriteName(NodeTransformer):
def visit_Name(self, node):
- return copy_location(Subscript(
+ return Subscript(
value=Name(id='data', ctx=Load()),
- slice=Index(value=Str(s=node.id)),
+ slice=Constant(value=node.id),
ctx=node.ctx
- ), node)
+ )
Keep in mind that if the node you're operating on has child nodes you must
either transform the child nodes yourself or call the :meth:`generic_visit`
@@ -482,6 +494,7 @@ def generic_visit(self, node):
# It will be removed in future.
def _getter(self):
+ """Deprecated. Use value instead."""
return self.value
def _setter(self, value):
@@ -492,6 +505,9 @@ def _setter(self, value):
class _ABC(type):
+ def __init__(cls, *args):
+ cls.__doc__ = """Deprecated AST node class. Use ast.Constant instead"""
+
def __instancecheck__(cls, inst):
if not isinstance(inst, Constant):
return False
@@ -508,6 +524,13 @@ def __instancecheck__(cls, inst):
return type.__instancecheck__(cls, inst)
def _new(cls, *args, **kwargs):
+ for key in kwargs:
+ if key not in cls._fields:
+ # arbitrary keyword arguments are accepted
+ continue
+ pos = cls._fields.index(key)
+ if pos < len(args):
+ raise TypeError(f"{cls.__name__} got multiple values for argument {key!r}")
if cls in _const_types:
return Constant(*args, **kwargs)
return Constant.__new__(cls, *args, **kwargs)
@@ -545,6 +568,7 @@ def __new__(cls, *args, **kwargs):
_const_types_not = {
Num: (bool,),
}
+
_const_node_type_names = {
bool: 'NameConstant', # should be before int
type(None): 'NameConstant',
@@ -556,10 +580,74 @@ def __new__(cls, *args, **kwargs):
type(...): 'Ellipsis',
}
+class slice(AST):
+ """Deprecated AST node class."""
+
+class Index(slice):
+ """Deprecated AST node class. Use the index value directly instead."""
+ def __new__(cls, value, **kwargs):
+ return value
+
+class ExtSlice(slice):
+ """Deprecated AST node class. Use ast.Tuple instead."""
+ def __new__(cls, dims=(), **kwargs):
+ return Tuple(list(dims), Load(), **kwargs)
+
+def _dims_getter(self):
+ """Deprecated. Use elts instead."""
+ return self.elts
+
+def _dims_setter(self, value):
+ self.elts = value
+
+Tuple.dims = property(_dims_getter, _dims_setter)
+
+class Suite(mod):
+ """Deprecated AST node class. Unused in Python 3."""
+
+class AugLoad(expr_context):
+ """Deprecated AST node class. Unused in Python 3."""
+
+class AugStore(expr_context):
+ """Deprecated AST node class. Unused in Python 3."""
+
+class Param(expr_context):
+ """Deprecated AST node class. Unused in Python 3."""
+
+
# Large float and imaginary literals get turned into infinities in the AST.
# We unparse those infinities to INFSTR.
_INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
+class _Precedence(IntEnum):
+ """Precedence table that originated from python grammar."""
+
+ TUPLE = auto()
+ YIELD = auto() # 'yield', 'yield from'
+ TEST = auto() # 'if'-'else', 'lambda'
+ OR = auto() # 'or'
+ AND = auto() # 'and'
+ NOT = auto() # 'not'
+ CMP = auto() # '<', '>', '==', '>=', '<=', '!=',
+ # 'in', 'not in', 'is', 'is not'
+ EXPR = auto()
+ BOR = EXPR # '|'
+ BXOR = auto() # '^'
+ BAND = auto() # '&'
+ SHIFT = auto() # '<<', '>>'
+ ARITH = auto() # '+', '-'
+ TERM = auto() # '*', '@', '/', '%', '//'
+ FACTOR = auto() # unary '+', '-', '~'
+ POWER = auto() # '**'
+ AWAIT = auto() # 'await'
+ ATOM = auto()
+
+ def next(self):
+ try:
+ return self.__class__(self + 1)
+ except ValueError:
+ return self
+
class _Unparser(NodeVisitor):
"""Methods in this class recursively traverse an AST and
output source code for the abstract syntax; original formatting
@@ -568,6 +656,8 @@ class _Unparser(NodeVisitor):
def __init__(self):
self._source = []
self._buffer = []
+ self._precedences = {}
+ self._type_ignores = {}
self._indent = 0
def interleave(self, inter, f, seq):
@@ -582,10 +672,26 @@ def interleave(self, inter, f, seq):
inter()
f(x)
+ def items_view(self, traverser, items):
+ """Traverse and separate the given *items* with a comma and append it to
+ the buffer. If *items* is a single item sequence, a trailing comma
+ will be added."""
+ if len(items) == 1:
+ traverser(items[0])
+ self.write(",")
+ else:
+ self.interleave(lambda: self.write(", "), traverser, items)
+
+ def maybe_newline(self):
+ """Adds a newline if it isn't the start of generated source"""
+ if self._source:
+ self.write("\n")
+
def fill(self, text=""):
"""Indent a piece of text and append it, according to the current
indentation level"""
- self.write("\n" + " " * self._indent + text)
+ self.maybe_newline()
+ self.write(" " * self._indent + text)
def write(self, text):
"""Append a piece of text"""
@@ -601,11 +707,15 @@ def buffer(self):
return value
@contextmanager
- def block(self):
+ def block(self, *, extra = None):
"""A context manager for preparing the source for blocks. It adds
the character':', increases the indentation on enter and decreases
- the indentation on exit."""
+ the indentation on exit. If *extra* is given, it will be directly
+ appended after the colon character.
+ """
self.write(":")
+ if extra:
+ self.write(extra)
self._indent += 1
yield
self._indent -= 1
@@ -625,6 +735,38 @@ def delimit_if(self, start, end, condition):
else:
return nullcontext()
+ def require_parens(self, precedence, node):
+ """Shortcut to adding precedence related parens"""
+ return self.delimit_if("(", ")", self.get_precedence(node) > precedence)
+
+ def get_precedence(self, node):
+ return self._precedences.get(node, _Precedence.TEST)
+
+ def set_precedence(self, precedence, *nodes):
+ for node in nodes:
+ self._precedences[node] = precedence
+
+ def get_raw_docstring(self, node):
+ """If a docstring node is found in the body of the *node* parameter,
+ return that docstring node, None otherwise.
+
+ Logic mirrored from ``_PyAST_GetDocString``."""
+ if not isinstance(
+ node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)
+ ) or len(node.body) < 1:
+ return None
+ node = node.body[0]
+ if not isinstance(node, Expr):
+ return None
+ node = node.value
+ if isinstance(node, Constant) and isinstance(node.value, str):
+ return node
+
+ def get_type_comment(self, node):
+ comment = self._type_ignores.get(node.lineno) or node.type_comment
+ if comment is not None:
+ return f" # type: {comment}"
+
def traverse(self, node):
if isinstance(node, list):
for item in node:
@@ -639,16 +781,38 @@ def visit(self, node):
self.traverse(node)
return "".join(self._source)
+ def _write_docstring_and_traverse_body(self, node):
+ if (docstring := self.get_raw_docstring(node)):
+ self._write_docstring(docstring)
+ self.traverse(node.body[1:])
+ else:
+ self.traverse(node.body)
+
def visit_Module(self, node):
- for subnode in node.body:
- self.traverse(subnode)
+ self._type_ignores = {
+ ignore.lineno: f"ignore{ignore.tag}"
+ for ignore in node.type_ignores
+ }
+ self._write_docstring_and_traverse_body(node)
+ self._type_ignores.clear()
+
+ def visit_FunctionType(self, node):
+ with self.delimit("(", ")"):
+ self.interleave(
+ lambda: self.write(", "), self.traverse, node.argtypes
+ )
+
+ self.write(" -> ")
+ self.traverse(node.returns)
def visit_Expr(self, node):
self.fill()
+ self.set_precedence(_Precedence.YIELD, node.value)
self.traverse(node.value)
def visit_NamedExpr(self, node):
- with self.delimit("(", ")"):
+ with self.require_parens(_Precedence.TUPLE, node):
+ self.set_precedence(_Precedence.ATOM, node.target, node.value)
self.traverse(node.target)
self.write(" := ")
self.traverse(node.value)
@@ -671,6 +835,8 @@ def visit_Assign(self, node):
self.traverse(target)
self.write(" = ")
self.traverse(node.value)
+ if type_comment := self.get_type_comment(node):
+ self.write(type_comment)
def visit_AugAssign(self, node):
self.fill()
@@ -723,24 +889,27 @@ def visit_Nonlocal(self, node):
self.interleave(lambda: self.write(", "), self.write, node.names)
def visit_Await(self, node):
- with self.delimit("(", ")"):
+ with self.require_parens(_Precedence.AWAIT, node):
self.write("await")
if node.value:
self.write(" ")
+ self.set_precedence(_Precedence.ATOM, node.value)
self.traverse(node.value)
def visit_Yield(self, node):
- with self.delimit("(", ")"):
+ with self.require_parens(_Precedence.YIELD, node):
self.write("yield")
if node.value:
self.write(" ")
+ self.set_precedence(_Precedence.ATOM, node.value)
self.traverse(node.value)
def visit_YieldFrom(self, node):
- with self.delimit("(", ")"):
+ with self.require_parens(_Precedence.YIELD, node):
self.write("yield from ")
if not node.value:
raise ValueError("Node can't be used without a value attribute.")
+ self.set_precedence(_Precedence.ATOM, node.value)
self.traverse(node.value)
def visit_Raise(self, node):
@@ -782,12 +951,12 @@ def visit_ExceptHandler(self, node):
self.traverse(node.body)
def visit_ClassDef(self, node):
- self.write("\n")
+ self.maybe_newline()
for deco in node.decorator_list:
self.fill("@")
self.traverse(deco)
self.fill("class " + node.name)
- with self.delimit("(", ")"):
+ with self.delimit_if("(", ")", condition = node.bases or node.keywords):
comma = False
for e in node.bases:
if comma:
@@ -803,16 +972,16 @@ def visit_ClassDef(self, node):
self.traverse(e)
with self.block():
- self.traverse(node.body)
+ self._write_docstring_and_traverse_body(node)
def visit_FunctionDef(self, node):
- self.__FunctionDef_helper(node, "def")
+ self._function_helper(node, "def")
def visit_AsyncFunctionDef(self, node):
- self.__FunctionDef_helper(node, "async def")
+ self._function_helper(node, "async def")
- def __FunctionDef_helper(self, node, fill_suffix):
- self.write("\n")
+ def _function_helper(self, node, fill_suffix):
+ self.maybe_newline()
for deco in node.decorator_list:
self.fill("@")
self.traverse(deco)
@@ -823,21 +992,21 @@ def __FunctionDef_helper(self, node, fill_suffix):
if node.returns:
self.write(" -> ")
self.traverse(node.returns)
- with self.block():
- self.traverse(node.body)
+ with self.block(extra=self.get_type_comment(node)):
+ self._write_docstring_and_traverse_body(node)
def visit_For(self, node):
- self.__For_helper("for ", node)
+ self._for_helper("for ", node)
def visit_AsyncFor(self, node):
- self.__For_helper("async for ", node)
+ self._for_helper("async for ", node)
- def __For_helper(self, fill, node):
+ def _for_helper(self, fill, node):
self.fill(fill)
self.traverse(node.target)
self.write(" in ")
self.traverse(node.iter)
- with self.block():
+ with self.block(extra=self.get_type_comment(node)):
self.traverse(node.body)
if node.orelse:
self.fill("else")
@@ -875,13 +1044,13 @@ def visit_While(self, node):
def visit_With(self, node):
self.fill("with ")
self.interleave(lambda: self.write(", "), self.traverse, node.items)
- with self.block():
+ with self.block(extra=self.get_type_comment(node)):
self.traverse(node.body)
def visit_AsyncWith(self, node):
self.fill("async with ")
self.interleave(lambda: self.write(", "), self.traverse, node.items)
- with self.block():
+ with self.block(extra=self.get_type_comment(node)):
self.traverse(node.body)
def visit_JoinedStr(self, node):
@@ -907,7 +1076,9 @@ def _fstring_Constant(self, node, write):
def _fstring_FormattedValue(self, node, write):
write("{")
- expr = type(self)().visit(node.value).rstrip("\n")
+ unparser = type(self)()
+ unparser.set_precedence(_Precedence.TEST.next(), node.value)
+ expr = unparser.visit(node.value)
if expr.startswith("{"):
write(" ") # Separate pair of opening brackets as "{ {"
write(expr)
@@ -925,6 +1096,30 @@ def _fstring_FormattedValue(self, node, write):
def visit_Name(self, node):
self.write(node.id)
+ def _write_docstring(self, node):
+ def esc_char(c):
+ if c in ("\n", "\t"):
+ # In the AST form, we don't know the author's intentation
+ # about how this should be displayed. We'll only escape
+ # \n and \t, because they are more likely to be unescaped
+ # in the source
+ return c
+ return c.encode('unicode_escape').decode('ascii')
+
+ self.fill()
+ if node.kind == "u":
+ self.write("u")
+
+ value = node.value
+ if value:
+ # Preserve quotes in the docstring by escaping them
+ value = "".join(map(esc_char, value))
+ if value[-1] == '"':
+ value = value.replace('"', '\\"', -1)
+ value = value.replace('"""', '""\\"')
+
+ self.write(f'"""{value}"""')
+
def _write_constant(self, value):
if isinstance(value, (float, complex)):
# Substitute overflowing decimal literal for AST infinities.
@@ -936,11 +1131,7 @@ def visit_Constant(self, node):
value = node.value
if isinstance(value, tuple):
with self.delimit("(", ")"):
- if len(value) == 1:
- self._write_constant(value[0])
- self.write(",")
- else:
- self.interleave(lambda: self.write(", "), self._write_constant, value)
+ self.items_view(self._write_constant, value)
elif value is ...:
self.write("...")
else:
@@ -983,24 +1174,28 @@ def visit_comprehension(self, node):
self.write(" async for ")
else:
self.write(" for ")
+ self.set_precedence(_Precedence.TUPLE, node.target)
self.traverse(node.target)
self.write(" in ")
+ self.set_precedence(_Precedence.TEST.next(), node.iter, *node.ifs)
self.traverse(node.iter)
for if_clause in node.ifs:
self.write(" if ")
self.traverse(if_clause)
def visit_IfExp(self, node):
- with self.delimit("(", ")"):
+ with self.require_parens(_Precedence.TEST, node):
+ self.set_precedence(_Precedence.TEST.next(), node.body, node.test)
self.traverse(node.body)
self.write(" if ")
self.traverse(node.test)
self.write(" else ")
+ self.set_precedence(_Precedence.TEST, node.orelse)
self.traverse(node.orelse)
def visit_Set(self, node):
if not node.elts:
- raise ValueError("Set node should has at least one item")
+ raise ValueError("Set node should have at least one item")
with self.delimit("{", "}"):
self.interleave(lambda: self.write(", "), self.traverse, node.elts)
@@ -1016,6 +1211,7 @@ def write_item(item):
# for dictionary unpacking operator in dicts {**{'y': 2}}
# see PEP 448 for details
self.write("**")
+ self.set_precedence(_Precedence.EXPR, v)
self.traverse(v)
else:
write_key_value_pair(k, v)
@@ -1027,19 +1223,26 @@ def write_item(item):
def visit_Tuple(self, node):
with self.delimit("(", ")"):
- if len(node.elts) == 1:
- elt = node.elts[0]
- self.traverse(elt)
- self.write(",")
- else:
- self.interleave(lambda: self.write(", "), self.traverse, node.elts)
+ self.items_view(self.traverse, node.elts)
unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"}
+ unop_precedence = {
+ "not": _Precedence.NOT,
+ "~": _Precedence.FACTOR,
+ "+": _Precedence.FACTOR,
+ "-": _Precedence.FACTOR,
+ }
def visit_UnaryOp(self, node):
- with self.delimit("(", ")"):
- self.write(self.unop[node.op.__class__.__name__])
- self.write(" ")
+ operator = self.unop[node.op.__class__.__name__]
+ operator_precedence = self.unop_precedence[operator]
+ with self.require_parens(operator_precedence, node):
+ self.write(operator)
+ # factor prefixes (+, -, ~) shouldn't be seperated
+ # from the value they belong, (e.g: +1 instead of + 1)
+ if operator_precedence is not _Precedence.FACTOR:
+ self.write(" ")
+ self.set_precedence(operator_precedence, node.operand)
self.traverse(node.operand)
binop = {
@@ -1058,10 +1261,38 @@ def visit_UnaryOp(self, node):
"Pow": "**",
}
+ binop_precedence = {
+ "+": _Precedence.ARITH,
+ "-": _Precedence.ARITH,
+ "*": _Precedence.TERM,
+ "@": _Precedence.TERM,
+ "/": _Precedence.TERM,
+ "%": _Precedence.TERM,
+ "<<": _Precedence.SHIFT,
+ ">>": _Precedence.SHIFT,
+ "|": _Precedence.BOR,
+ "^": _Precedence.BXOR,
+ "&": _Precedence.BAND,
+ "//": _Precedence.TERM,
+ "**": _Precedence.POWER,
+ }
+
+ binop_rassoc = frozenset(("**",))
def visit_BinOp(self, node):
- with self.delimit("(", ")"):
+ operator = self.binop[node.op.__class__.__name__]
+ operator_precedence = self.binop_precedence[operator]
+ with self.require_parens(operator_precedence, node):
+ if operator in self.binop_rassoc:
+ left_precedence = operator_precedence.next()
+ right_precedence = operator_precedence
+ else:
+ left_precedence = operator_precedence
+ right_precedence = operator_precedence.next()
+
+ self.set_precedence(left_precedence, node.left)
self.traverse(node.left)
- self.write(" " + self.binop[node.op.__class__.__name__] + " ")
+ self.write(f" {operator} ")
+ self.set_precedence(right_precedence, node.right)
self.traverse(node.right)
cmpops = {
@@ -1078,20 +1309,32 @@ def visit_BinOp(self, node):
}
def visit_Compare(self, node):
- with self.delimit("(", ")"):
+ with self.require_parens(_Precedence.CMP, node):
+ self.set_precedence(_Precedence.CMP.next(), node.left, *node.comparators)
self.traverse(node.left)
for o, e in zip(node.ops, node.comparators):
self.write(" " + self.cmpops[o.__class__.__name__] + " ")
self.traverse(e)
boolops = {"And": "and", "Or": "or"}
+ boolop_precedence = {"and": _Precedence.AND, "or": _Precedence.OR}
def visit_BoolOp(self, node):
- with self.delimit("(", ")"):
- s = " %s " % self.boolops[node.op.__class__.__name__]
- self.interleave(lambda: self.write(s), self.traverse, node.values)
+ operator = self.boolops[node.op.__class__.__name__]
+ operator_precedence = self.boolop_precedence[operator]
+
+ def increasing_level_traverse(node):
+ nonlocal operator_precedence
+ operator_precedence = operator_precedence.next()
+ self.set_precedence(operator_precedence, node)
+ self.traverse(node)
+
+ with self.require_parens(operator_precedence, node):
+ s = f" {operator} "
+ self.interleave(lambda: self.write(s), increasing_level_traverse, node.values)
def visit_Attribute(self, node):
+ self.set_precedence(_Precedence.ATOM, node.value)
self.traverse(node.value)
# Special case: 3.__abs__() is a syntax error, so if node.value
# is an integer literal then we need to either parenthesize
@@ -1102,6 +1345,7 @@ def visit_Attribute(self, node):
self.write(node.attr)
def visit_Call(self, node):
+ self.set_precedence(_Precedence.ATOM, node.func)
self.traverse(node.func)
with self.delimit("(", ")"):
comma = False
@@ -1119,20 +1363,32 @@ def visit_Call(self, node):
self.traverse(e)
def visit_Subscript(self, node):
+ def is_simple_tuple(slice_value):
+            # when unparsing a non-empty tuple, the parentheses can be safely
+            # omitted if there aren't any elements that explicitly require
+            # parentheses (such as starred expressions).
+ return (
+ isinstance(slice_value, Tuple)
+ and slice_value.elts
+ and not any(isinstance(elt, Starred) for elt in slice_value.elts)
+ )
+
+ self.set_precedence(_Precedence.ATOM, node.value)
self.traverse(node.value)
with self.delimit("[", "]"):
- self.traverse(node.slice)
+ if is_simple_tuple(node.slice):
+ self.items_view(self.traverse, node.slice.elts)
+ else:
+ self.traverse(node.slice)
def visit_Starred(self, node):
self.write("*")
+ self.set_precedence(_Precedence.EXPR, node.value)
self.traverse(node.value)
def visit_Ellipsis(self, node):
self.write("...")
- def visit_Index(self, node):
- self.traverse(node.value)
-
def visit_Slice(self, node):
if node.lower:
self.traverse(node.lower)
@@ -1143,9 +1399,6 @@ def visit_Slice(self, node):
self.write(":")
self.traverse(node.step)
- def visit_ExtSlice(self, node):
- self.interleave(lambda: self.write(", "), self.traverse, node.dims)
-
def visit_arg(self, node):
self.write(node.arg)
if node.annotation:
@@ -1212,10 +1465,11 @@ def visit_keyword(self, node):
self.traverse(node.value)
def visit_Lambda(self, node):
- with self.delimit("(", ")"):
+ with self.require_parens(_Precedence.TEST, node):
self.write("lambda ")
self.traverse(node.args)
self.write(": ")
+ self.set_precedence(_Precedence.TEST, node.body)
self.traverse(node.body)
def visit_alias(self, node):
diff --git a/Lib/asyncio/__init__.py b/Lib/asyncio/__init__.py
index 28c2e2c429f34a..eb84bfb189ccf3 100644
--- a/Lib/asyncio/__init__.py
+++ b/Lib/asyncio/__init__.py
@@ -17,6 +17,7 @@
from .streams import *
from .subprocess import *
from .tasks import *
+from .threads import *
from .transports import *
# Exposed for _asynciomodule.c to implement now deprecated
@@ -35,6 +36,7 @@
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
+ threads.__all__ +
transports.__all__)
if sys.platform == 'win32': # pragma: no cover
diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py
index d78724b015370e..b2d446a51fedb5 100644
--- a/Lib/asyncio/base_events.py
+++ b/Lib/asyncio/base_events.py
@@ -806,7 +806,9 @@ def run_in_executor(self, executor, func, *args):
# Only check when the default executor is being used
self._check_default_executor()
if executor is None:
- executor = concurrent.futures.ThreadPoolExecutor()
+ executor = concurrent.futures.ThreadPoolExecutor(
+ thread_name_prefix='asyncio'
+ )
self._default_executor = executor
return futures.wrap_future(
executor.submit(func, *args), loop=self)
diff --git a/Lib/asyncio/base_tasks.py b/Lib/asyncio/base_tasks.py
index e2da462fde7400..09bb171a2ce750 100644
--- a/Lib/asyncio/base_tasks.py
+++ b/Lib/asyncio/base_tasks.py
@@ -24,11 +24,18 @@ def _task_repr_info(task):
def _task_get_stack(task, limit):
frames = []
- try:
- # 'async def' coroutines
+ if hasattr(task._coro, 'cr_frame'):
+ # case 1: 'async def' coroutines
f = task._coro.cr_frame
- except AttributeError:
+ elif hasattr(task._coro, 'gi_frame'):
+ # case 2: legacy coroutines
f = task._coro.gi_frame
+ elif hasattr(task._coro, 'ag_frame'):
+ # case 3: async generators
+ f = task._coro.ag_frame
+ else:
+ # case 4: unknown objects
+ f = None
if f is not None:
while f is not None:
if limit is not None:
diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py
index c7343f515ca222..70017cb86a0596 100644
--- a/Lib/asyncio/events.py
+++ b/Lib/asyncio/events.py
@@ -19,7 +19,6 @@
import threading
from . import format_helpers
-from . import exceptions
class Handle:
@@ -397,7 +396,7 @@ async def create_unix_server(
The return value is a Server object, which can be used to stop
the service.
- path is a str, representing a file systsem path to bind the
+ path is a str, representing a file system path to bind the
server socket to.
sock can optionally be specified in order to use a preexisting
diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py
index a3cf379ee81705..bed4da52fd4d98 100644
--- a/Lib/asyncio/futures.py
+++ b/Lib/asyncio/futures.py
@@ -51,6 +51,9 @@ class Future:
_exception = None
_loop = None
_source_traceback = None
+ _cancel_message = None
+ # A saved CancelledError for later chaining as an exception context.
+ _cancelled_exc = None
# This field is used for a dual purpose:
# - Its presence is a marker to declare that a class implements
@@ -123,7 +126,22 @@ def get_loop(self):
raise RuntimeError("Future object is not initialized.")
return loop
- def cancel(self):
+ def _make_cancelled_error(self):
+ """Create the CancelledError to raise if the Future is cancelled.
+
+ This should only be called once when handling a cancellation since
+ it erases the saved context exception value.
+ """
+ if self._cancel_message is None:
+ exc = exceptions.CancelledError()
+ else:
+ exc = exceptions.CancelledError(self._cancel_message)
+ exc.__context__ = self._cancelled_exc
+ # Remove the reference since we don't need this anymore.
+ self._cancelled_exc = None
+ return exc
+
+ def cancel(self, msg=None):
"""Cancel the future and schedule callbacks.
If the future is already done or cancelled, return False. Otherwise,
@@ -134,6 +152,7 @@ def cancel(self):
if self._state != _PENDING:
return False
self._state = _CANCELLED
+ self._cancel_message = msg
self.__schedule_callbacks()
return True
@@ -173,7 +192,8 @@ def result(self):
the future is done and has an exception set, this exception is raised.
"""
if self._state == _CANCELLED:
- raise exceptions.CancelledError
+ exc = self._make_cancelled_error()
+ raise exc
if self._state != _FINISHED:
raise exceptions.InvalidStateError('Result is not ready.')
self.__log_traceback = False
@@ -190,7 +210,8 @@ def exception(self):
InvalidStateError.
"""
if self._state == _CANCELLED:
- raise exceptions.CancelledError
+ exc = self._make_cancelled_error()
+ raise exc
if self._state != _FINISHED:
raise exceptions.InvalidStateError('Exception is not set.')
self.__log_traceback = False
diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py
index a05cbb6bdd69dc..884a58f2ed6507 100644
--- a/Lib/asyncio/selector_events.py
+++ b/Lib/asyncio/selector_events.py
@@ -266,6 +266,7 @@ def _add_reader(self, fd, callback, *args):
(handle, writer))
if reader is not None:
reader.cancel()
+ return handle
def _remove_reader(self, fd):
if self.is_closed():
@@ -302,6 +303,7 @@ def _add_writer(self, fd, callback, *args):
(reader, handle))
if writer is not None:
writer.cancel()
+ return handle
def _remove_writer(self, fd):
"""Remove a writer callback."""
@@ -329,7 +331,7 @@ def _remove_writer(self, fd):
def add_reader(self, fd, callback, *args):
"""Add a reader callback."""
self._ensure_fd_no_transport(fd)
- return self._add_reader(fd, callback, *args)
+ self._add_reader(fd, callback, *args)
def remove_reader(self, fd):
"""Remove a reader callback."""
@@ -339,7 +341,7 @@ def remove_reader(self, fd):
def add_writer(self, fd, callback, *args):
"""Add a writer callback.."""
self._ensure_fd_no_transport(fd)
- return self._add_writer(fd, callback, *args)
+ self._add_writer(fd, callback, *args)
def remove_writer(self, fd):
"""Remove a writer callback."""
@@ -362,13 +364,15 @@ async def sock_recv(self, sock, n):
pass
fut = self.create_future()
fd = sock.fileno()
- self.add_reader(fd, self._sock_recv, fut, sock, n)
+ self._ensure_fd_no_transport(fd)
+ handle = self._add_reader(fd, self._sock_recv, fut, sock, n)
fut.add_done_callback(
- functools.partial(self._sock_read_done, fd))
+ functools.partial(self._sock_read_done, fd, handle=handle))
return await fut
- def _sock_read_done(self, fd, fut):
- self.remove_reader(fd)
+ def _sock_read_done(self, fd, fut, handle=None):
+ if handle is None or not handle.cancelled():
+ self.remove_reader(fd)
def _sock_recv(self, fut, sock, n):
# _sock_recv() can add itself as an I/O callback if the operation can't
@@ -401,9 +405,10 @@ async def sock_recv_into(self, sock, buf):
pass
fut = self.create_future()
fd = sock.fileno()
- self.add_reader(fd, self._sock_recv_into, fut, sock, buf)
+ self._ensure_fd_no_transport(fd)
+ handle = self._add_reader(fd, self._sock_recv_into, fut, sock, buf)
fut.add_done_callback(
- functools.partial(self._sock_read_done, fd))
+ functools.partial(self._sock_read_done, fd, handle=handle))
return await fut
def _sock_recv_into(self, fut, sock, buf):
@@ -446,11 +451,12 @@ async def sock_sendall(self, sock, data):
fut = self.create_future()
fd = sock.fileno()
- fut.add_done_callback(
- functools.partial(self._sock_write_done, fd))
+ self._ensure_fd_no_transport(fd)
# use a trick with a list in closure to store a mutable state
- self.add_writer(fd, self._sock_sendall, fut, sock,
- memoryview(data), [n])
+ handle = self._add_writer(fd, self._sock_sendall, fut, sock,
+ memoryview(data), [n])
+ fut.add_done_callback(
+ functools.partial(self._sock_write_done, fd, handle=handle))
return await fut
def _sock_sendall(self, fut, sock, view, pos):
@@ -502,9 +508,11 @@ def _sock_connect(self, fut, sock, address):
# connection runs in background. We have to wait until the socket
# becomes writable to be notified when the connection succeed or
# fails.
+ self._ensure_fd_no_transport(fd)
+ handle = self._add_writer(
+ fd, self._sock_connect_cb, fut, sock, address)
fut.add_done_callback(
- functools.partial(self._sock_write_done, fd))
- self.add_writer(fd, self._sock_connect_cb, fut, sock, address)
+ functools.partial(self._sock_write_done, fd, handle=handle))
except (SystemExit, KeyboardInterrupt):
raise
except BaseException as exc:
@@ -512,8 +520,9 @@ def _sock_connect(self, fut, sock, address):
else:
fut.set_result(None)
- def _sock_write_done(self, fd, fut):
- self.remove_writer(fd)
+ def _sock_write_done(self, fd, fut, handle=None):
+ if handle is None or not handle.cancelled():
+ self.remove_writer(fd)
def _sock_connect_cb(self, fut, sock, address):
if fut.done():
diff --git a/Lib/asyncio/sslproto.py b/Lib/asyncio/sslproto.py
index 3eca6b4a39128b..cad25b26539f5a 100644
--- a/Lib/asyncio/sslproto.py
+++ b/Lib/asyncio/sslproto.py
@@ -5,7 +5,6 @@
except ImportError: # pragma: no cover
ssl = None
-from . import base_events
from . import constants
from . import protocols
from . import transports
diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py
index 717837d8568436..21b98b6647bd90 100644
--- a/Lib/asyncio/tasks.py
+++ b/Lib/asyncio/tasks.py
@@ -230,7 +230,7 @@ def print_stack(self, *, limit=None, file=None):
"""
return base_tasks._task_print_stack(self, limit, file)
- def cancel(self):
+ def cancel(self, msg=None):
"""Request that this task cancel itself.
This arranges for a CancelledError to be thrown into the
@@ -254,13 +254,14 @@ def cancel(self):
if self.done():
return False
if self._fut_waiter is not None:
- if self._fut_waiter.cancel():
+ if self._fut_waiter.cancel(msg=msg):
# Leave self._fut_waiter; it may be a Task that
# catches and ignores the cancellation so we may have
# to cancel it again later.
return True
# It must be the case that self.__step is already scheduled.
self._must_cancel = True
+ self._cancel_message = msg
return True
def __step(self, exc=None):
@@ -269,7 +270,7 @@ def __step(self, exc=None):
f'_step(): already done: {self!r}, {exc!r}')
if self._must_cancel:
if not isinstance(exc, exceptions.CancelledError):
- exc = exceptions.CancelledError()
+ exc = self._make_cancelled_error()
self._must_cancel = False
coro = self._coro
self._fut_waiter = None
@@ -287,10 +288,12 @@ def __step(self, exc=None):
if self._must_cancel:
# Task is cancelled right before coro stops.
self._must_cancel = False
- super().cancel()
+ super().cancel(msg=self._cancel_message)
else:
super().set_result(exc.value)
- except exceptions.CancelledError:
+ except exceptions.CancelledError as exc:
+ # Save the original exception so we can chain it later.
+ self._cancelled_exc = exc
super().cancel() # I.e., Future.cancel(self).
except (KeyboardInterrupt, SystemExit) as exc:
super().set_exception(exc)
@@ -319,7 +322,8 @@ def __step(self, exc=None):
self.__wakeup, context=self._context)
self._fut_waiter = result
if self._must_cancel:
- if self._fut_waiter.cancel():
+ if self._fut_waiter.cancel(
+ msg=self._cancel_message):
self._must_cancel = False
else:
new_exc = RuntimeError(
@@ -496,7 +500,15 @@ async def wait_for(fut, timeout, *, loop=None):
# after wait_for() returns.
# See https://bugs.python.org/issue32751
await _cancel_and_wait(fut, loop=loop)
- raise exceptions.TimeoutError()
+ # In case task cancellation failed with some
+ # exception, we should re-raise it
+ # See https://bugs.python.org/issue40607
+ try:
+ fut.result()
+ except exceptions.CancelledError as exc:
+ raise exceptions.TimeoutError() from exc
+ else:
+ raise exceptions.TimeoutError()
finally:
timeout_handle.cancel()
@@ -708,12 +720,12 @@ def __init__(self, children, *, loop=None):
self._children = children
self._cancel_requested = False
- def cancel(self):
+ def cancel(self, msg=None):
if self.done():
return False
ret = False
for child in self._children:
- if child.cancel():
+ if child.cancel(msg=msg):
ret = True
if ret:
# If any child tasks were actually cancelled, we should
@@ -772,7 +784,7 @@ def _done_callback(fut):
# Check if 'fut' is cancelled first, as
# 'fut.exception()' will *raise* a CancelledError
# instead of returning it.
- exc = exceptions.CancelledError()
+ exc = fut._make_cancelled_error()
outer.set_exception(exc)
return
else:
@@ -788,10 +800,15 @@ def _done_callback(fut):
for fut in children:
if fut.cancelled():
- # Check if 'fut' is cancelled first, as
- # 'fut.exception()' will *raise* a CancelledError
- # instead of returning it.
- res = exceptions.CancelledError()
+ # Check if 'fut' is cancelled first, as 'fut.exception()'
+ # will *raise* a CancelledError instead of returning it.
+ # Also, since we're adding the exception return value
+ # to 'results' instead of raising it, don't bother
+ # setting __context__. This also lets us preserve
+ # calling '_make_cancelled_error()' at most once.
+ res = exceptions.CancelledError(
+ '' if fut._cancel_message is None else
+ fut._cancel_message)
else:
res = fut.exception()
if res is None:
@@ -802,7 +819,8 @@ def _done_callback(fut):
# If gather is being cancelled we must propagate the
# cancellation regardless of *return_exceptions* argument.
# See issue 32684.
- outer.set_exception(exceptions.CancelledError())
+ exc = fut._make_cancelled_error()
+ outer.set_exception(exc)
else:
outer.set_result(results)
diff --git a/Lib/asyncio/threads.py b/Lib/asyncio/threads.py
new file mode 100644
index 00000000000000..51e0ba95d822e5
--- /dev/null
+++ b/Lib/asyncio/threads.py
@@ -0,0 +1,25 @@
+"""High-level support for working with threads in asyncio"""
+
+import functools
+import contextvars
+
+from . import events
+
+
+__all__ = "to_thread",
+
+
+async def to_thread(func, /, *args, **kwargs):
+ """Asynchronously run function *func* in a separate thread.
+
+ Any *args and **kwargs supplied for this function are directly passed
+    to *func*. Also, the current :class:`contextvars.Context` is propagated,
+ allowing context variables from the main thread to be accessed in the
+ separate thread.
+
+ Return an asyncio.Future which represents the eventual result of *func*.
+ """
+ loop = events.get_running_loop()
+ ctx = contextvars.copy_context()
+ func_call = functools.partial(ctx.run, func, *args, **kwargs)
+ return await loop.run_in_executor(None, func_call)
diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py
index 19d713545e4cd1..f34a5b4b443736 100644
--- a/Lib/asyncio/unix_events.py
+++ b/Lib/asyncio/unix_events.py
@@ -101,7 +101,7 @@ def add_signal_handler(self, sig, callback, *args):
try:
# Register a dummy signal handler to ask Python to write the signal
- # number in the wakup file descriptor. _process_self_data() will
+ # number in the wakeup file descriptor. _process_self_data() will
# read signal numbers from this file descriptor to handle signals.
signal.signal(sig, _sighandler_noop)
diff --git a/Lib/asyncio/windows_events.py b/Lib/asyncio/windows_events.py
index ac51109ff1a83d..c07fe3241c569a 100644
--- a/Lib/asyncio/windows_events.py
+++ b/Lib/asyncio/windows_events.py
@@ -75,9 +75,9 @@ def _cancel_overlapped(self):
self._loop.call_exception_handler(context)
self._ov = None
- def cancel(self):
+ def cancel(self, msg=None):
self._cancel_overlapped()
- return super().cancel()
+ return super().cancel(msg=msg)
def set_exception(self, exception):
super().set_exception(exception)
@@ -149,9 +149,9 @@ def _unregister_wait(self):
self._unregister_wait_cb(None)
- def cancel(self):
+ def cancel(self, msg=None):
self._unregister_wait()
- return super().cancel()
+ return super().cancel(msg=msg)
def set_exception(self, exception):
self._unregister_wait()
diff --git a/Lib/bdb.py b/Lib/bdb.py
index 7b19ba3690362d..b18a0612d8c789 100644
--- a/Lib/bdb.py
+++ b/Lib/bdb.py
@@ -548,14 +548,7 @@ def format_stack_entry(self, frame_lineno, lprefix=': '):
s += frame.f_code.co_name
else:
s += ""
- if '__args__' in frame.f_locals:
- args = frame.f_locals['__args__']
- else:
- args = None
- if args:
- s += reprlib.repr(args)
- else:
- s += '()'
+ s += '()'
if '__return__' in frame.f_locals:
rv = frame.f_locals['__return__']
s += '->'
diff --git a/Lib/bz2.py b/Lib/bz2.py
index e094fbb548bc95..ce07ebeb142d92 100644
--- a/Lib/bz2.py
+++ b/Lib/bz2.py
@@ -12,7 +12,6 @@
from builtins import open as _builtin_open
import io
import os
-import warnings
import _compression
from threading import RLock
diff --git a/Lib/codecs.py b/Lib/codecs.py
index 21c45a7d10a4c9..7f23e9775df804 100644
--- a/Lib/codecs.py
+++ b/Lib/codecs.py
@@ -905,11 +905,16 @@ def open(filename, mode='r', encoding=None, errors='strict', buffering=-1):
file = builtins.open(filename, mode, buffering)
if encoding is None:
return file
- info = lookup(encoding)
- srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors)
- # Add attributes to simplify introspection
- srw.encoding = encoding
- return srw
+
+ try:
+ info = lookup(encoding)
+ srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors)
+ # Add attributes to simplify introspection
+ srw.encoding = encoding
+ return srw
+ except:
+ file.close()
+ raise
def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):
diff --git a/Lib/codeop.py b/Lib/codeop.py
index 082285f94fe847..835e68c09ba272 100644
--- a/Lib/codeop.py
+++ b/Lib/codeop.py
@@ -112,7 +112,8 @@ def compile_command(source, filename="", symbol="single"):
source -- the source string; may contain \n characters
filename -- optional filename from which source was read; default
""
- symbol -- optional grammar start symbol; "single" (default) or "eval"
+ symbol -- optional grammar start symbol; "single" (default), "exec"
+ or "eval"
Return value / exceptions raised:
diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py
index cec6c9781a15e0..55fb46c9bc157f 100644
--- a/Lib/collections/__init__.py
+++ b/Lib/collections/__init__.py
@@ -39,6 +39,21 @@
pass
+def __getattr__(name):
+ # For backwards compatibility, continue to make the collections ABCs
+ # through Python 3.6 available through the collections module.
+ # Note, no new collections ABCs were added in Python 3.7
+ if name in _collections_abc.__all__:
+ obj = getattr(_collections_abc, name)
+ import warnings
+ warnings.warn("Using or importing the ABCs from 'collections' instead "
+ "of from 'collections.abc' is deprecated since Python 3.3, "
+ "and in 3.10 it will stop working",
+ DeprecationWarning, stacklevel=2)
+ globals()[name] = obj
+ return obj
+ raise AttributeError(f'module {__name__!r} has no attribute {name!r}')
+
################################################################################
### OrderedDict
################################################################################
@@ -278,6 +293,24 @@ def __eq__(self, other):
return dict.__eq__(self, other) and all(map(_eq, self, other))
return dict.__eq__(self, other)
+ def __ior__(self, other):
+ self.update(other)
+ return self
+
+ def __or__(self, other):
+ if not isinstance(other, dict):
+ return NotImplemented
+ new = self.__class__(self)
+ new.update(other)
+ return new
+
+ def __ror__(self, other):
+ if not isinstance(other, dict):
+ return NotImplemented
+ new = self.__class__(other)
+ new.update(self)
+ return new
+
try:
from _collections import OrderedDict
@@ -373,11 +406,9 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non
# Create all the named tuple methods to be added to the class namespace
- s = f'def __new__(_cls, {arg_list}): return _tuple_new(_cls, ({arg_list}))'
+ s = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
namespace = {'_tuple_new': tuple_new, '__name__': f'namedtuple_{typename}'}
- # Note: exec() has the side-effect of interning the field names
- exec(s, namespace)
- __new__ = namespace['__new__']
+ __new__ = eval(s, namespace)
__new__.__doc__ = f'Create new instance of {typename}({arg_list})'
if defaults is not None:
__new__.__defaults__ = defaults
@@ -678,6 +709,25 @@ def __repr__(self):
#
# To strip negative and zero counts, add-in an empty counter:
# c += Counter()
+ #
+ # When the multiplicities are all zero or one, multiset operations
+ # are guaranteed to be equivalent to the corresponding operations
+ # for regular sets.
+ # Given counter multisets such as:
+ # cp = Counter(a=1, b=0, c=1)
+ # cq = Counter(c=1, d=0, e=1)
+ # The corresponding regular sets would be:
+ # sp = {'a', 'c'}
+ # sq = {'c', 'e'}
+ # All of the following relations would hold:
+ # set(cp + cq) == sp | sq
+ # set(cp - cq) == sp - sq
+ # set(cp | cq) == sp | sq
+ # set(cp & cq) == sp & sq
+ # cp.isequal(cq) == (sp == sq)
+ # cp.issubset(cq) == sp.issubset(sq)
+ # cp.issuperset(cq) == sp.issuperset(sq)
+ # cp.isdisjoint(cq) == sp.isdisjoint(sq)
def __add__(self, other):
'''Add counts from two counters.
@@ -836,6 +886,92 @@ def __iand__(self, other):
self[elem] = other_count
return self._keep_positive()
+ def isequal(self, other):
+ ''' Test whether counts agree exactly.
+
+ Negative or missing counts are treated as zero.
+
+ This is different than the inherited __eq__() method which
+ treats negative or missing counts as distinct from zero:
+
+ >>> Counter(a=1, b=0).isequal(Counter(a=1))
+ True
+ >>> Counter(a=1, b=0) == Counter(a=1)
+ False
+
+ Logically equivalent to: +self == +other
+ '''
+ if not isinstance(other, Counter):
+ other = Counter(other)
+ for elem in set(self) | set(other):
+ left = self[elem]
+ right = other[elem]
+ if left == right:
+ continue
+ if left < 0:
+ left = 0
+ if right < 0:
+ right = 0
+ if left != right:
+ return False
+ return True
+
+ def issubset(self, other):
+ '''True if the counts in self are less than or equal to those in other.
+
+ Negative or missing counts are treated as zero.
+
+ Logically equivalent to: not self - (+other)
+ '''
+ if not isinstance(other, Counter):
+ other = Counter(other)
+ for elem, count in self.items():
+ other_count = other[elem]
+ if other_count < 0:
+ other_count = 0
+ if count > other_count:
+ return False
+ return True
+
+ def issuperset(self, other):
+ '''True if the counts in self are greater than or equal to those in other.
+
+ Negative or missing counts are treated as zero.
+
+ Logically equivalent to: not other - (+self)
+ '''
+ if not isinstance(other, Counter):
+ other = Counter(other)
+ return other.issubset(self)
+
+ def isdisjoint(self, other):
+ '''True if none of the elements in self overlap with those in other.
+
+ Negative or missing counts are ignored.
+
+ Logically equivalent to: not (+self) & (+other)
+ '''
+ if not isinstance(other, Counter):
+ other = Counter(other)
+ for elem, count in self.items():
+ if count > 0 and other[elem] > 0:
+ return False
+ return True
+
+ # Rich comparison operators for multiset subset and superset tests
+ # have been deliberately omitted due to semantic conflicts with the
+ # existing inherited dict equality method. Subset and superset
+ # semantics ignore zero counts and require that p⊆q ∧ p⊇q ⇔ p=q;
+ # however, that would not be the case for p=Counter(a=1, b=0)
+ # and q=Counter(a=1) where the dictionaries are not equal.
+
+ def _omitted(self, other):
+ raise TypeError(
+ 'Rich comparison operators have been deliberately omitted. '
+ 'Use the isequal(), issubset(), and issuperset() methods instead.')
+
+    __lt__ = __le__ = __gt__ = __ge__ = _omitted
+
########################################################################
### ChainMap
@@ -946,6 +1082,25 @@ def clear(self):
'Clear maps[0], leaving maps[1:] intact.'
self.maps[0].clear()
+ def __ior__(self, other):
+ self.maps[0] |= other
+ return self
+
+ def __or__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ m = self.maps[0].copy()
+ m.update(other)
+ return self.__class__(m, *self.maps[1:])
+ return NotImplemented
+
+ def __ror__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ m = dict(other)
+ for child in reversed(self.maps):
+ m.update(child)
+ return self.__class__(m)
+ return NotImplemented
+
################################################################################
### UserDict
@@ -979,6 +1134,26 @@ def __contains__(self, key):
# Now, add the methods in dicts but not in MutableMapping
def __repr__(self): return repr(self.data)
+
+ def __or__(self, other):
+ if isinstance(other, UserDict):
+ return self.__class__(self.data | other.data)
+ if isinstance(other, dict):
+ return self.__class__(self.data | other)
+ return NotImplemented
+ def __ror__(self, other):
+ if isinstance(other, UserDict):
+ return self.__class__(other.data | self.data)
+ if isinstance(other, dict):
+ return self.__class__(other | self.data)
+ return NotImplemented
+ def __ior__(self, other):
+ if isinstance(other, UserDict):
+ self.data |= other.data
+ else:
+ self.data |= other
+ return self
+
def __copy__(self):
inst = self.__class__.__new__(self.__class__)
inst.__dict__.update(self.__dict__)
@@ -1167,6 +1342,14 @@ def count(self, sub, start=0, end=_sys.maxsize):
if isinstance(sub, UserString):
sub = sub.data
return self.data.count(sub, start, end)
+ def removeprefix(self, prefix, /):
+ if isinstance(prefix, UserString):
+ prefix = prefix.data
+ return self.__class__(self.data.removeprefix(prefix))
+ def removesuffix(self, suffix, /):
+ if isinstance(suffix, UserString):
+ suffix = suffix.data
+ return self.__class__(self.data.removesuffix(suffix))
def encode(self, encoding='utf-8', errors='strict'):
encoding = 'utf-8' if encoding is None else encoding
errors = 'strict' if errors is None else errors
diff --git a/Lib/compileall.py b/Lib/compileall.py
index 8cfde5b7b3e0aa..fe7f450c55e1c5 100644
--- a/Lib/compileall.py
+++ b/Lib/compileall.py
@@ -15,6 +15,7 @@
import importlib.util
import py_compile
import struct
+import filecmp
from functools import partial
from pathlib import Path
@@ -46,8 +47,8 @@ def _walk_dir(dir, maxlevels, quiet=0):
def compile_dir(dir, maxlevels=None, ddir=None, force=False,
rx=None, quiet=0, legacy=False, optimize=-1, workers=1,
- invalidation_mode=None, stripdir=None,
- prependdir=None, limit_sl_dest=None):
+ invalidation_mode=None, *, stripdir=None,
+ prependdir=None, limit_sl_dest=None, hardlink_dupes=False):
"""Byte-compile all modules in the given directory tree.
Arguments (only dir is required):
@@ -66,12 +67,20 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False,
workers: maximum number of parallel workers
invalidation_mode: how the up-to-dateness of the pyc will be checked
stripdir: part of path to left-strip from source file path
- prependdir: path to prepend to beggining of original file path, applied
+ prependdir: path to prepend to beginning of original file path, applied
after stripdir
limit_sl_dest: ignore symlinks if they are pointing outside of
the defined path
+ hardlink_dupes: hardlink duplicated pyc files
"""
ProcessPoolExecutor = None
+ if ddir is not None and (stripdir is not None or prependdir is not None):
+ raise ValueError(("Destination dir (ddir) cannot be used "
+ "in combination with stripdir or prependdir"))
+ if ddir is not None:
+ stripdir = dir
+ prependdir = ddir
+ ddir = None
if workers < 0:
raise ValueError('workers must be greater or equal to 0')
if workers != 1:
@@ -97,7 +106,8 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False,
invalidation_mode=invalidation_mode,
stripdir=stripdir,
prependdir=prependdir,
- limit_sl_dest=limit_sl_dest),
+ limit_sl_dest=limit_sl_dest,
+ hardlink_dupes=hardlink_dupes),
files)
success = min(results, default=True)
else:
@@ -105,14 +115,15 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False,
if not compile_file(file, ddir, force, rx, quiet,
legacy, optimize, invalidation_mode,
stripdir=stripdir, prependdir=prependdir,
- limit_sl_dest=limit_sl_dest):
+ limit_sl_dest=limit_sl_dest,
+ hardlink_dupes=hardlink_dupes):
success = False
return success
def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
legacy=False, optimize=-1,
- invalidation_mode=None, stripdir=None, prependdir=None,
- limit_sl_dest=None):
+ invalidation_mode=None, *, stripdir=None, prependdir=None,
+ limit_sl_dest=None, hardlink_dupes=False):
"""Byte-compile one file.
Arguments (only fullname is required):
@@ -129,10 +140,11 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
files each with one optimization level.
invalidation_mode: how the up-to-dateness of the pyc will be checked
stripdir: part of path to left-strip from source file path
- prependdir: path to prepend to beggining of original file path, applied
+ prependdir: path to prepend to beginning of original file path, applied
after stripdir
limit_sl_dest: ignore symlinks if they are pointing outside of
the defined path.
+ hardlink_dupes: hardlink duplicated pyc files
"""
if ddir is not None and (stripdir is not None or prependdir is not None):
@@ -169,6 +181,14 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
if isinstance(optimize, int):
optimize = [optimize]
+ # Use set() to remove duplicates.
+ # Use sorted() to create pyc files in a deterministic order.
+ optimize = sorted(set(optimize))
+
+ if hardlink_dupes and len(optimize) < 2:
+ raise ValueError("Hardlinking of duplicated bytecode makes sense "
+ "only for more than one optimization level")
+
if rx is not None:
mo = rx.search(fullname)
if mo:
@@ -213,10 +233,16 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
if not quiet:
print('Compiling {!r}...'.format(fullname))
try:
- for opt_level, cfile in opt_cfiles.items():
+ for index, opt_level in enumerate(optimize):
+ cfile = opt_cfiles[opt_level]
ok = py_compile.compile(fullname, cfile, dfile, True,
optimize=opt_level,
invalidation_mode=invalidation_mode)
+ if index > 0 and hardlink_dupes:
+ previous_cfile = opt_cfiles[optimize[index - 1]]
+ if filecmp.cmp(cfile, previous_cfile, shallow=False):
+ os.unlink(cfile)
+ os.link(previous_cfile, cfile)
except py_compile.PyCompileError as err:
success = False
if quiet >= 2:
@@ -345,6 +371,9 @@ def main():
'Python interpreter itself (specified by -O).'))
parser.add_argument('-e', metavar='DIR', dest='limit_sl_dest',
help='Ignore symlinks pointing outsite of the DIR')
+ parser.add_argument('--hardlink-dupes', action='store_true',
+ dest='hardlink_dupes',
+ help='Hardlink duplicated pyc files')
args = parser.parse_args()
compile_dests = args.compile_dest
@@ -364,6 +393,10 @@ def main():
if args.opt_levels is None:
args.opt_levels = [-1]
+ if len(args.opt_levels) == 1 and args.hardlink_dupes:
+ parser.error(("Hardlinking of duplicated bytecode makes sense "
+ "only for more than one optimization level."))
+
if args.ddir is not None and (
args.stripdir is not None or args.prependdir is not None
):
@@ -397,7 +430,8 @@ def main():
stripdir=args.stripdir,
prependdir=args.prependdir,
optimize=args.opt_levels,
- limit_sl_dest=args.limit_sl_dest):
+ limit_sl_dest=args.limit_sl_dest,
+ hardlink_dupes=args.hardlink_dupes):
success = False
else:
if not compile_dir(dest, maxlevels, args.ddir,
@@ -407,7 +441,8 @@ def main():
stripdir=args.stripdir,
prependdir=args.prependdir,
optimize=args.opt_levels,
- limit_sl_dest=args.limit_sl_dest):
+ limit_sl_dest=args.limit_sl_dest,
+ hardlink_dupes=args.hardlink_dupes):
success = False
return success
else:
diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py
index fd0acec55d0460..bf546f8ae1d1cc 100644
--- a/Lib/concurrent/futures/_base.py
+++ b/Lib/concurrent/futures/_base.py
@@ -7,6 +7,7 @@
import logging
import threading
import time
+import types
FIRST_COMPLETED = 'FIRST_COMPLETED'
FIRST_EXCEPTION = 'FIRST_EXCEPTION'
@@ -544,6 +545,8 @@ def set_exception(self, exception):
self._condition.notify_all()
self._invoke_callbacks()
+ __class_getitem__ = classmethod(types.GenericAlias)
+
class Executor(object):
"""This is an abstract base class for concrete asynchronous executors."""
diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py
index fd9f572b6c7116..90bc98bf2ecd17 100644
--- a/Lib/concurrent/futures/process.py
+++ b/Lib/concurrent/futures/process.py
@@ -45,11 +45,9 @@
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
-import atexit
import os
from concurrent.futures import _base
import queue
-from queue import Full
import multiprocessing as mp
import multiprocessing.connection
from multiprocessing.queues import Queue
@@ -60,19 +58,6 @@
import sys
import traceback
-# Workers are created as daemon threads and processes. This is done to allow the
-# interpreter to exit when there are still idle processes in a
-# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
-# allowing workers to die with the interpreter has two undesirable properties:
-# - The workers would still be running during interpreter shutdown,
-# meaning that they would fail in unpredictable ways.
-# - The workers could be killed while evaluating a work item, which could
-# be bad if the callable being evaluated has external side-effects e.g.
-# writing to a file.
-#
-# To work around this problem, an exit handler is installed which tells the
-# workers to exit when their work queues are empty and then waits until the
-# threads/processes finish.
_threads_wakeups = weakref.WeakKeyDictionary()
_global_shutdown = False
@@ -80,18 +65,23 @@
class _ThreadWakeup:
def __init__(self):
+ self._closed = False
self._reader, self._writer = mp.Pipe(duplex=False)
def close(self):
- self._writer.close()
- self._reader.close()
+ if not self._closed:
+ self._closed = True
+ self._writer.close()
+ self._reader.close()
def wakeup(self):
- self._writer.send_bytes(b"")
+ if not self._closed:
+ self._writer.send_bytes(b"")
def clear(self):
- while self._reader.poll():
- self._reader.recv_bytes()
+ if not self._closed:
+ while self._reader.poll():
+ self._reader.recv_bytes()
def _python_exit():
@@ -99,10 +89,17 @@ def _python_exit():
_global_shutdown = True
items = list(_threads_wakeups.items())
for _, thread_wakeup in items:
+ # call not protected by ProcessPoolExecutor._shutdown_lock
thread_wakeup.wakeup()
for t, _ in items:
t.join()
+# Register for `_python_exit()` to be called just before joining all
+# non-daemon threads. This is used instead of `atexit.register()` for
+# compatibility with subinterpreters, which no longer support daemon threads.
+# See bpo-39812 for context.
+threading._register_atexit(_python_exit)
+
# Controls how many more calls than processes will be queued in the call queue.
# A smaller number will mean that processes spend more time idle waiting for
# work while a larger number will make Future.cancel() succeed less frequently
@@ -160,8 +157,11 @@ def __init__(self, work_id, fn, args, kwargs):
class _SafeQueue(Queue):
"""Safe Queue set exception to the future object linked to a job"""
- def __init__(self, max_size=0, *, ctx, pending_work_items):
+ def __init__(self, max_size=0, *, ctx, pending_work_items, shutdown_lock,
+ thread_wakeup):
self.pending_work_items = pending_work_items
+ self.shutdown_lock = shutdown_lock
+ self.thread_wakeup = thread_wakeup
super().__init__(max_size, ctx=ctx)
def _on_queue_feeder_error(self, e, obj):
@@ -169,8 +169,11 @@ def _on_queue_feeder_error(self, e, obj):
tb = traceback.format_exception(type(e), e, e.__traceback__)
e.__cause__ = _RemoteTraceback('\n"""\n{}"""'.format(''.join(tb)))
work_item = self.pending_work_items.pop(obj.work_id, None)
- # work_item can be None if another process terminated. In this case,
- # the queue_manager_thread fails all work_items with BrokenProcessPool
+ with self.shutdown_lock:
+ self.thread_wakeup.wakeup()
+ # work_item can be None if another process terminated. In this
+ # case, the executor_manager_thread fails all work_items
+ # with BrokenProcessPool
if work_item is not None:
work_item.future.set_exception(e)
else:
@@ -186,6 +189,7 @@ def _get_chunks(*iterables, chunksize):
return
yield chunk
+
def _process_chunk(fn, chunk):
""" Processes a chunk of an iterable passed to map.
@@ -249,120 +253,132 @@ def _process_worker(call_queue, result_queue, initializer, initargs):
del call_item
-def _add_call_item_to_queue(pending_work_items,
- work_ids,
- call_queue):
- """Fills call_queue with _WorkItems from pending_work_items.
+class _ExecutorManagerThread(threading.Thread):
+ """Manages the communication between this process and the worker processes.
- This function never blocks.
+ The manager is run in a local thread.
Args:
- pending_work_items: A dict mapping work ids to _WorkItems e.g.
- {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
- work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
- are consumed and the corresponding _WorkItems from
- pending_work_items are transformed into _CallItems and put in
- call_queue.
- call_queue: A multiprocessing.Queue that will be filled with _CallItems
- derived from _WorkItems.
+ executor: A reference to the ProcessPoolExecutor that owns
+          this thread. A weakref will be owned by the manager as well as
+ references to internal objects used to introspect the state of
+ the executor.
"""
- while True:
- if call_queue.full():
- return
- try:
- work_id = work_ids.get(block=False)
- except queue.Empty:
- return
- else:
- work_item = pending_work_items[work_id]
-
- if work_item.future.set_running_or_notify_cancel():
- call_queue.put(_CallItem(work_id,
- work_item.fn,
- work_item.args,
- work_item.kwargs),
- block=True)
- else:
- del pending_work_items[work_id]
- continue
+ def __init__(self, executor):
+ # Store references to necessary internals of the executor.
+
+ # A _ThreadWakeup to allow waking up the queue_manager_thread from the
+ # main Thread and avoid deadlocks caused by permanently locked queues.
+ self.thread_wakeup = executor._executor_manager_thread_wakeup
+ self.shutdown_lock = executor._shutdown_lock
+
+ # A weakref.ref to the ProcessPoolExecutor that owns this thread. Used
+ # to determine if the ProcessPoolExecutor has been garbage collected
+ # and that the manager can exit.
+ # When the executor gets garbage collected, the weakref callback
+ # will wake up the queue management thread so that it can terminate
+ # if there is no pending work item.
+ def weakref_cb(_,
+ thread_wakeup=self.thread_wakeup,
+ shutdown_lock=self.shutdown_lock):
+ mp.util.debug('Executor collected: triggering callback for'
+ ' QueueManager wakeup')
+ with shutdown_lock:
+ thread_wakeup.wakeup()
-def _queue_management_worker(executor_reference,
- processes,
- pending_work_items,
- work_ids_queue,
- call_queue,
- result_queue,
- thread_wakeup):
- """Manages the communication between this process and the worker processes.
+ self.executor_reference = weakref.ref(executor, weakref_cb)
- This function is run in a local thread.
+ # A list of the ctx.Process instances used as workers.
+ self.processes = executor._processes
- Args:
- executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
- this thread. Used to determine if the ProcessPoolExecutor has been
- garbage collected and that this function can exit.
- process: A list of the ctx.Process instances used as
- workers.
- pending_work_items: A dict mapping work ids to _WorkItems e.g.
- {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
- work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
- call_queue: A ctx.Queue that will be filled with _CallItems
- derived from _WorkItems for processing by the process workers.
- result_queue: A ctx.SimpleQueue of _ResultItems generated by the
- process workers.
- thread_wakeup: A _ThreadWakeup to allow waking up the
- queue_manager_thread from the main Thread and avoid deadlocks
- caused by permanently locked queues.
- """
- executor = None
+ # A ctx.Queue that will be filled with _CallItems derived from
+ # _WorkItems for processing by the process workers.
+ self.call_queue = executor._call_queue
- def shutting_down():
- return (_global_shutdown or executor is None
- or executor._shutdown_thread)
+ # A ctx.SimpleQueue of _ResultItems generated by the process workers.
+ self.result_queue = executor._result_queue
- def shutdown_worker():
- # This is an upper bound on the number of children alive.
- n_children_alive = sum(p.is_alive() for p in processes.values())
- n_children_to_stop = n_children_alive
- n_sentinels_sent = 0
- # Send the right number of sentinels, to make sure all children are
- # properly terminated.
- while n_sentinels_sent < n_children_to_stop and n_children_alive > 0:
- for i in range(n_children_to_stop - n_sentinels_sent):
- try:
- call_queue.put_nowait(None)
- n_sentinels_sent += 1
- except Full:
- break
- n_children_alive = sum(p.is_alive() for p in processes.values())
+ # A queue.Queue of work ids e.g. Queue([5, 6, ...]).
+ self.work_ids_queue = executor._work_ids
- # Release the queue's resources as soon as possible.
- call_queue.close()
- # If .join() is not called on the created processes then
- # some ctx.Queue methods may deadlock on Mac OS X.
- for p in processes.values():
- p.join()
+ # A dict mapping work ids to _WorkItems e.g.
+ # {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+ self.pending_work_items = executor._pending_work_items
- result_reader = result_queue._reader
- wakeup_reader = thread_wakeup._reader
- readers = [result_reader, wakeup_reader]
+ super().__init__()
- while True:
- _add_call_item_to_queue(pending_work_items,
- work_ids_queue,
- call_queue)
+ def run(self):
+ # Main loop for the executor manager thread.
+
+ while True:
+ self.add_call_item_to_queue()
+
+ result_item, is_broken, cause = self.wait_result_broken_or_wakeup()
+
+ if is_broken:
+ self.terminate_broken(cause)
+ return
+ if result_item is not None:
+ self.process_result_item(result_item)
+ # Delete reference to result_item to avoid keeping references
+ # while waiting on new results.
+ del result_item
+
+ # attempt to increment idle process count
+ executor = self.executor_reference()
+ if executor is not None:
+ executor._idle_worker_semaphore.release()
+ del executor
+
+ if self.is_shutting_down():
+ self.flag_executor_shutting_down()
+ # Since no new work items can be added, it is safe to shutdown
+ # this thread if there are no pending work items.
+ if not self.pending_work_items:
+ self.join_executor_internals()
+ return
+
+ def add_call_item_to_queue(self):
+ # Fills call_queue with _WorkItems from pending_work_items.
+ # This function never blocks.
+ while True:
+ if self.call_queue.full():
+ return
+ try:
+ work_id = self.work_ids_queue.get(block=False)
+ except queue.Empty:
+ return
+ else:
+ work_item = self.pending_work_items[work_id]
+
+ if work_item.future.set_running_or_notify_cancel():
+ self.call_queue.put(_CallItem(work_id,
+ work_item.fn,
+ work_item.args,
+ work_item.kwargs),
+ block=True)
+ else:
+ del self.pending_work_items[work_id]
+ continue
+
+ def wait_result_broken_or_wakeup(self):
# Wait for a result to be ready in the result_queue while checking
# that all worker processes are still running, or for a wake up
# signal send. The wake up signals come either from new tasks being
# submitted, from the executor being shutdown/gc-ed, or from the
# shutdown of the python interpreter.
- worker_sentinels = [p.sentinel for p in processes.values()]
+ result_reader = self.result_queue._reader
+ assert not self.thread_wakeup._closed
+ wakeup_reader = self.thread_wakeup._reader
+ readers = [result_reader, wakeup_reader]
+ worker_sentinels = [p.sentinel for p in self.processes.values()]
ready = mp.connection.wait(readers + worker_sentinels)
cause = None
is_broken = True
+ result_item = None
if result_reader in ready:
try:
result_item = result_reader.recv()
@@ -372,97 +388,138 @@ def shutdown_worker():
elif wakeup_reader in ready:
is_broken = False
- result_item = None
- thread_wakeup.clear()
- if is_broken:
- # Mark the process pool broken so that submits fail right now.
- executor = executor_reference()
- if executor is not None:
- executor._broken = ('A child process terminated '
- 'abruptly, the process pool is not '
- 'usable anymore')
- executor._shutdown_thread = True
- executor = None
- bpe = BrokenProcessPool("A process in the process pool was "
- "terminated abruptly while the future was "
- "running or pending.")
- if cause is not None:
- bpe.__cause__ = _RemoteTraceback(
- f"\n'''\n{''.join(cause)}'''")
- # All futures in flight must be marked failed
- for work_id, work_item in pending_work_items.items():
- work_item.future.set_exception(bpe)
- # Delete references to object. See issue16284
- del work_item
- pending_work_items.clear()
- # Terminate remaining workers forcibly: the queues or their
- # locks may be in a dirty state and block forever.
- for p in processes.values():
- p.terminate()
- shutdown_worker()
- return
+
+ with self.shutdown_lock:
+ self.thread_wakeup.clear()
+
+ return result_item, is_broken, cause
+
+ def process_result_item(self, result_item):
+        # Process a received result_item. This can be either the PID of a
+ # worker that exited gracefully or a _ResultItem
+
if isinstance(result_item, int):
# Clean shutdown of a worker using its PID
# (avoids marking the executor broken)
- assert shutting_down()
- p = processes.pop(result_item)
+ assert self.is_shutting_down()
+ p = self.processes.pop(result_item)
p.join()
- if not processes:
- shutdown_worker()
+ if not self.processes:
+ self.join_executor_internals()
return
- elif result_item is not None:
- work_item = pending_work_items.pop(result_item.work_id, None)
+ else:
+ # Received a _ResultItem so mark the future as completed.
+ work_item = self.pending_work_items.pop(result_item.work_id, None)
# work_item can be None if another process terminated (see above)
if work_item is not None:
if result_item.exception:
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
- # Delete references to object. See issue16284
- del work_item
- # Delete reference to result_item
- del result_item
- # Check whether we should start shutting down.
- executor = executor_reference()
+ def is_shutting_down(self):
+ # Check whether we should start shutting down the executor.
+ executor = self.executor_reference()
# No more work items can be added if:
# - The interpreter is shutting down OR
# - The executor that owns this worker has been collected OR
# - The executor that owns this worker has been shutdown.
- if shutting_down():
- try:
- # Flag the executor as shutting down as early as possible if it
- # is not gc-ed yet.
- if executor is not None:
- executor._shutdown_thread = True
- # Unless there are pending work items, we have nothing to cancel.
- if pending_work_items and executor._cancel_pending_futures:
- # Cancel all pending futures and update pending_work_items
- # to only have futures that are currently running.
- new_pending_work_items = {}
- for work_id, work_item in pending_work_items.items():
- if not work_item.future.cancel():
- new_pending_work_items[work_id] = work_item
-
- pending_work_items = new_pending_work_items
- # Drain work_ids_queue since we no longer need to
- # add items to the call queue.
- while True:
- try:
- work_ids_queue.get_nowait()
- except queue.Empty:
- break
+ return (_global_shutdown or executor is None
+ or executor._shutdown_thread)
- # Since no new work items can be added, it is safe to shutdown
- # this thread if there are no pending work items.
- if not pending_work_items:
- shutdown_worker()
- return
- except Full:
- # This is not a problem: we will eventually be woken up (in
- # result_queue.get()) and be able to send a sentinel again.
- pass
- executor = None
+ def terminate_broken(self, cause):
+ # Terminate the executor because it is in a broken state. The cause
+ # argument can be used to display more information on the error that
+        # led the executor into becoming broken.
+
+ # Mark the process pool broken so that submits fail right now.
+ executor = self.executor_reference()
+ if executor is not None:
+ executor._broken = ('A child process terminated '
+ 'abruptly, the process pool is not '
+ 'usable anymore')
+ executor._shutdown_thread = True
+ executor = None
+
+ # All pending tasks are to be marked failed with the following
+ # BrokenProcessPool error
+ bpe = BrokenProcessPool("A process in the process pool was "
+ "terminated abruptly while the future was "
+ "running or pending.")
+ if cause is not None:
+ bpe.__cause__ = _RemoteTraceback(
+ f"\n'''\n{''.join(cause)}'''")
+
+ # Mark pending tasks as failed.
+ for work_id, work_item in self.pending_work_items.items():
+ work_item.future.set_exception(bpe)
+ # Delete references to object. See issue16284
+ del work_item
+ self.pending_work_items.clear()
+
+ # Terminate remaining workers forcibly: the queues or their
+ # locks may be in a dirty state and block forever.
+ for p in self.processes.values():
+ p.terminate()
+
+ # clean up resources
+ self.join_executor_internals()
+
+ def flag_executor_shutting_down(self):
+ # Flag the executor as shutting down and cancel remaining tasks if
+ # requested as early as possible if it is not gc-ed yet.
+ executor = self.executor_reference()
+ if executor is not None:
+ executor._shutdown_thread = True
+ # Cancel pending work items if requested.
+ if executor._cancel_pending_futures:
+ # Cancel all pending futures and update pending_work_items
+ # to only have futures that are currently running.
+ new_pending_work_items = {}
+ for work_id, work_item in self.pending_work_items.items():
+ if not work_item.future.cancel():
+ new_pending_work_items[work_id] = work_item
+ self.pending_work_items = new_pending_work_items
+ # Drain work_ids_queue since we no longer need to
+ # add items to the call queue.
+ while True:
+ try:
+ self.work_ids_queue.get_nowait()
+ except queue.Empty:
+ break
+ # Make sure we do this only once to not waste time looping
+ # on running processes over and over.
+ executor._cancel_pending_futures = False
+
+ def shutdown_workers(self):
+ n_children_to_stop = self.get_n_children_alive()
+ n_sentinels_sent = 0
+ # Send the right number of sentinels, to make sure all children are
+ # properly terminated.
+ while (n_sentinels_sent < n_children_to_stop
+ and self.get_n_children_alive() > 0):
+ for i in range(n_children_to_stop - n_sentinels_sent):
+ try:
+ self.call_queue.put_nowait(None)
+ n_sentinels_sent += 1
+ except queue.Full:
+ break
+
+ def join_executor_internals(self):
+ self.shutdown_workers()
+ # Release the queue's resources as soon as possible.
+ self.call_queue.close()
+ self.call_queue.join_thread()
+ with self.shutdown_lock:
+ self.thread_wakeup.close()
+ # If .join() is not called on the created processes then
+ # some ctx.Queue methods may deadlock on Mac OS X.
+ for p in self.processes.values():
+ p.join()
+
+ def get_n_children_alive(self):
+ # This is an upper bound on the number of children alive.
+ return sum(p.is_alive() for p in self.processes.values())
_system_limits_checked = False
@@ -553,7 +610,7 @@ def __init__(self, max_workers=None, mp_context=None,
self._initargs = initargs
# Management thread
- self._queue_management_thread = None
+ self._executor_manager_thread = None
# Map of pids to processes
self._processes = {}
@@ -561,11 +618,22 @@ def __init__(self, max_workers=None, mp_context=None,
# Shutdown is a two-step process.
self._shutdown_thread = False
self._shutdown_lock = threading.Lock()
+ self._idle_worker_semaphore = threading.Semaphore(0)
self._broken = False
self._queue_count = 0
self._pending_work_items = {}
self._cancel_pending_futures = False
+ # _ThreadWakeup is a communication channel used to interrupt the wait
+ # of the main loop of executor_manager_thread from another thread (e.g.
+ # when calling executor.submit or executor.shutdown). We do not use the
+ # _result_queue to send wakeup signals to the executor_manager_thread
+ # as it could result in a deadlock if a worker process dies with the
+ # _result_queue write lock still acquired.
+ #
+ # _shutdown_lock must be locked to access _ThreadWakeup.
+ self._executor_manager_thread_wakeup = _ThreadWakeup()
+
# Create communication channels for the executor
# Make the call queue slightly larger than the number of processes to
# prevent the worker processes from idling. But don't make it too big
@@ -573,7 +641,9 @@ def __init__(self, max_workers=None, mp_context=None,
queue_size = self._max_workers + EXTRA_QUEUED_CALLS
self._call_queue = _SafeQueue(
max_size=queue_size, ctx=self._mp_context,
- pending_work_items=self._pending_work_items)
+ pending_work_items=self._pending_work_items,
+ shutdown_lock=self._shutdown_lock,
+ thread_wakeup=self._executor_manager_thread_wakeup)
# Killed worker processes can produce spurious "broken pipe"
# tracebacks in the queue's own worker thread. But we detect killed
# processes anyway, so silence the tracebacks.
@@ -581,43 +651,21 @@ def __init__(self, max_workers=None, mp_context=None,
self._result_queue = mp_context.SimpleQueue()
self._work_ids = queue.Queue()
- # _ThreadWakeup is a communication channel used to interrupt the wait
- # of the main loop of queue_manager_thread from another thread (e.g.
- # when calling executor.submit or executor.shutdown). We do not use the
- # _result_queue to send the wakeup signal to the queue_manager_thread
- # as it could result in a deadlock if a worker process dies with the
- # _result_queue write lock still acquired.
- self._queue_management_thread_wakeup = _ThreadWakeup()
-
- def _start_queue_management_thread(self):
- if self._queue_management_thread is None:
- # When the executor gets garbarge collected, the weakref callback
- # will wake up the queue management thread so that it can terminate
- # if there is no pending work item.
- def weakref_cb(_,
- thread_wakeup=self._queue_management_thread_wakeup):
- mp.util.debug('Executor collected: triggering callback for'
- ' QueueManager wakeup')
- thread_wakeup.wakeup()
+ def _start_executor_manager_thread(self):
+ if self._executor_manager_thread is None:
# Start the processes so that their sentinels are known.
- self._adjust_process_count()
- self._queue_management_thread = threading.Thread(
- target=_queue_management_worker,
- args=(weakref.ref(self, weakref_cb),
- self._processes,
- self._pending_work_items,
- self._work_ids,
- self._call_queue,
- self._result_queue,
- self._queue_management_thread_wakeup),
- name="QueueManagerThread")
- self._queue_management_thread.daemon = True
- self._queue_management_thread.start()
- _threads_wakeups[self._queue_management_thread] = \
- self._queue_management_thread_wakeup
+ self._executor_manager_thread = _ExecutorManagerThread(self)
+ self._executor_manager_thread.start()
+ _threads_wakeups[self._executor_manager_thread] = \
+ self._executor_manager_thread_wakeup
def _adjust_process_count(self):
- for _ in range(len(self._processes), self._max_workers):
+ # if there's an idle process, we don't need to spawn a new one.
+ if self._idle_worker_semaphore.acquire(blocking=False):
+ return
+
+ process_count = len(self._processes)
+ if process_count < self._max_workers:
p = self._mp_context.Process(
target=_process_worker,
args=(self._call_queue,
@@ -644,9 +692,10 @@ def submit(self, fn, /, *args, **kwargs):
self._work_ids.put(self._queue_count)
self._queue_count += 1
# Wake up queue management thread
- self._queue_management_thread_wakeup.wakeup()
+ self._executor_manager_thread_wakeup.wakeup()
- self._start_queue_management_thread()
+ self._adjust_process_count()
+ self._start_executor_manager_thread()
return f
submit.__doc__ = _base.Executor.submit.__doc__
@@ -683,27 +732,20 @@ def shutdown(self, wait=True, *, cancel_futures=False):
with self._shutdown_lock:
self._cancel_pending_futures = cancel_futures
self._shutdown_thread = True
+ if self._executor_manager_thread_wakeup is not None:
+ # Wake up queue management thread
+ self._executor_manager_thread_wakeup.wakeup()
- if self._queue_management_thread:
- # Wake up queue management thread
- self._queue_management_thread_wakeup.wakeup()
- if wait:
- self._queue_management_thread.join()
+ if self._executor_manager_thread is not None and wait:
+ self._executor_manager_thread.join()
# To reduce the risk of opening too many files, remove references to
# objects that use file descriptors.
- self._queue_management_thread = None
- if self._call_queue is not None:
- self._call_queue.close()
- if wait:
- self._call_queue.join_thread()
- self._call_queue = None
+ self._executor_manager_thread = None
+ self._call_queue = None
+ if self._result_queue is not None and wait:
+ self._result_queue.close()
self._result_queue = None
self._processes = None
-
- if self._queue_management_thread_wakeup:
- self._queue_management_thread_wakeup.close()
- self._queue_management_thread_wakeup = None
+ self._executor_manager_thread_wakeup = None
shutdown.__doc__ = _base.Executor.shutdown.__doc__
-
-atexit.register(_python_exit)
diff --git a/Lib/concurrent/futures/thread.py b/Lib/concurrent/futures/thread.py
index be79161bf8561d..b7a2cac7f57015 100644
--- a/Lib/concurrent/futures/thread.py
+++ b/Lib/concurrent/futures/thread.py
@@ -5,27 +5,14 @@
__author__ = 'Brian Quinlan (brian@sweetapp.com)'
-import atexit
from concurrent.futures import _base
import itertools
import queue
import threading
+import types
import weakref
import os
-# Workers are created as daemon threads. This is done to allow the interpreter
-# to exit when there are still idle threads in a ThreadPoolExecutor's thread
-# pool (i.e. shutdown() was not called). However, allowing workers to die with
-# the interpreter has two undesirable properties:
-# - The workers would still be running during interpreter shutdown,
-# meaning that they would fail in unpredictable ways.
-# - The workers could be killed while evaluating a work item, which could
-# be bad if the callable being evaluated has external side-effects e.g.
-# writing to a file.
-#
-# To work around this problem, an exit handler is installed which tells the
-# workers to exit when their work queues are empty and then waits until the
-# threads finish.
_threads_queues = weakref.WeakKeyDictionary()
_shutdown = False
@@ -43,7 +30,11 @@ def _python_exit():
for t, q in items:
t.join()
-atexit.register(_python_exit)
+# Register for `_python_exit()` to be called just before joining all
+# non-daemon threads. This is used instead of `atexit.register()` for
+# compatibility with subinterpreters, which no longer support daemon threads.
+# See bpo-39812 for context.
+threading._register_atexit(_python_exit)
class _WorkItem(object):
@@ -66,6 +57,8 @@ def run(self):
else:
self.future.set_result(result)
+ __class_getitem__ = classmethod(types.GenericAlias)
+
def _worker(executor_reference, work_queue, initializer, initargs):
if initializer is not None:
@@ -197,7 +190,6 @@ def weakref_cb(_, q=self._work_queue):
self._work_queue,
self._initializer,
self._initargs))
- t.daemon = True
t.start()
self._threads.add(t)
_threads_queues[t] = self._work_queue
diff --git a/Lib/contextlib.py b/Lib/contextlib.py
index 69c272831a55c2..ff92d9f913f4c2 100644
--- a/Lib/contextlib.py
+++ b/Lib/contextlib.py
@@ -4,7 +4,7 @@
import _collections_abc
from collections import deque
from functools import wraps
-from types import MethodType
+from types import MethodType, GenericAlias
__all__ = ["asynccontextmanager", "contextmanager", "closing", "nullcontext",
"AbstractContextManager", "AbstractAsyncContextManager",
@@ -16,6 +16,8 @@ class AbstractContextManager(abc.ABC):
"""An abstract base class for context managers."""
+ __class_getitem__ = classmethod(GenericAlias)
+
def __enter__(self):
"""Return `self` upon entering the runtime context."""
return self
@@ -36,6 +38,8 @@ class AbstractAsyncContextManager(abc.ABC):
"""An abstract base class for asynchronous context managers."""
+ __class_getitem__ = classmethod(GenericAlias)
+
async def __aenter__(self):
"""Return `self` upon entering the runtime context."""
return self
diff --git a/Lib/copy.py b/Lib/copy.py
index 41873f2c046cac..dd41c54dffe1d7 100644
--- a/Lib/copy.py
+++ b/Lib/copy.py
@@ -192,6 +192,7 @@ def _deepcopy_atomic(x, memo):
d[str] = _deepcopy_atomic
d[types.CodeType] = _deepcopy_atomic
d[type] = _deepcopy_atomic
+d[range] = _deepcopy_atomic
d[types.BuiltinFunctionType] = _deepcopy_atomic
d[types.FunctionType] = _deepcopy_atomic
d[weakref.ref] = _deepcopy_atomic
diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py
index 8f0991147d72f2..4afa4ebd422493 100644
--- a/Lib/ctypes/__init__.py
+++ b/Lib/ctypes/__init__.py
@@ -1,6 +1,7 @@
"""create and manipulate C data types in Python"""
import os as _os, sys as _sys
+import types as _types
__version__ = "1.1.0"
@@ -450,6 +451,8 @@ def __getitem__(self, name):
def LoadLibrary(self, name):
return self._dlltype(name)
+ __class_getitem__ = classmethod(_types.GenericAlias)
+
cdll = LibraryLoader(CDLL)
pydll = LibraryLoader(PyDLL)
diff --git a/Lib/ctypes/test/test_callbacks.py b/Lib/ctypes/test/test_callbacks.py
index f622093df61da5..d8e9c5a760e2c2 100644
--- a/Lib/ctypes/test/test_callbacks.py
+++ b/Lib/ctypes/test/test_callbacks.py
@@ -1,5 +1,7 @@
import functools
import unittest
+from test import support
+
from ctypes import *
from ctypes.test import need_symbol
import _ctypes_test
@@ -287,7 +289,36 @@ def callback(check, s):
self.assertEqual(s.second, check.second)
self.assertEqual(s.third, check.third)
-################################################################
+ def test_callback_too_many_args(self):
+ def func(*args):
+ return len(args)
+
+ CTYPES_MAX_ARGCOUNT = 1024
+ proto = CFUNCTYPE(c_int, *(c_int,) * CTYPES_MAX_ARGCOUNT)
+ cb = proto(func)
+ args1 = (1,) * CTYPES_MAX_ARGCOUNT
+ self.assertEqual(cb(*args1), CTYPES_MAX_ARGCOUNT)
+
+ args2 = (1,) * (CTYPES_MAX_ARGCOUNT + 1)
+ with self.assertRaises(ArgumentError):
+ cb(*args2)
+
+ def test_convert_result_error(self):
+ def func():
+ return ("tuple",)
+
+ proto = CFUNCTYPE(c_int)
+ ctypes_func = proto(func)
+ with support.catch_unraisable_exception() as cm:
+ # don't test the result since it is an uninitialized value
+ result = ctypes_func()
+
+ self.assertIsInstance(cm.unraisable.exc_value, TypeError)
+ self.assertEqual(cm.unraisable.err_msg,
+ "Exception ignored on converting result "
+ "of ctypes callback function")
+ self.assertIs(cm.unraisable.object, func)
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/ctypes/test/test_loading.py b/Lib/ctypes/test/test_loading.py
index 9b97d80086044d..ba655bceb8b215 100644
--- a/Lib/ctypes/test/test_loading.py
+++ b/Lib/ctypes/test/test_loading.py
@@ -3,7 +3,6 @@
import shutil
import subprocess
import sys
-import sysconfig
import unittest
import test.support
from ctypes.util import find_library
@@ -159,7 +158,8 @@ def should_fail(command):
should_pass("WinDLL('./_sqlite3.dll')")
# Insecure load flags should succeed
- should_pass("WinDLL('_sqlite3.dll', winmode=0)")
+ # Clear the DLL directory to avoid safe search settings propagating
+ should_pass("windll.kernel32.SetDllDirectoryW(None); WinDLL('_sqlite3.dll', winmode=0)")
# Full path load without DLL_LOAD_DIR shouldn't find dependency
should_fail("WinDLL(nt._getfullpathname('_sqlite3.dll'), " +
diff --git a/Lib/ctypes/test/test_numbers.py b/Lib/ctypes/test/test_numbers.py
index c6d843b2cd987f..db500e812beb15 100644
--- a/Lib/ctypes/test/test_numbers.py
+++ b/Lib/ctypes/test/test_numbers.py
@@ -134,8 +134,7 @@ def __index__(self):
for t in signed_types + unsigned_types:
self.assertRaises(TypeError, t, 3.14)
self.assertRaises(TypeError, t, f)
- with self.assertWarns(DeprecationWarning):
- self.assertEqual(t(d).value, 2)
+ self.assertRaises(TypeError, t, d)
self.assertEqual(t(i).value, 2)
def test_sizes(self):
diff --git a/Lib/ctypes/test/test_random_things.py b/Lib/ctypes/test/test_random_things.py
index ee5b2128ea0fab..2988e275cf4bbf 100644
--- a/Lib/ctypes/test/test_random_things.py
+++ b/Lib/ctypes/test/test_random_things.py
@@ -1,5 +1,9 @@
from ctypes import *
-import unittest, sys
+import contextlib
+from test import support
+import unittest
+import sys
+
def callback_func(arg):
42 / arg
@@ -34,41 +38,40 @@ class CallbackTracbackTestCase(unittest.TestCase):
# created, then a full traceback printed. When SystemExit is
# raised in a callback function, the interpreter exits.
- def capture_stderr(self, func, *args, **kw):
- # helper - call function 'func', and return the captured stderr
- import io
- old_stderr = sys.stderr
- logger = sys.stderr = io.StringIO()
- try:
- func(*args, **kw)
- finally:
- sys.stderr = old_stderr
- return logger.getvalue()
+ @contextlib.contextmanager
+ def expect_unraisable(self, exc_type, exc_msg=None):
+ with support.catch_unraisable_exception() as cm:
+ yield
+
+ self.assertIsInstance(cm.unraisable.exc_value, exc_type)
+ if exc_msg is not None:
+ self.assertEqual(str(cm.unraisable.exc_value), exc_msg)
+ self.assertEqual(cm.unraisable.err_msg,
+ "Exception ignored on calling ctypes "
+ "callback function")
+ self.assertIs(cm.unraisable.object, callback_func)
def test_ValueError(self):
cb = CFUNCTYPE(c_int, c_int)(callback_func)
- out = self.capture_stderr(cb, 42)
- self.assertEqual(out.splitlines()[-1],
- "ValueError: 42")
+ with self.expect_unraisable(ValueError, '42'):
+ cb(42)
def test_IntegerDivisionError(self):
cb = CFUNCTYPE(c_int, c_int)(callback_func)
- out = self.capture_stderr(cb, 0)
- self.assertEqual(out.splitlines()[-1][:19],
- "ZeroDivisionError: ")
+ with self.expect_unraisable(ZeroDivisionError):
+ cb(0)
def test_FloatDivisionError(self):
cb = CFUNCTYPE(c_int, c_double)(callback_func)
- out = self.capture_stderr(cb, 0.0)
- self.assertEqual(out.splitlines()[-1][:19],
- "ZeroDivisionError: ")
+ with self.expect_unraisable(ZeroDivisionError):
+ cb(0.0)
def test_TypeErrorDivisionError(self):
cb = CFUNCTYPE(c_int, c_char_p)(callback_func)
- out = self.capture_stderr(cb, b"spam")
- self.assertEqual(out.splitlines()[-1],
- "TypeError: "
- "unsupported operand type(s) for /: 'int' and 'bytes'")
+ err_msg = "unsupported operand type(s) for /: 'int' and 'bytes'"
+ with self.expect_unraisable(TypeError, err_msg):
+ cb(b"spam")
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/ctypes/test/test_unaligned_structures.py b/Lib/ctypes/test/test_unaligned_structures.py
index bcacfc8184b439..ee7fb45809bf7b 100644
--- a/Lib/ctypes/test/test_unaligned_structures.py
+++ b/Lib/ctypes/test/test_unaligned_structures.py
@@ -27,7 +27,6 @@ class Y(SwappedStructure):
class TestStructures(unittest.TestCase):
def test_native(self):
for typ in structures:
-## print typ.value
self.assertEqual(typ.value.offset, 1)
o = typ()
o.value = 4
@@ -35,7 +34,6 @@ def test_native(self):
def test_swapped(self):
for typ in byteswapped_structures:
-## print >> sys.stderr, typ.value
self.assertEqual(typ.value.offset, 1)
o = typ()
o.value = 4
diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py
index 00851c648a13b5..fc69508354bbe8 100644
--- a/Lib/dataclasses.py
+++ b/Lib/dataclasses.py
@@ -7,6 +7,7 @@
import builtins
import functools
import _thread
+from types import GenericAlias
__all__ = ['dataclass',
@@ -284,6 +285,8 @@ def __set_name__(self, owner, name):
# it.
func(self.default, owner, name)
+ __class_getitem__ = classmethod(GenericAlias)
+
class _DataclassParams:
__slots__ = ('init',
diff --git a/Lib/datetime.py b/Lib/datetime.py
index 67555191d02c18..3090978508c921 100644
--- a/Lib/datetime.py
+++ b/Lib/datetime.py
@@ -11,6 +11,7 @@
import time as _time
import math as _math
import sys
+from operator import index as _index
def _cmp(x, y):
return 0 if x == y else 1 if x > y else -1
@@ -380,42 +381,10 @@ def _check_utc_offset(name, offset):
"-timedelta(hours=24) and timedelta(hours=24)" %
(name, offset))
-def _check_int_field(value):
- if isinstance(value, int):
- return value
- if isinstance(value, float):
- raise TypeError('integer argument expected, got float')
- try:
- value = value.__index__()
- except AttributeError:
- pass
- else:
- if not isinstance(value, int):
- raise TypeError('__index__ returned non-int (type %s)' %
- type(value).__name__)
- return value
- orig = value
- try:
- value = value.__int__()
- except AttributeError:
- pass
- else:
- if not isinstance(value, int):
- raise TypeError('__int__ returned non-int (type %s)' %
- type(value).__name__)
- import warnings
- warnings.warn("an integer is required (got type %s)" %
- type(orig).__name__,
- DeprecationWarning,
- stacklevel=2)
- return value
- raise TypeError('an integer is required (got type %s)' %
- type(value).__name__)
-
def _check_date_fields(year, month, day):
- year = _check_int_field(year)
- month = _check_int_field(month)
- day = _check_int_field(day)
+ year = _index(year)
+ month = _index(month)
+ day = _index(day)
if not MINYEAR <= year <= MAXYEAR:
raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year)
if not 1 <= month <= 12:
@@ -426,10 +395,10 @@ def _check_date_fields(year, month, day):
return year, month, day
def _check_time_fields(hour, minute, second, microsecond, fold):
- hour = _check_int_field(hour)
- minute = _check_int_field(minute)
- second = _check_int_field(second)
- microsecond = _check_int_field(microsecond)
+ hour = _index(hour)
+ minute = _index(minute)
+ second = _index(second)
+ microsecond = _index(microsecond)
if not 0 <= hour <= 23:
raise ValueError('hour must be in 0..23', hour)
if not 0 <= minute <= 59:
@@ -1095,7 +1064,7 @@ def isoweekday(self):
return self.toordinal() % 7 or 7
def isocalendar(self):
- """Return a 3-tuple containing ISO year, week number, and weekday.
+ """Return a named tuple containing ISO year, week number, and weekday.
The first ISO week of the year is the (Mon-Sun) week
containing the year's first Thursday; everything else derives
@@ -1120,7 +1089,7 @@ def isocalendar(self):
if today >= _isoweek1monday(year+1):
year += 1
week = 0
- return year, week+1, day+1
+ return _IsoCalendarDate(year, week+1, day+1)
# Pickle support.
@@ -1210,6 +1179,36 @@ def __reduce__(self):
else:
return (self.__class__, args, state)
+
+class IsoCalendarDate(tuple):
+
+ def __new__(cls, year, week, weekday, /):
+ return super().__new__(cls, (year, week, weekday))
+
+ @property
+ def year(self):
+ return self[0]
+
+ @property
+ def week(self):
+ return self[1]
+
+ @property
+ def weekday(self):
+ return self[2]
+
+ def __reduce__(self):
+ # This code is intended to pickle the object without making the
+ # class public. See https://bugs.python.org/msg352381
+ return (tuple, (tuple(self),))
+
+ def __repr__(self):
+ return (f'{self.__class__.__name__}'
+ f'(year={self[0]}, week={self[1]}, weekday={self[2]})')
+
+
+_IsoCalendarDate = IsoCalendarDate
+del IsoCalendarDate
_tzinfo_class = tzinfo
class time:
@@ -1559,6 +1558,7 @@ def __reduce__(self):
time.max = time(23, 59, 59, 999999)
time.resolution = timedelta(microseconds=1)
+
class datetime(date):
"""datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]])
@@ -2508,13 +2508,13 @@ def _name_from_offset(delta):
# Clean up unused names
del (_DAYNAMES, _DAYS_BEFORE_MONTH, _DAYS_IN_MONTH, _DI100Y, _DI400Y,
_DI4Y, _EPOCH, _MAXORDINAL, _MONTHNAMES, _build_struct_time,
- _check_date_fields, _check_int_field, _check_time_fields,
+ _check_date_fields, _check_time_fields,
_check_tzinfo_arg, _check_tzname, _check_utc_offset, _cmp, _cmperror,
_date_class, _days_before_month, _days_before_year, _days_in_month,
- _format_time, _format_offset, _is_leap, _isoweek1monday, _math,
+ _format_time, _format_offset, _index, _is_leap, _isoweek1monday, _math,
_ord2ymd, _time, _time_class, _tzinfo_class, _wrap_strftime, _ymd2ord,
_divide_and_round, _parse_isoformat_date, _parse_isoformat_time,
- _parse_hh_mm_ss_ff)
+ _parse_hh_mm_ss_ff, _IsoCalendarDate)
# XXX Since import * above excludes names that start with _,
# docstring does not get overwritten. In the future, it may be
# appropriate to maintain a single module level docstring and
diff --git a/Lib/difflib.py b/Lib/difflib.py
index 5d756436a67e19..0dda80d3875739 100644
--- a/Lib/difflib.py
+++ b/Lib/difflib.py
@@ -32,6 +32,7 @@
from heapq import nlargest as _nlargest
from collections import namedtuple as _namedtuple
+from types import GenericAlias
Match = _namedtuple('Match', 'a b size')
@@ -129,7 +130,7 @@ class SequenceMatcher:
set_seq2(b)
Set the second sequence to be compared.
- find_longest_match(alo, ahi, blo, bhi)
+ find_longest_match(alo=0, ahi=None, blo=0, bhi=None)
Find longest matching block in a[alo:ahi] and b[blo:bhi].
get_matching_blocks()
@@ -333,9 +334,11 @@ def __chain_b(self):
for elt in popular: # ditto; as fast for 1% deletion
del b2j[elt]
- def find_longest_match(self, alo, ahi, blo, bhi):
+ def find_longest_match(self, alo=0, ahi=None, blo=0, bhi=None):
"""Find longest matching block in a[alo:ahi] and b[blo:bhi].
+ By default it will find the longest match in the entirety of a and b.
+
If isjunk is not defined:
Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
@@ -390,6 +393,10 @@ def find_longest_match(self, alo, ahi, blo, bhi):
# the unique 'b's and then matching the first two 'a's.
a, b, b2j, isbjunk = self.a, self.b, self.b2j, self.bjunk.__contains__
+ if ahi is None:
+ ahi = len(a)
+ if bhi is None:
+ bhi = len(b)
besti, bestj, bestsize = alo, blo, 0
# find longest junk-free match
# during an iteration of the loop, j2len[j] = length of longest
@@ -685,6 +692,9 @@ def real_quick_ratio(self):
# shorter sequence
return _calculate_ratio(min(la, lb), la + lb)
+ __class_getitem__ = classmethod(GenericAlias)
+
+
def get_close_matches(word, possibilities, n=3, cutoff=0.6):
"""Use SequenceMatcher to return list of the best "good enough" matches.
diff --git a/Lib/distutils/_msvccompiler.py b/Lib/distutils/_msvccompiler.py
index e8e4b717b9736f..af8099a4078192 100644
--- a/Lib/distutils/_msvccompiler.py
+++ b/Lib/distutils/_msvccompiler.py
@@ -14,8 +14,6 @@
# ported to VS 2015 by Steve Dower
import os
-import shutil
-import stat
import subprocess
import winreg
@@ -65,8 +63,6 @@ def _find_vc2017():
If vswhere.exe is not available, by definition, VS 2017 is not
installed.
"""
- import json
-
root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
if not root:
return None, None
@@ -97,28 +93,11 @@ def _find_vc2017():
}
def _find_vcvarsall(plat_spec):
+ # bpo-38597: Removed vcruntime return value
_, best_dir = _find_vc2017()
- vcruntime = None
-
- if plat_spec in PLAT_SPEC_TO_RUNTIME:
- vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec]
- else:
- vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
-
- if best_dir:
- vcredist = os.path.join(best_dir, "..", "..", "redist", "MSVC", "**",
- vcruntime_plat, "Microsoft.VC14*.CRT", "vcruntime140.dll")
- try:
- import glob
- vcruntime = glob.glob(vcredist, recursive=True)[-1]
- except (ImportError, OSError, LookupError):
- vcruntime = None
if not best_dir:
best_version, best_dir = _find_vc2015()
- if best_version:
- vcruntime = os.path.join(best_dir, 'redist', vcruntime_plat,
- "Microsoft.VC140.CRT", "vcruntime140.dll")
if not best_dir:
log.debug("No suitable Visual C++ version found")
@@ -129,11 +108,7 @@ def _find_vcvarsall(plat_spec):
log.debug("%s cannot be found", vcvarsall)
return None, None
- if not vcruntime or not os.path.isfile(vcruntime):
- log.debug("%s cannot be found", vcruntime)
- vcruntime = None
-
- return vcvarsall, vcruntime
+ return vcvarsall, None
def _get_vc_env(plat_spec):
if os.getenv("DISTUTILS_USE_SDK"):
@@ -142,7 +117,7 @@ def _get_vc_env(plat_spec):
for key, value in os.environ.items()
}
- vcvarsall, vcruntime = _find_vcvarsall(plat_spec)
+ vcvarsall, _ = _find_vcvarsall(plat_spec)
if not vcvarsall:
raise DistutilsPlatformError("Unable to find vcvarsall.bat")
@@ -163,8 +138,6 @@ def _get_vc_env(plat_spec):
if key and value
}
- if vcruntime:
- env['py_vcruntime_redist'] = vcruntime
return env
def _find_exe(exe, paths=None):
@@ -194,12 +167,6 @@ def _find_exe(exe, paths=None):
'win-arm64' : 'x86_arm64'
}
-# A set containing the DLLs that are guaranteed to be available for
-# all micro versions of this Python version. Known extension
-# dependencies that are not in this set will be copied to the output
-# path.
-_BUNDLED_DLLS = frozenset(['vcruntime140.dll'])
-
class MSVCCompiler(CCompiler) :
"""Concrete class that implements an interface to Microsoft Visual C++,
as defined by the CCompiler abstract class."""
@@ -263,7 +230,6 @@ def initialize(self, plat_name=None):
self.rc = _find_exe("rc.exe", paths) # resource compiler
self.mc = _find_exe("mc.exe", paths) # message compiler
self.mt = _find_exe("mt.exe", paths) # message compiler
- self._vcruntime_redist = vc_env.get('py_vcruntime_redist', '')
for dir in vc_env.get('include', '').split(os.pathsep):
if dir:
@@ -274,13 +240,12 @@ def initialize(self, plat_name=None):
self.add_library_dir(dir.rstrip(os.sep))
self.preprocess_options = None
- # If vcruntime_redist is available, link against it dynamically. Otherwise,
- # use /MT[d] to build statically, then switch from libucrt[d].lib to ucrt[d].lib
- # later to dynamically link to ucrtbase but not vcruntime.
+ # bpo-38597: Always compile with dynamic linking
+ # Future releases of Python 3.x will include all past
+ # versions of vcruntime*.dll for compatibility.
self.compile_options = [
- '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG'
+ '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG', '/MD'
]
- self.compile_options.append('/MD' if self._vcruntime_redist else '/MT')
self.compile_options_debug = [
'/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG'
@@ -289,8 +254,6 @@ def initialize(self, plat_name=None):
ldflags = [
'/nologo', '/INCREMENTAL:NO', '/LTCG'
]
- if not self._vcruntime_redist:
- ldflags.extend(('/nodefaultlib:libucrt.lib', 'ucrt.lib'))
ldflags_debug = [
'/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL'
@@ -532,24 +495,11 @@ def link(self,
try:
log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args))
self.spawn([self.linker] + ld_args)
- self._copy_vcruntime(output_dir)
except DistutilsExecError as msg:
raise LinkError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
- def _copy_vcruntime(self, output_dir):
- vcruntime = self._vcruntime_redist
- if not vcruntime or not os.path.isfile(vcruntime):
- return
-
- if os.path.basename(vcruntime).lower() in _BUNDLED_DLLS:
- return
-
- log.debug('Copying "%s"', vcruntime)
- vcruntime = shutil.copy(vcruntime, output_dir)
- os.chmod(vcruntime, stat.S_IWRITE)
-
def spawn(self, cmd):
old_path = os.getenv('path')
try:
diff --git a/Lib/distutils/bcppcompiler.py b/Lib/distutils/bcppcompiler.py
index 9f4c432d90e77f..071fea5d038cb0 100644
--- a/Lib/distutils/bcppcompiler.py
+++ b/Lib/distutils/bcppcompiler.py
@@ -14,10 +14,10 @@
import os
from distutils.errors import \
- DistutilsExecError, DistutilsPlatformError, \
+ DistutilsExecError, \
CompileError, LibError, LinkError, UnknownFileError
from distutils.ccompiler import \
- CCompiler, gen_preprocess_options, gen_lib_options
+ CCompiler, gen_preprocess_options
from distutils.file_util import write_file
from distutils.dep_util import newer
from distutils import log
diff --git a/Lib/distutils/ccompiler.py b/Lib/distutils/ccompiler.py
index 4cfc6c7065e922..b5ef143e72c564 100644
--- a/Lib/distutils/ccompiler.py
+++ b/Lib/distutils/ccompiler.py
@@ -8,7 +8,7 @@
from distutils.spawn import spawn
from distutils.file_util import move_file
from distutils.dir_util import mkpath
-from distutils.dep_util import newer_pairwise, newer_group
+from distutils.dep_util import newer_group
from distutils.util import split_quoted, execute
from distutils import log
diff --git a/Lib/distutils/command/bdist_msi.py b/Lib/distutils/command/bdist_msi.py
index f335a34898620e..0863a1883e7205 100644
--- a/Lib/distutils/command/bdist_msi.py
+++ b/Lib/distutils/command/bdist_msi.py
@@ -6,7 +6,9 @@
Implements the bdist_msi command.
"""
-import sys, os
+import os
+import sys
+import warnings
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils.sysconfig import get_python_version
@@ -122,6 +124,12 @@ class bdist_msi(Command):
'3.5', '3.6', '3.7', '3.8', '3.9']
other_version = 'X'
+ def __init__(self, *args, **kw):
+ super().__init__(*args, **kw)
+ warnings.warn("bdist_msi command is deprecated since Python 3.9, "
+ "use bdist_wheel (wheel packages) instead",
+ DeprecationWarning, 2)
+
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
diff --git a/Lib/distutils/command/bdist_rpm.py b/Lib/distutils/command/bdist_rpm.py
index 74381cc69a6ced..550cbfa1e28a23 100644
--- a/Lib/distutils/command/bdist_rpm.py
+++ b/Lib/distutils/command/bdist_rpm.py
@@ -6,7 +6,6 @@
import subprocess, sys, os
from distutils.core import Command
from distutils.debug import DEBUG
-from distutils.util import get_platform
from distutils.file_util import write_file
from distutils.errors import *
from distutils.sysconfig import get_python_version
diff --git a/Lib/distutils/command/bdist_wininst.py b/Lib/distutils/command/bdist_wininst.py
index b5ed6f041e19a0..0e9ddaa21419e9 100644
--- a/Lib/distutils/command/bdist_wininst.py
+++ b/Lib/distutils/command/bdist_wininst.py
@@ -8,7 +8,7 @@
import warnings
from distutils.core import Command
from distutils.util import get_platform
-from distutils.dir_util import create_tree, remove_tree
+from distutils.dir_util import remove_tree
from distutils.errors import *
from distutils.sysconfig import get_python_version
from distutils import log
diff --git a/Lib/distutils/command/check.py b/Lib/distutils/command/check.py
index 7ceabd3adf22d2..ada250064678ee 100644
--- a/Lib/distutils/command/check.py
+++ b/Lib/distutils/command/check.py
@@ -11,7 +11,6 @@
from docutils.parsers.rst import Parser
from docutils import frontend
from docutils import nodes
- from io import StringIO
class SilentReporter(Reporter):
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index c625c95bf7e8cf..aaa300efa96e6e 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -30,14 +30,14 @@
INSTALL_SCHEMES = {
'unix_prefix': {
'purelib': '$base/lib/python$py_version_short/site-packages',
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
+ 'platlib': '$platbase/$platlibdir/python$py_version_short/site-packages',
'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
},
'unix_home': {
'purelib': '$base/lib/python',
- 'platlib': '$base/lib/python',
+ 'platlib': '$base/$platlibdir/python',
'headers': '$base/include/python/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
@@ -298,6 +298,7 @@ def finalize_options(self):
'sys_exec_prefix': exec_prefix,
'exec_prefix': exec_prefix,
'abiflags': abiflags,
+ 'platlibdir': sys.platlibdir,
}
if HAS_USER_SITE:
diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py
index 11afa24b777a12..95e9fda186fc8f 100644
--- a/Lib/distutils/command/upload.py
+++ b/Lib/distutils/command/upload.py
@@ -7,7 +7,6 @@
import os
import io
-import platform
import hashlib
from base64 import standard_b64encode
from urllib.request import urlopen, Request, HTTPError
@@ -17,6 +16,16 @@
from distutils.spawn import spawn
from distutils import log
+
+# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
+# https://bugs.python.org/issue40698
+_FILE_CONTENT_DIGESTS = {
+ "md5_digest": getattr(hashlib, "md5", None),
+ "sha256_digest": getattr(hashlib, "sha256", None),
+ "blake2_256_digest": getattr(hashlib, "blake2b", None),
+}
+
+
class upload(PyPIRCCommand):
description = "upload binary package to PyPI"
@@ -88,6 +97,7 @@ def upload_file(self, command, pyversion, filename):
content = f.read()
finally:
f.close()
+
meta = self.distribution.metadata
data = {
# action
@@ -102,7 +112,6 @@ def upload_file(self, command, pyversion, filename):
'content': (os.path.basename(filename),content),
'filetype': command,
'pyversion': pyversion,
- 'md5_digest': hashlib.md5(content).hexdigest(),
# additional meta-data
'metadata_version': '1.0',
@@ -124,6 +133,16 @@ def upload_file(self, command, pyversion, filename):
data['comment'] = ''
+ # file content digests
+ for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
+ if digest_cons is None:
+ continue
+ try:
+ data[digest_name] = digest_cons(content).hexdigest()
+ except ValueError:
+ # hash digest not available or blocked by security policy
+ pass
+
if self.sign:
with open(filename + ".asc", "rb") as f:
data['gpg_signature'] = (os.path.basename(filename) + ".asc",
diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py
index 6c5d77746b6f56..66c12dd35830b2 100644
--- a/Lib/distutils/cygwinccompiler.py
+++ b/Lib/distutils/cygwinccompiler.py
@@ -51,12 +51,10 @@
from subprocess import Popen, PIPE, check_output
import re
-from distutils.ccompiler import gen_preprocess_options, gen_lib_options
from distutils.unixccompiler import UnixCCompiler
from distutils.file_util import write_file
from distutils.errors import (DistutilsExecError, CCompilerError,
CompileError, UnknownFileError)
-from distutils import log
from distutils.version import LooseVersion
from distutils.spawn import find_executable
diff --git a/Lib/distutils/msvc9compiler.py b/Lib/distutils/msvc9compiler.py
index 4c0036a0f13305..6934e964abd69b 100644
--- a/Lib/distutils/msvc9compiler.py
+++ b/Lib/distutils/msvc9compiler.py
@@ -19,8 +19,7 @@
from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError
-from distutils.ccompiler import CCompiler, gen_preprocess_options, \
- gen_lib_options
+from distutils.ccompiler import CCompiler, gen_lib_options
from distutils import log
from distutils.util import get_platform
diff --git a/Lib/distutils/msvccompiler.py b/Lib/distutils/msvccompiler.py
index d1de2fbfcb947b..d5857cb1ffe423 100644
--- a/Lib/distutils/msvccompiler.py
+++ b/Lib/distutils/msvccompiler.py
@@ -13,7 +13,7 @@
DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError
from distutils.ccompiler import \
- CCompiler, gen_preprocess_options, gen_lib_options
+ CCompiler, gen_lib_options
from distutils import log
_can_read_reg = False
diff --git a/Lib/distutils/spawn.py b/Lib/distutils/spawn.py
index ceb94945dc8bed..aad277b0ca7767 100644
--- a/Lib/distutils/spawn.py
+++ b/Lib/distutils/spawn.py
@@ -8,11 +8,18 @@
import sys
import os
+import subprocess
from distutils.errors import DistutilsPlatformError, DistutilsExecError
from distutils.debug import DEBUG
from distutils import log
+
+if sys.platform == 'darwin':
+ _cfg_target = None
+ _cfg_target_split = None
+
+
def spawn(cmd, search_path=1, verbose=0, dry_run=0):
"""Run another program, specified as a command list 'cmd', in a new process.
@@ -32,64 +39,16 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0):
# cmd is documented as a list, but just in case some code passes a tuple
# in, protect our %-formatting code against horrible death
cmd = list(cmd)
- if os.name == 'posix':
- _spawn_posix(cmd, search_path, dry_run=dry_run)
- elif os.name == 'nt':
- _spawn_nt(cmd, search_path, dry_run=dry_run)
- else:
- raise DistutilsPlatformError(
- "don't know how to spawn programs on platform '%s'" % os.name)
-
-def _nt_quote_args(args):
- """Quote command-line arguments for DOS/Windows conventions.
-
- Just wraps every argument which contains blanks in double quotes, and
- returns a new argument list.
- """
- # XXX this doesn't seem very robust to me -- but if the Windows guys
- # say it'll work, I guess I'll have to accept it. (What if an arg
- # contains quotes? What other magic characters, other than spaces,
- # have to be escaped? Is there an escaping mechanism other than
- # quoting?)
- for i, arg in enumerate(args):
- if ' ' in arg:
- args[i] = '"%s"' % arg
- return args
-
-def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
- executable = cmd[0]
- cmd = _nt_quote_args(cmd)
- if search_path:
- # either we find one or it stays the same
- executable = find_executable(executable) or executable
- log.info(' '.join([executable] + cmd[1:]))
- if not dry_run:
- # spawn for NT requires a full path to the .exe
- try:
- rc = os.spawnv(os.P_WAIT, executable, cmd)
- except OSError as exc:
- # this seems to happen when the command isn't found
- if not DEBUG:
- cmd = executable
- raise DistutilsExecError(
- "command %r failed: %s" % (cmd, exc.args[-1]))
- if rc != 0:
- # and this reflects the command running but failing
- if not DEBUG:
- cmd = executable
- raise DistutilsExecError(
- "command %r failed with exit status %d" % (cmd, rc))
-
-if sys.platform == 'darwin':
- _cfg_target = None
- _cfg_target_split = None
-def _spawn_posix(cmd, search_path=1, verbose=0, dry_run=0):
log.info(' '.join(cmd))
if dry_run:
return
- executable = cmd[0]
- exec_fn = search_path and os.execvp or os.execv
+
+ if search_path:
+ executable = find_executable(cmd[0])
+ if executable is not None:
+ cmd[0] = executable
+
env = None
if sys.platform == 'darwin':
global _cfg_target, _cfg_target_split
@@ -111,60 +70,17 @@ def _spawn_posix(cmd, search_path=1, verbose=0, dry_run=0):
raise DistutilsPlatformError(my_msg)
env = dict(os.environ,
MACOSX_DEPLOYMENT_TARGET=cur_target)
- exec_fn = search_path and os.execvpe or os.execve
- pid = os.fork()
- if pid == 0: # in the child
- try:
- if env is None:
- exec_fn(executable, cmd)
- else:
- exec_fn(executable, cmd, env)
- except OSError as e:
- if not DEBUG:
- cmd = executable
- sys.stderr.write("unable to execute %r: %s\n"
- % (cmd, e.strerror))
- os._exit(1)
+ proc = subprocess.Popen(cmd, env=env)
+ proc.wait()
+ exitcode = proc.returncode
+
+ if exitcode:
if not DEBUG:
- cmd = executable
- sys.stderr.write("unable to execute %r for unknown reasons" % cmd)
- os._exit(1)
- else: # in the parent
- # Loop until the child either exits or is terminated by a signal
- # (ie. keep waiting if it's merely stopped)
- while True:
- try:
- pid, status = os.waitpid(pid, 0)
- except OSError as exc:
- if not DEBUG:
- cmd = executable
- raise DistutilsExecError(
- "command %r failed: %s" % (cmd, exc.args[-1]))
- if os.WIFSIGNALED(status):
- if not DEBUG:
- cmd = executable
- raise DistutilsExecError(
- "command %r terminated by signal %d"
- % (cmd, os.WTERMSIG(status)))
- elif os.WIFEXITED(status):
- exit_status = os.WEXITSTATUS(status)
- if exit_status == 0:
- return # hey, it succeeded!
- else:
- if not DEBUG:
- cmd = executable
- raise DistutilsExecError(
- "command %r failed with exit status %d"
- % (cmd, exit_status))
- elif os.WIFSTOPPED(status):
- continue
- else:
- if not DEBUG:
- cmd = executable
- raise DistutilsExecError(
- "unknown error executing %r: termination status %d"
- % (cmd, status))
+ cmd = cmd[0]
+ raise DistutilsExecError(
+ "command %r failed with exit code %s" % (cmd, exitcode))
+
def find_executable(executable, path=None):
"""Tries to find 'executable' in the directories listed in 'path'.
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index b51629eb94f825..37feae5df72c93 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -15,7 +15,6 @@
import sys
from .errors import DistutilsPlatformError
-from .util import get_platform, get_host_platform
# These are needed in a couple of spots, so just compute them once.
PREFIX = os.path.normpath(sys.prefix)
@@ -146,8 +145,15 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
prefix = plat_specific and EXEC_PREFIX or PREFIX
if os.name == "posix":
- libpython = os.path.join(prefix,
- "lib", "python" + get_python_version())
+ if plat_specific or standard_lib:
+ # Platform-specific modules (any module from a non-pure-Python
+ # module distribution) or standard Python library modules.
+ libdir = sys.platlibdir
+ else:
+ # Pure Python
+ libdir = "lib"
+ libpython = os.path.join(prefix, libdir,
+ "python" + get_python_version())
if standard_lib:
return libpython
else:
diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py
index 1b939cbd5db2bf..5d2e69e3e6a8f6 100644
--- a/Lib/distutils/tests/__init__.py
+++ b/Lib/distutils/tests/__init__.py
@@ -15,6 +15,7 @@
import os
import sys
import unittest
+import warnings
from test.support import run_unittest
@@ -22,6 +23,7 @@
def test_suite():
+ old_filters = warnings.filters[:]
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
@@ -29,6 +31,10 @@ def test_suite():
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
+ # bpo-40055: Save/restore warnings filters to leave them unchanged.
+ # Importing tests imports docutils which imports pkg_resources which adds a
+ # warnings filter.
+ warnings.filters[:] = old_filters
return suite
diff --git a/Lib/distutils/tests/test_bdist_msi.py b/Lib/distutils/tests/test_bdist_msi.py
index 15d8bdff2b4f55..418e60ec729779 100644
--- a/Lib/distutils/tests/test_bdist_msi.py
+++ b/Lib/distutils/tests/test_bdist_msi.py
@@ -1,7 +1,7 @@
"""Tests for distutils.command.bdist_msi."""
import sys
import unittest
-from test.support import run_unittest
+from test.support import run_unittest, check_warnings
from distutils.tests import support
@@ -14,7 +14,8 @@ def test_minimal(self):
# minimal test XXX need more tests
from distutils.command.bdist_msi import bdist_msi
project_dir, dist = self.create_dist()
- cmd = bdist_msi(dist)
+ with check_warnings(("", DeprecationWarning)):
+ cmd = bdist_msi(dist)
cmd.ensure_finalized()
diff --git a/Lib/distutils/tests/test_build_clib.py b/Lib/distutils/tests/test_build_clib.py
index 85d09906f228d4..abd8313770ef7a 100644
--- a/Lib/distutils/tests/test_build_clib.py
+++ b/Lib/distutils/tests/test_build_clib.py
@@ -8,7 +8,6 @@
from distutils.command.build_clib import build_clib
from distutils.errors import DistutilsSetupError
from distutils.tests import support
-from distutils.spawn import find_executable
class BuildCLibTestCase(support.TempdirManager,
support.LoggingSilencer,
diff --git a/Lib/distutils/tests/test_build_ext.py b/Lib/distutils/tests/test_build_ext.py
index 7e3eafa8ef231e..5e47e0773a9649 100644
--- a/Lib/distutils/tests/test_build_ext.py
+++ b/Lib/distutils/tests/test_build_ext.py
@@ -312,8 +312,8 @@ def test_unicode_module_names(self):
dist = Distribution({'name': 'xx', 'ext_modules': modules})
cmd = self.build_ext(dist)
cmd.ensure_finalized()
- self.assertRegex(cmd.get_ext_filename(modules[0].name), r'foo\..*')
- self.assertRegex(cmd.get_ext_filename(modules[1].name), r'föö\..*')
+ self.assertRegex(cmd.get_ext_filename(modules[0].name), r'foo(_d)?\..*')
+ self.assertRegex(cmd.get_ext_filename(modules[1].name), r'föö(_d)?\..*')
self.assertEqual(cmd.get_export_symbols(modules[0]), ['PyInit_foo'])
self.assertEqual(cmd.get_export_symbols(modules[1]), ['PyInitU_f_gkaa'])
diff --git a/Lib/distutils/tests/test_config_cmd.py b/Lib/distutils/tests/test_config_cmd.py
index b735fd334d8780..9aeab07b468361 100644
--- a/Lib/distutils/tests/test_config_cmd.py
+++ b/Lib/distutils/tests/test_config_cmd.py
@@ -39,7 +39,6 @@ def test_dump_file(self):
@unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
def test_search_cpp(self):
- import shutil
cmd = missing_compiler_executable(['preprocessor'])
if cmd is not None:
self.skipTest('The %r command is not found' % cmd)
@@ -47,8 +46,7 @@ def test_search_cpp(self):
cmd = config(dist)
cmd._check_compiler()
compiler = cmd.compiler
- is_xlc = shutil.which(compiler.preprocessor[0]).startswith("/usr/vac")
- if is_xlc:
+ if sys.platform[:3] == "aix" and "xlc" in compiler.preprocessor[0].lower():
self.skipTest('xlc: The -E option overrides the -P, -o, and -qsyntaxonly options')
# simple pattern searches
diff --git a/Lib/distutils/tests/test_dist.py b/Lib/distutils/tests/test_dist.py
index cc34725a99efa4..60956dadef2346 100644
--- a/Lib/distutils/tests/test_dist.py
+++ b/Lib/distutils/tests/test_dist.py
@@ -8,7 +8,7 @@
from unittest import mock
-from distutils.dist import Distribution, fix_help_options, DistributionMetadata
+from distutils.dist import Distribution, fix_help_options
from distutils.cmd import Command
from test.support import (
diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py
index 287ab1989e4083..51c80e0421a8c1 100644
--- a/Lib/distutils/tests/test_install.py
+++ b/Lib/distutils/tests/test_install.py
@@ -58,7 +58,8 @@ def check_path(got, expected):
libdir = os.path.join(destination, "lib", "python")
check_path(cmd.install_lib, libdir)
- check_path(cmd.install_platlib, libdir)
+ platlibdir = os.path.join(destination, sys.platlibdir, "python")
+ check_path(cmd.install_platlib, platlibdir)
check_path(cmd.install_purelib, libdir)
check_path(cmd.install_headers,
os.path.join(destination, "include", "python", "foopkg"))
diff --git a/Lib/distutils/tests/test_msvccompiler.py b/Lib/distutils/tests/test_msvccompiler.py
index 70a9c93a4e8805..b518d6a78b3326 100644
--- a/Lib/distutils/tests/test_msvccompiler.py
+++ b/Lib/distutils/tests/test_msvccompiler.py
@@ -32,57 +32,6 @@ def _find_vcvarsall(plat_spec):
finally:
_msvccompiler._find_vcvarsall = old_find_vcvarsall
- def test_compiler_options(self):
- import distutils._msvccompiler as _msvccompiler
- # suppress path to vcruntime from _find_vcvarsall to
- # check that /MT is added to compile options
- old_find_vcvarsall = _msvccompiler._find_vcvarsall
- def _find_vcvarsall(plat_spec):
- return old_find_vcvarsall(plat_spec)[0], None
- _msvccompiler._find_vcvarsall = _find_vcvarsall
- try:
- compiler = _msvccompiler.MSVCCompiler()
- compiler.initialize()
-
- self.assertIn('/MT', compiler.compile_options)
- self.assertNotIn('/MD', compiler.compile_options)
- finally:
- _msvccompiler._find_vcvarsall = old_find_vcvarsall
-
- def test_vcruntime_copy(self):
- import distutils._msvccompiler as _msvccompiler
- # force path to a known file - it doesn't matter
- # what we copy as long as its name is not in
- # _msvccompiler._BUNDLED_DLLS
- old_find_vcvarsall = _msvccompiler._find_vcvarsall
- def _find_vcvarsall(plat_spec):
- return old_find_vcvarsall(plat_spec)[0], __file__
- _msvccompiler._find_vcvarsall = _find_vcvarsall
- try:
- tempdir = self.mkdtemp()
- compiler = _msvccompiler.MSVCCompiler()
- compiler.initialize()
- compiler._copy_vcruntime(tempdir)
-
- self.assertTrue(os.path.isfile(os.path.join(
- tempdir, os.path.basename(__file__))))
- finally:
- _msvccompiler._find_vcvarsall = old_find_vcvarsall
-
- def test_vcruntime_skip_copy(self):
- import distutils._msvccompiler as _msvccompiler
-
- tempdir = self.mkdtemp()
- compiler = _msvccompiler.MSVCCompiler()
- compiler.initialize()
- dll = compiler._vcruntime_redist
- self.assertTrue(os.path.isfile(dll), dll or "")
-
- compiler._copy_vcruntime(tempdir)
-
- self.assertFalse(os.path.isfile(os.path.join(
- tempdir, os.path.basename(dll))), dll or "")
-
def test_get_vc_env_unicode(self):
import distutils._msvccompiler as _msvccompiler
diff --git a/Lib/distutils/tests/test_spawn.py b/Lib/distutils/tests/test_spawn.py
index f9ae69ef86b3da..cf1faad5f4dd5d 100644
--- a/Lib/distutils/tests/test_spawn.py
+++ b/Lib/distutils/tests/test_spawn.py
@@ -2,13 +2,11 @@
import os
import stat
import sys
-import unittest
-from unittest import mock
+import unittest.mock
from test.support import run_unittest, unix_shell
from test import support as test_support
from distutils.spawn import find_executable
-from distutils.spawn import _nt_quote_args
from distutils.spawn import spawn
from distutils.errors import DistutilsExecError
from distutils.tests import support
@@ -17,16 +15,6 @@ class SpawnTestCase(support.TempdirManager,
support.LoggingSilencer,
unittest.TestCase):
- def test_nt_quote_args(self):
-
- for (args, wanted) in ((['with space', 'nospace'],
- ['"with space"', 'nospace']),
- (['nochange', 'nospace'],
- ['nochange', 'nospace'])):
- res = _nt_quote_args(args)
- self.assertEqual(res, wanted)
-
-
@unittest.skipUnless(os.name in ('nt', 'posix'),
'Runs only under posix or nt')
def test_spawn(self):
diff --git a/Lib/distutils/tests/test_upload.py b/Lib/distutils/tests/test_upload.py
index c17d8e7d54e98c..bca5516d2f74f6 100644
--- a/Lib/distutils/tests/test_upload.py
+++ b/Lib/distutils/tests/test_upload.py
@@ -130,14 +130,30 @@ def test_upload(self):
# what did we send ?
headers = dict(self.last_open.req.headers)
- self.assertEqual(headers['Content-length'], '2162')
+ self.assertGreaterEqual(int(headers['Content-length']), 2162)
content_type = headers['Content-type']
self.assertTrue(content_type.startswith('multipart/form-data'))
self.assertEqual(self.last_open.req.get_method(), 'POST')
expected_url = 'https://upload.pypi.org/legacy/'
self.assertEqual(self.last_open.req.get_full_url(), expected_url)
- self.assertTrue(b'xxx' in self.last_open.req.data)
- self.assertIn(b'protocol_version', self.last_open.req.data)
+ data = self.last_open.req.data
+ self.assertIn(b'xxx',data)
+ self.assertIn(b'protocol_version', data)
+ self.assertIn(b'sha256_digest', data)
+ self.assertIn(
+ b'cd2eb0837c9b4c962c22d2ff8b5441b7b45805887f051d39bf133b583baf'
+ b'6860',
+ data
+ )
+ if b'md5_digest' in data:
+ self.assertIn(b'f561aaf6ef0bf14d4208bb46a4ccb3ad', data)
+ if b'blake2_256_digest' in data:
+ self.assertIn(
+ b'b6f289a27d4fe90da63c503bfe0a9b761a8f76bb86148565065f040be'
+ b'6d1c3044cf7ded78ef800509bccb4b648e507d88dc6383d67642aadcc'
+ b'ce443f1534330a',
+ data
+ )
# The PyPI response body was echoed
results = self.get_logs(INFO)
@@ -166,7 +182,7 @@ def test_upload_correct_cr(self):
cmd.run()
headers = dict(self.last_open.req.headers)
- self.assertEqual(headers['Content-length'], '2172')
+ self.assertGreaterEqual(int(headers['Content-length']), 2172)
self.assertIn(b'long description\r', self.last_open.req.data)
def test_upload_fails(self):
diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py
index d10a78da311405..4d7a6de740ab3a 100644
--- a/Lib/distutils/unixccompiler.py
+++ b/Lib/distutils/unixccompiler.py
@@ -288,7 +288,7 @@ def find_library_file(self, dirs, lib, debug=0):
# vs
# /usr/lib/libedit.dylib
cflags = sysconfig.get_config_var('CFLAGS')
- m = re.search(r'-isysroot\s+(\S+)', cflags)
+ m = re.search(r'-isysroot\s*(\S+)', cflags)
if m is None:
sysroot = '/'
else:
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 02299514bdb593..baa503c83f8757 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -211,6 +211,13 @@ def _normalize_module(module, depth=2):
else:
raise TypeError("Expected a module, string, or None")
+def _newline_convert(data):
+ # We have two cases to cover and we need to make sure we do
+ # them in the right order
+ for newline in ('\r\n', '\r'):
+ data = data.replace(newline, '\n')
+ return data
+
def _load_testfile(filename, package, module_relative, encoding):
if module_relative:
package = _normalize_module(package, 3)
@@ -221,7 +228,7 @@ def _load_testfile(filename, package, module_relative, encoding):
file_contents = file_contents.decode(encoding)
# get_data() opens files as 'rb', so one must do the equivalent
# conversion as universal newlines would do.
- return file_contents.replace(os.linesep, '\n'), filename
+ return _newline_convert(file_contents), filename
with open(filename, encoding=encoding) as f:
return f.read(), filename
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index 9c55ef7fb453be..51d355fbb0abc5 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -1218,12 +1218,21 @@ def get_bare_quoted_string(value):
if value[0] in WSP:
token, value = get_fws(value)
elif value[:2] == '=?':
+ valid_ew = False
try:
token, value = get_encoded_word(value)
bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
"encoded word inside quoted string"))
+ valid_ew = True
except errors.HeaderParseError:
token, value = get_qcontent(value)
+ # Collapse the whitespace between two encoded words that occur in a
+ # bare-quoted-string.
+ if valid_ew and len(bare_quoted_string) > 1:
+ if (bare_quoted_string[-1].token_type == 'fws' and
+ bare_quoted_string[-2].token_type == 'encoded-word'):
+ bare_quoted_string[-1] = EWWhiteSpaceTerminal(
+ bare_quoted_string[-1], 'fws')
else:
token, value = get_qcontent(value)
bare_quoted_string.append(token)
diff --git a/Lib/email/contentmanager.py b/Lib/email/contentmanager.py
index b904ded94c92ef..2b4b8757f46f62 100644
--- a/Lib/email/contentmanager.py
+++ b/Lib/email/contentmanager.py
@@ -146,13 +146,13 @@ def embedded_body(lines): return linesep.join(lines) + linesep
def normal_body(lines): return b'\n'.join(lines) + b'\n'
if cte==None:
# Use heuristics to decide on the "best" encoding.
- try:
- return '7bit', normal_body(lines).decode('ascii')
- except UnicodeDecodeError:
- pass
- if (policy.cte_type == '8bit' and
- max(len(x) for x in lines) <= policy.max_line_length):
- return '8bit', normal_body(lines).decode('ascii', 'surrogateescape')
+ if max(len(x) for x in lines) <= policy.max_line_length:
+ try:
+ return '7bit', normal_body(lines).decode('ascii')
+ except UnicodeDecodeError:
+ pass
+ if policy.cte_type == '8bit':
+ return '8bit', normal_body(lines).decode('ascii', 'surrogateescape')
sniff = embedded_body(lines[:10])
sniff_qp = quoprimime.body_encode(sniff.decode('latin-1'),
policy.max_line_length)
diff --git a/Lib/email/headerregistry.py b/Lib/email/headerregistry.py
index cc1d1912918149..5d84fc0d82d0b0 100644
--- a/Lib/email/headerregistry.py
+++ b/Lib/email/headerregistry.py
@@ -31,6 +31,11 @@ def __init__(self, display_name='', username='', domain='', addr_spec=None):
without any Content Transfer Encoding.
"""
+
+ inputs = ''.join(filter(None, (display_name, username, domain, addr_spec)))
+ if '\r' in inputs or '\n' in inputs:
+ raise ValueError("invalid arguments; address parts cannot contain CR or LF")
+
# This clause with its potential 'raise' may only happen when an
# application program creates an Address object using an addr_spec
# keyword. The email library code itself must always supply username
diff --git a/Lib/encodings/punycode.py b/Lib/encodings/punycode.py
index 66c51013ea431a..1c5726447077b1 100644
--- a/Lib/encodings/punycode.py
+++ b/Lib/encodings/punycode.py
@@ -143,7 +143,7 @@ def decode_generalized_number(extended, extpos, bias, errors):
digit = char - 22 # 0x30-26
elif errors == "strict":
raise UnicodeError("Invalid extended code point '%s'"
- % extended[extpos])
+ % extended[extpos-1])
else:
return extpos, None
t = T(j, bias)
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 386ed6c25c763e..545fce656fd6f5 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -1,6 +1,7 @@
import os
import os.path
import sys
+import runpy
import tempfile
from importlib import resources
@@ -26,9 +27,18 @@ def _run_pip(args, additional_paths=None):
if additional_paths is not None:
sys.path = additional_paths + sys.path
- # Install the bundled software
- import pip._internal
- return pip._internal.main(args)
+ # Invoke pip as if it's the main module, and catch the exit.
+ backup_argv = sys.argv[:]
+ sys.argv[1:] = args
+ try:
+ # run_module() alters sys.modules and sys.argv, but restores them at exit
+ runpy.run_module("pip", run_name="__main__", alter_sys=True)
+ except SystemExit as exc:
+ return exc.code
+ finally:
+ sys.argv[:] = backup_argv
+
+ raise SystemError("pip did not exit, this should never happen")
def version():
diff --git a/Lib/enum.py b/Lib/enum.py
index 0be1b60cd6d8aa..49b552ba0ecf6c 100644
--- a/Lib/enum.py
+++ b/Lib/enum.py
@@ -60,6 +60,7 @@ def __init__(self):
self._member_names = []
self._last_values = []
self._ignore = []
+ self._auto_called = False
def __setitem__(self, key, value):
"""Changes anything not dundered or not a descriptor.
@@ -77,6 +78,9 @@ def __setitem__(self, key, value):
):
raise ValueError('_names_ are reserved for future Enum use')
if key == '_generate_next_value_':
+ # check if members already defined as auto()
+ if self._auto_called:
+ raise TypeError("_generate_next_value_ must be defined before members")
setattr(self, '_generate_next_value', value)
elif key == '_ignore_':
if isinstance(value, str):
@@ -100,6 +104,7 @@ def __setitem__(self, key, value):
# enum overwriting a descriptor?
raise TypeError('%r already defined as: %r' % (key, self[key]))
if isinstance(value, auto):
+ self._auto_called = True
if value.value == _auto_null:
value.value = self._generate_next_value(key, 1, len(self._member_names), self._last_values[:])
value = value.value
diff --git a/Lib/filecmp.py b/Lib/filecmp.py
index cfdca1e924f552..7a4da6beb50500 100644
--- a/Lib/filecmp.py
+++ b/Lib/filecmp.py
@@ -13,6 +13,7 @@
import os
import stat
from itertools import filterfalse
+from types import GenericAlias
__all__ = ['clear_cache', 'cmp', 'dircmp', 'cmpfiles', 'DEFAULT_IGNORES']
@@ -247,6 +248,9 @@ def __getattr__(self, attr):
self.methodmap[attr](self)
return getattr(self, attr)
+ __class_getitem__ = classmethod(GenericAlias)
+
+
def cmpfiles(a, b, common, shallow=True):
"""Compare common files in two directories.
diff --git a/Lib/fileinput.py b/Lib/fileinput.py
index 166c631689747d..0c31f93ed8f2e7 100644
--- a/Lib/fileinput.py
+++ b/Lib/fileinput.py
@@ -73,6 +73,7 @@
"""
import sys, os
+from types import GenericAlias
__all__ = ["input", "close", "nextfile", "filename", "lineno", "filelineno",
"fileno", "isfirstline", "isstdin", "FileInput", "hook_compressed",
@@ -209,10 +210,15 @@ def __init__(self, files=None, inplace=False, backup="", *,
self._isstdin = False
self._backupfilename = None
# restrict mode argument to reading modes
- if mode not in ('r', 'rb'):
- raise ValueError("FileInput opening mode must be 'r' or 'rb'")
+ if mode not in ('r', 'rU', 'U', 'rb'):
+ raise ValueError("FileInput opening mode must be one of "
+ "'r', 'rU', 'U' and 'rb'")
+ if 'U' in mode:
+ import warnings
+ warnings.warn("'U' mode is deprecated",
+ DeprecationWarning, 2)
self._mode = mode
- self._write_mode = mode.replace('r', 'w')
+ self._write_mode = mode.replace('r', 'w') if 'U' not in mode else 'w'
if openhook:
if inplace:
raise ValueError("FileInput cannot use an opening hook in inplace mode")
@@ -386,6 +392,8 @@ def isfirstline(self):
def isstdin(self):
return self._isstdin
+ __class_getitem__ = classmethod(GenericAlias)
+
def hook_compressed(filename, mode):
ext = os.path.splitext(filename)[1]
diff --git a/Lib/fnmatch.py b/Lib/fnmatch.py
index b98e6413295e1c..0eb1802bdb53c5 100644
--- a/Lib/fnmatch.py
+++ b/Lib/fnmatch.py
@@ -16,6 +16,12 @@
__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"]
+# Build a thread-safe incrementing counter to help create unique regexp group
+# names across calls.
+from itertools import count
+_nextgroupnum = count().__next__
+del count
+
def fnmatch(name, pat):
"""Test whether FILENAME matches PATTERN.
@@ -77,15 +83,19 @@ def translate(pat):
There is no way to quote meta-characters.
"""
+ STAR = object()
+ res = []
+ add = res.append
i, n = 0, len(pat)
- res = ''
while i < n:
c = pat[i]
i = i+1
if c == '*':
- res = res + '.*'
+ # compress consecutive `*` into one
+ if (not res) or res[-1] is not STAR:
+ add(STAR)
elif c == '?':
- res = res + '.'
+ add('.')
elif c == '[':
j = i
if j < n and pat[j] == '!':
@@ -95,7 +105,7 @@ def translate(pat):
while j < n and pat[j] != ']':
j = j+1
if j >= n:
- res = res + '\\['
+ add('\\[')
else:
stuff = pat[i:j]
if '--' not in stuff:
@@ -122,7 +132,52 @@ def translate(pat):
stuff = '^' + stuff[1:]
elif stuff[0] in ('^', '['):
stuff = '\\' + stuff
- res = '%s[%s]' % (res, stuff)
+ add(f'[{stuff}]')
+ else:
+ add(re.escape(c))
+ assert i == n
+
+ # Deal with STARs.
+ inp = res
+ res = []
+ add = res.append
+ i, n = 0, len(inp)
+ # Fixed pieces at the start?
+ while i < n and inp[i] is not STAR:
+ add(inp[i])
+ i += 1
+ # Now deal with STAR fixed STAR fixed ...
+ # For an interior `STAR fixed` pairing, we want to do a minimal
+ # .*? match followed by `fixed`, with no possibility of backtracking.
+ # We can't spell that directly, but can trick it into working by matching
+ # .*?fixed
+ # in a lookahead assertion, save the matched part in a group, then
+ # consume that group via a backreference. If the overall match fails,
+ # the lookahead assertion won't try alternatives. So the translation is:
+ # (?=(?P.*?fixed))(?P=name)
+ # Group names are created as needed: g0, g1, g2, ...
+ # The numbers are obtained from _nextgroupnum() to ensure they're unique
+ # across calls and across threads. This is because people rely on the
+ # undocumented ability to join multiple translate() results together via
+ # "|" to build large regexps matching "one of many" shell patterns.
+ while i < n:
+ assert inp[i] is STAR
+ i += 1
+ if i == n:
+ add(".*")
+ break
+ assert inp[i] is not STAR
+ fixed = []
+ while i < n and inp[i] is not STAR:
+ fixed.append(inp[i])
+ i += 1
+ fixed = "".join(fixed)
+ if i == n:
+ add(".*")
+ add(fixed)
else:
- res = res + re.escape(c)
- return r'(?s:%s)\Z' % res
+ groupnum = _nextgroupnum()
+ add(f"(?=(?P.*?{fixed}))(?P=g{groupnum})")
+ assert i == n
+ res = "".join(res)
+ return fr'(?s:{res})\Z'
diff --git a/Lib/fractions.py b/Lib/fractions.py
index 501f4b74a0b7ac..de3e23b759227c 100644
--- a/Lib/fractions.py
+++ b/Lib/fractions.py
@@ -155,13 +155,9 @@ def __new__(cls, numerator=0, denominator=None, *, _normalize=True):
if denominator == 0:
raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
if _normalize:
- if type(numerator) is int is type(denominator):
- # *very* normal case
- g = math.gcd(numerator, denominator)
- if denominator < 0:
- g = -g
- else:
- g = _gcd(numerator, denominator)
+ g = math.gcd(numerator, denominator)
+ if denominator < 0:
+ g = -g
numerator //= g
denominator //= g
self._numerator = numerator
@@ -625,7 +621,9 @@ def __ge__(a, b):
def __bool__(a):
"""a != 0"""
- return a._numerator != 0
+ # bpo-39274: Use bool() because (a._numerator != 0) can return an
+ # object which is not a bool.
+ return bool(a._numerator)
# support for pickling, copy, and deepcopy
diff --git a/Lib/ftplib.py b/Lib/ftplib.py
index 71b3c289551fd7..1f760ed1ce0bf0 100644
--- a/Lib/ftplib.py
+++ b/Lib/ftplib.py
@@ -75,13 +75,14 @@ class FTP:
'''An FTP client class.
To create a connection, call the class using these arguments:
- host, user, passwd, acct, timeout
+ host, user, passwd, acct, timeout, source_address, encoding
The first four arguments are all strings, and have default value ''.
- timeout must be numeric and defaults to None if not passed,
- meaning that no timeout will be set on any ftp socket(s)
+ The parameter ´timeout´ must be numeric and defaults to None if not
+ passed, meaning that no timeout will be set on any ftp socket(s).
If a timeout is passed, then this is now the default timeout for all ftp
socket operations for this instance.
+ The last parameter is the encoding of filenames, which defaults to utf-8.
Then use self.connect() with optional host and port argument.
@@ -102,15 +103,16 @@ class FTP:
file = None
welcome = None
passiveserver = 1
- encoding = "latin-1"
def __init__(self, host='', user='', passwd='', acct='',
- timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
+ timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *,
+ encoding='utf-8'):
"""Initialization method (called by class instantiation).
Initialize host to localhost, port to standard ftp port.
Optional arguments are host (for connect()),
and user, passwd, acct (for login()).
"""
+ self.encoding = encoding
self.source_address = source_address
self.timeout = timeout
if host:
@@ -706,9 +708,10 @@ class FTP_TLS(FTP):
'''
ssl_version = ssl.PROTOCOL_TLS_CLIENT
- def __init__(self, host='', user='', passwd='', acct='', keyfile=None,
- certfile=None, context=None,
- timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None):
+ def __init__(self, host='', user='', passwd='', acct='',
+ keyfile=None, certfile=None, context=None,
+ timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *,
+ encoding='utf-8'):
if context is not None and keyfile is not None:
raise ValueError("context and keyfile arguments are mutually "
"exclusive")
@@ -727,7 +730,8 @@ def __init__(self, host='', user='', passwd='', acct='', keyfile=None,
keyfile=keyfile)
self.context = context
self._prot_p = False
- super().__init__(host, user, passwd, acct, timeout, source_address)
+ super().__init__(host, user, passwd, acct,
+ timeout, source_address, encoding=encoding)
def login(self, user='', passwd='', acct='', secure=True):
if secure and not isinstance(self.sock, ssl.SSLSocket):
diff --git a/Lib/functools.py b/Lib/functools.py
index 050bec86051179..87c7d87438998b 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -10,15 +10,17 @@
# See C source code for _functools credits/copyright
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
- 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce',
+ 'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce',
'TopologicalSorter', 'CycleError',
- 'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod']
+ 'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod',
+ 'cached_property']
from abc import get_cache_token
from collections import namedtuple
# import types, weakref # Deferred to single_dispatch()
from reprlib import recursive_repr
from _thread import RLock
+from types import GenericAlias
################################################################################
@@ -95,6 +97,8 @@ def _gt_from_lt(self, other, NotImplemented=NotImplemented):
def _le_from_lt(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (a < b) or (a == b).'
op_result = self.__lt__(other)
+ if op_result is NotImplemented:
+ return op_result
return op_result or self == other
def _ge_from_lt(self, other, NotImplemented=NotImplemented):
@@ -135,6 +139,8 @@ def _lt_from_gt(self, other, NotImplemented=NotImplemented):
def _ge_from_gt(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (a > b) or (a == b).'
op_result = self.__gt__(other)
+ if op_result is NotImplemented:
+ return op_result
return op_result or self == other
def _le_from_gt(self, other, NotImplemented=NotImplemented):
@@ -651,6 +657,9 @@ def __get__(self, obj, cls=None):
def __isabstractmethod__(self):
return getattr(self.func, "__isabstractmethod__", False)
+ __class_getitem__ = classmethod(GenericAlias)
+
+
# Helper functions
def _unwrap_partial(func):
@@ -879,6 +888,15 @@ def cache_clear():
pass
+################################################################################
+### cache -- simplified access to the infinity cache
+################################################################################
+
+def cache(user_function, /):
+ 'Simple lightweight unbounded cache. Sometimes called "memoize".'
+ return lru_cache(maxsize=None)(user_function)
+
+
################################################################################
### singledispatch() - single-dispatch generic function decorator
################################################################################
@@ -1203,3 +1221,5 @@ def __get__(self, instance, owner=None):
)
raise TypeError(msg) from None
return val
+
+ __class_getitem__ = classmethod(GenericAlias)
diff --git a/Lib/gettext.py b/Lib/gettext.py
index b98f501884b75a..77b67aef4204c9 100644
--- a/Lib/gettext.py
+++ b/Lib/gettext.py
@@ -46,7 +46,6 @@
# find this format documented anywhere.
-import locale
import os
import re
import sys
@@ -210,6 +209,7 @@ def func(n):
def _expand_lang(loc):
+ import locale
loc = locale.normalize(loc)
COMPONENT_CODESET = 1 << 0
COMPONENT_TERRITORY = 1 << 1
@@ -278,6 +278,7 @@ def lgettext(self, message):
import warnings
warnings.warn('lgettext() is deprecated, use gettext() instead',
DeprecationWarning, 2)
+ import locale
if self._fallback:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'.*\blgettext\b.*',
@@ -299,6 +300,7 @@ def lngettext(self, msgid1, msgid2, n):
import warnings
warnings.warn('lngettext() is deprecated, use ngettext() instead',
DeprecationWarning, 2)
+ import locale
if self._fallback:
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'.*\blngettext\b.*',
@@ -462,6 +464,7 @@ def lgettext(self, message):
import warnings
warnings.warn('lgettext() is deprecated, use gettext() instead',
DeprecationWarning, 2)
+ import locale
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
@@ -476,6 +479,7 @@ def lngettext(self, msgid1, msgid2, n):
import warnings
warnings.warn('lngettext() is deprecated, use ngettext() instead',
DeprecationWarning, 2)
+ import locale
try:
tmsg = self._catalog[(msgid1, self.plural(n))]
except KeyError:
@@ -668,6 +672,7 @@ def ldgettext(domain, message):
import warnings
warnings.warn('ldgettext() is deprecated, use dgettext() instead',
DeprecationWarning, 2)
+ import locale
codeset = _localecodesets.get(domain)
try:
with warnings.catch_warnings():
@@ -695,6 +700,7 @@ def ldngettext(domain, msgid1, msgid2, n):
import warnings
warnings.warn('ldngettext() is deprecated, use dngettext() instead',
DeprecationWarning, 2)
+ import locale
codeset = _localecodesets.get(domain)
try:
with warnings.catch_warnings():
diff --git a/Lib/glob.py b/Lib/glob.py
index 0b3fcc6bbb9af3..0dd2f8be661094 100644
--- a/Lib/glob.py
+++ b/Lib/glob.py
@@ -31,6 +31,7 @@ def iglob(pathname, *, recursive=False):
If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
"""
+ sys.audit("glob.glob", pathname, recursive)
it = _iglob(pathname, recursive, False)
if recursive and _isrecursive(pathname):
s = next(it) # skip empty string
@@ -38,7 +39,6 @@ def iglob(pathname, *, recursive=False):
return it
def _iglob(pathname, recursive, dironly):
- sys.audit("glob.glob", pathname, recursive)
dirname, basename = os.path.split(pathname)
if not has_magic(pathname):
assert not dironly
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index 56873b7278b6af..1b6e50247c1815 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -71,8 +71,6 @@
__builtin_constructor_cache = {}
__block_openssl_constructor = {
- 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512',
- 'shake_128', 'shake_256',
'blake2b', 'blake2s',
}
@@ -125,10 +123,13 @@ def __get_openssl_constructor(name):
# Prefer our blake2 and sha3 implementation.
return __get_builtin_constructor(name)
try:
+ # MD5, SHA1, and SHA2 are in all supported OpenSSL versions
+ # SHA3/shake are available in OpenSSL 1.1.1+
f = getattr(_hashlib, 'openssl_' + name)
# Allow the C module to raise ValueError. The function will be
- # defined but the hash not actually available thanks to OpenSSL.
- f()
+ # defined but the hash not actually available. Don't fall back to
+ # builtin if the current security policy blocks a digest, bpo#40695.
+ f(usedforsecurity=False)
# Use the C function directly (very fast)
return f
except (AttributeError, ValueError):
@@ -154,7 +155,7 @@ def __hash_new(name, data=b'', **kwargs):
# salt, personal, tree hashing or SSE.
return __get_builtin_constructor(name)(data, **kwargs)
try:
- return _hashlib.new(name, data)
+ return _hashlib.new(name, data, **kwargs)
except ValueError:
# If the _hashlib module (OpenSSL) doesn't support the named
# hash, try using our builtin implementations.
diff --git a/Lib/hmac.py b/Lib/hmac.py
index b769876e6f7745..180bc378b52d62 100644
--- a/Lib/hmac.py
+++ b/Lib/hmac.py
@@ -4,14 +4,15 @@
"""
import warnings as _warnings
-from _operator import _compare_digest as compare_digest
try:
import _hashlib as _hashopenssl
except ImportError:
_hashopenssl = None
_openssl_md_meths = None
+ from _operator import _compare_digest as compare_digest
else:
_openssl_md_meths = frozenset(_hashopenssl.openssl_md_meth_names)
+ compare_digest = _hashopenssl.compare_digest
import hashlib as _hashlib
trans_5C = bytes((x ^ 0x5C) for x in range(256))
@@ -30,6 +31,10 @@ class HMAC:
"""
blocksize = 64 # 512-bit HMAC; can be changed in subclasses.
+ __slots__ = (
+ "_digest_cons", "_inner", "_outer", "block_size", "digest_size"
+ )
+
def __init__(self, key, msg=None, digestmod=''):
"""Create a new HMAC object.
@@ -51,18 +56,18 @@ def __init__(self, key, msg=None, digestmod=''):
raise TypeError("Missing required parameter 'digestmod'.")
if callable(digestmod):
- self.digest_cons = digestmod
+ self._digest_cons = digestmod
elif isinstance(digestmod, str):
- self.digest_cons = lambda d=b'': _hashlib.new(digestmod, d)
+ self._digest_cons = lambda d=b'': _hashlib.new(digestmod, d)
else:
- self.digest_cons = lambda d=b'': digestmod.new(d)
+ self._digest_cons = lambda d=b'': digestmod.new(d)
- self.outer = self.digest_cons()
- self.inner = self.digest_cons()
- self.digest_size = self.inner.digest_size
+ self._outer = self._digest_cons()
+ self._inner = self._digest_cons()
+ self.digest_size = self._inner.digest_size
- if hasattr(self.inner, 'block_size'):
- blocksize = self.inner.block_size
+ if hasattr(self._inner, 'block_size'):
+ blocksize = self._inner.block_size
if blocksize < 16:
_warnings.warn('block_size of %d seems too small; using our '
'default of %d.' % (blocksize, self.blocksize),
@@ -79,21 +84,33 @@ def __init__(self, key, msg=None, digestmod=''):
self.block_size = blocksize
if len(key) > blocksize:
- key = self.digest_cons(key).digest()
+ key = self._digest_cons(key).digest()
key = key.ljust(blocksize, b'\0')
- self.outer.update(key.translate(trans_5C))
- self.inner.update(key.translate(trans_36))
+ self._outer.update(key.translate(trans_5C))
+ self._inner.update(key.translate(trans_36))
if msg is not None:
self.update(msg)
@property
def name(self):
- return "hmac-" + self.inner.name
+ return "hmac-" + self._inner.name
+
+ @property
+ def digest_cons(self):
+ return self._digest_cons
+
+ @property
+ def inner(self):
+ return self._inner
+
+ @property
+ def outer(self):
+ return self._outer
def update(self, msg):
"""Feed data from msg into this hashing object."""
- self.inner.update(msg)
+ self._inner.update(msg)
def copy(self):
"""Return a separate copy of this hashing object.
@@ -102,10 +119,10 @@ def copy(self):
"""
# Call __new__ directly to avoid the expensive __init__.
other = self.__class__.__new__(self.__class__)
- other.digest_cons = self.digest_cons
+ other._digest_cons = self._digest_cons
other.digest_size = self.digest_size
- other.inner = self.inner.copy()
- other.outer = self.outer.copy()
+ other._inner = self._inner.copy()
+ other._outer = self._outer.copy()
return other
def _current(self):
@@ -113,8 +130,8 @@ def _current(self):
To be used only internally with digest() and hexdigest().
"""
- h = self.outer.copy()
- h.update(self.inner.digest())
+ h = self._outer.copy()
+ h.update(self._inner.digest())
return h
def digest(self):
diff --git a/Lib/http/__init__.py b/Lib/http/__init__.py
index 350afe77926b02..37be765349ea00 100644
--- a/Lib/http/__init__.py
+++ b/Lib/http/__init__.py
@@ -17,6 +17,9 @@ class HTTPStatus(IntEnum):
* RFC 2774: An HTTP Extension Framework
* RFC 7725: An HTTP Status Code to Report Legal Obstacles
* RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
+ * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
+ * RFC 8297: An HTTP Status Code for Indicating Hints
+ * RFC 8470: Using Early Data in HTTP
"""
def __new__(cls, value, phrase, description=''):
obj = int.__new__(cls, value)
@@ -31,6 +34,7 @@ def __new__(cls, value, phrase, description=''):
SWITCHING_PROTOCOLS = (101, 'Switching Protocols',
'Switching to new protocol; obey Upgrade header')
PROCESSING = 102, 'Processing'
+ EARLY_HINTS = 103, 'Early Hints'
# success
OK = 200, 'OK', 'Request fulfilled, document follows'
@@ -100,11 +104,14 @@ def __new__(cls, value, phrase, description=''):
'Cannot satisfy request range')
EXPECTATION_FAILED = (417, 'Expectation Failed',
'Expect condition could not be satisfied')
+ IM_A_TEAPOT = (418, 'I\'m a Teapot',
+ 'Server refuses to brew coffee because it is a teapot.')
MISDIRECTED_REQUEST = (421, 'Misdirected Request',
'Server is not able to produce a response')
UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity'
LOCKED = 423, 'Locked'
FAILED_DEPENDENCY = 424, 'Failed Dependency'
+ TOO_EARLY = 425, 'Too Early'
UPGRADE_REQUIRED = 426, 'Upgrade Required'
PRECONDITION_REQUIRED = (428, 'Precondition Required',
'The origin server requires the request to be conditional')
diff --git a/Lib/http/client.py b/Lib/http/client.py
index 33a434733f8a46..019380a7203181 100644
--- a/Lib/http/client.py
+++ b/Lib/http/client.py
@@ -828,6 +828,8 @@ def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
(self.host, self.port) = self._get_hostport(host, port)
+ self._validate_host(self.host)
+
# This is stored as an instance variable to allow unit
# tests to replace it with a suitable mockup
self._create_connection = socket.create_connection
@@ -1183,6 +1185,14 @@ def _validate_path(self, url):
raise InvalidURL(f"URL can't contain control characters. {url!r} "
f"(found at least {match.group()!r})")
+ def _validate_host(self, host):
+ """Validate a host so it doesn't contain control characters."""
+ # Prevent CVE-2019-18348.
+ match = _contains_disallowed_url_pchar_re.search(host)
+ if match:
+ raise InvalidURL(f"URL can't contain control characters. {host!r} "
+ f"(found at least {match.group()!r})")
+
def putheader(self, header, *values):
"""Send a request header line to the server.
diff --git a/Lib/http/cookies.py b/Lib/http/cookies.py
index 6694f5478bdadf..35ac2dc6ae280c 100644
--- a/Lib/http/cookies.py
+++ b/Lib/http/cookies.py
@@ -131,6 +131,7 @@
#
import re
import string
+import types
__all__ = ["CookieError", "BaseCookie", "SimpleCookie"]
@@ -419,6 +420,8 @@ def OutputString(self, attrs=None):
# Return the result
return _semispacejoin(result)
+ __class_getitem__ = classmethod(types.GenericAlias)
+
#
# Pattern for finding cookie
diff --git a/Lib/http/server.py b/Lib/http/server.py
index 2d74b95586cff4..fa204fbc15e3d7 100644
--- a/Lib/http/server.py
+++ b/Lib/http/server.py
@@ -1164,8 +1164,9 @@ def run_cgi(self):
while select.select([self.rfile], [], [], 0)[0]:
if not self.rfile.read(1):
break
- if sts:
- self.log_error("CGI script exit status %#x", sts)
+ exitcode = os.waitstatus_to_exitcode(sts)
+ if exitcode:
+ self.log_error(f"CGI script exit code {exitcode}")
return
# Child
try:
diff --git a/Lib/idlelib/Icons/README.txt b/Lib/idlelib/Icons/README.txt
new file mode 100644
index 00000000000000..8b471629ecb3ea
--- /dev/null
+++ b/Lib/idlelib/Icons/README.txt
@@ -0,0 +1,9 @@
+The IDLE icons are from https://bugs.python.org/issue1490384
+
+Created by Andrew Clover.
+
+The original sources are available from Andrew's website:
+https://www.doxdesk.com/software/py/pyicons.html
+
+Various different formats and sizes are available at this GitHub Pull Request:
+https://github.com/python/cpython/pull/17473
diff --git a/Lib/idlelib/Icons/idle.icns b/Lib/idlelib/Icons/idle.icns
deleted file mode 100644
index f65e3130f0afff..00000000000000
Binary files a/Lib/idlelib/Icons/idle.icns and /dev/null differ
diff --git a/Lib/idlelib/Icons/idle_256.png b/Lib/idlelib/Icons/idle_256.png
new file mode 100644
index 00000000000000..99ffa6fad4a92d
Binary files /dev/null and b/Lib/idlelib/Icons/idle_256.png differ
diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt
index d57ba7e6bac908..709008f78a571d 100644
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -3,6 +3,27 @@ Released on 2020-10-05?
======================================
+bpo-40723: Make test_idle pass when run after import.
+Patch by Florian Dahlitz.
+
+bpo-38689: IDLE will no longer freeze when inspect.signature fails
+when fetching a calltip.
+
+bpo-27115: For 'Go to Line', use a Query entry box subclass with
+IDLE standard behavior and improved error checking.
+
+bpo-39885: Since clicking to get an IDLE context menu moves the
+cursor, any text selection should be and now is cleared.
+
+bpo-39852: Edit "Go to line" now clears any selection, preventing
+accidental deletion. It also updates Ln and Col on the status bar.
+
+bpo-39781: Selecting code context lines no longer causes a jump.
+
+bpo-39663: Add tests for pyparse find_good_parse_start().
+
+bpo-39600: Remove duplicate font names from configuration list.
+
bpo-38792: Close a shell calltip if a :exc:`KeyboardInterrupt`
or shell restart occurs. Patch by Zackery Spytz.
diff --git a/Lib/idlelib/autocomplete_w.py b/Lib/idlelib/autocomplete_w.py
index 0643c092c6e548..fe7a6be83d586b 100644
--- a/Lib/idlelib/autocomplete_w.py
+++ b/Lib/idlelib/autocomplete_w.py
@@ -4,7 +4,7 @@
import platform
from tkinter import *
-from tkinter.ttk import Frame, Scrollbar
+from tkinter.ttk import Scrollbar
from idlelib.autocomplete import FILES, ATTRS
from idlelib.multicall import MC_SHIFT
diff --git a/Lib/idlelib/calltip.py b/Lib/idlelib/calltip.py
index 2e0db60d476aef..d4092c7847186b 100644
--- a/Lib/idlelib/calltip.py
+++ b/Lib/idlelib/calltip.py
@@ -129,20 +129,22 @@ def get_argspec(ob):
empty line or _MAX_LINES. For builtins, this typically includes
the arguments in addition to the return value.
'''
- argspec = default = ""
+ # Determine function object fob to inspect.
try:
ob_call = ob.__call__
- except BaseException:
- return default
-
+ except BaseException: # Buggy user object could raise anything.
+ return '' # No popup for non-callables.
fob = ob_call if isinstance(ob_call, types.MethodType) else ob
+ # Initialize argspec and wrap it to get lines.
try:
argspec = str(inspect.signature(fob))
- except ValueError as err:
+ except Exception as err:
msg = str(err)
if msg.startswith(_invalid_method):
return _invalid_method
+ else:
+ argspec = ''
if '/' in argspec and len(argspec) < _MAX_COLS - len(_argument_positional):
# Add explanation TODO remove after 3.7, before 3.9.
@@ -154,6 +156,7 @@ def get_argspec(ob):
lines = (textwrap.wrap(argspec, _MAX_COLS, subsequent_indent=_INDENT)
if len(argspec) > _MAX_COLS else [argspec] if argspec else [])
+ # Augment lines from docstring, if any, and join to get argspec.
if isinstance(ob_call, types.MethodType):
doc = ob_call.__doc__
else:
@@ -167,9 +170,8 @@ def get_argspec(ob):
line = line[: _MAX_COLS - 3] + '...'
lines.append(line)
argspec = '\n'.join(lines)
- if not argspec:
- argspec = _default_callable_argspec
- return argspec
+
+ return argspec or _default_callable_argspec
if __name__ == '__main__':
diff --git a/Lib/idlelib/codecontext.py b/Lib/idlelib/codecontext.py
index 4ce98136fe4175..989b30e5994650 100644
--- a/Lib/idlelib/codecontext.py
+++ b/Lib/idlelib/codecontext.py
@@ -7,7 +7,6 @@
enclosing block. The number of hint lines is determined by the maxlines
variable in the codecontext section of config-extensions.def. Lines which do
not open blocks are not shown in the context hints pane.
-
"""
import re
from sys import maxsize as INFINITY
@@ -17,8 +16,8 @@
from idlelib.config import idleConf
-BLOCKOPENERS = {"class", "def", "elif", "else", "except", "finally", "for",
- "if", "try", "while", "with", "async"}
+BLOCKOPENERS = {'class', 'def', 'if', 'elif', 'else', 'while', 'for',
+ 'try', 'except', 'finally', 'with', 'async'}
def get_spaces_firstword(codeline, c=re.compile(r"^(\s*)(\w*)")):
@@ -84,7 +83,7 @@ def __del__(self):
if self.t1 is not None:
try:
self.text.after_cancel(self.t1)
- except tkinter.TclError:
+ except tkinter.TclError: # pragma: no cover
pass
self.t1 = None
@@ -112,7 +111,7 @@ def toggle_code_context_event(self, event=None):
padx += widget.tk.getint(info['padx'])
padx += widget.tk.getint(widget.cget('padx'))
border += widget.tk.getint(widget.cget('border'))
- self.context = tkinter.Text(
+ context = self.context = tkinter.Text(
self.editwin.text_frame,
height=1,
width=1, # Don't request more than we get.
@@ -120,11 +119,11 @@ def toggle_code_context_event(self, event=None):
padx=padx, border=border, relief=SUNKEN, state='disabled')
self.update_font()
self.update_highlight_colors()
- self.context.bind('', self.jumptoline)
+ context.bind('', self.jumptoline)
# Get the current context and initiate the recurring update event.
self.timer_event()
# Grid the context widget above the text widget.
- self.context.grid(row=0, column=1, sticky=NSEW)
+ context.grid(row=0, column=1, sticky=NSEW)
line_number_colors = idleConf.GetHighlight(idleConf.CurrentTheme(),
'linenumber')
@@ -215,18 +214,25 @@ def update_code_context(self):
self.context['state'] = 'disabled'
def jumptoline(self, event=None):
- "Show clicked context line at top of editor."
- lines = len(self.info)
- if lines == 1: # No context lines are showing.
- newtop = 1
- else:
- # Line number clicked.
- contextline = int(float(self.context.index('insert')))
- # Lines not displayed due to maxlines.
- offset = max(1, lines - self.context_depth) - 1
- newtop = self.info[offset + contextline][0]
- self.text.yview(f'{newtop}.0')
- self.update_code_context()
+ """ Show clicked context line at top of editor.
+
+ If a selection was made, don't jump; allow copying.
+ If no visible context, show the top line of the file.
+ """
+ try:
+ self.context.index("sel.first")
+ except tkinter.TclError:
+ lines = len(self.info)
+ if lines == 1: # No context lines are showing.
+ newtop = 1
+ else:
+ # Line number clicked.
+ contextline = int(float(self.context.index('insert')))
+ # Lines not displayed due to maxlines.
+ offset = max(1, lines - self.context_depth) - 1
+ newtop = self.info[offset + contextline][0]
+ self.text.yview(f'{newtop}.0')
+ self.update_code_context()
def timer_event(self):
"Event on editor text widget triggered every UPDATEINTERVAL ms."
diff --git a/Lib/idlelib/config_key.py b/Lib/idlelib/config_key.py
index 4478323fcc2c51..7510aa9f3d8786 100644
--- a/Lib/idlelib/config_key.py
+++ b/Lib/idlelib/config_key.py
@@ -1,7 +1,7 @@
"""
Dialog for building Tkinter accelerator key bindings
"""
-from tkinter import Toplevel, Listbox, Text, StringVar, TclError
+from tkinter import Toplevel, Listbox, StringVar, TclError
from tkinter.ttk import Frame, Button, Checkbutton, Entry, Label, Scrollbar
from tkinter import messagebox
import string
diff --git a/Lib/idlelib/configdialog.py b/Lib/idlelib/configdialog.py
index 22359735874d19..82596498d34611 100644
--- a/Lib/idlelib/configdialog.py
+++ b/Lib/idlelib/configdialog.py
@@ -11,7 +11,7 @@
"""
import re
-from tkinter import (Toplevel, Listbox, Text, Scale, Canvas,
+from tkinter import (Toplevel, Listbox, Scale, Canvas,
StringVar, BooleanVar, IntVar, TRUE, FALSE,
TOP, BOTTOM, RIGHT, LEFT, SOLID, GROOVE,
NONE, BOTH, X, Y, W, E, EW, NS, NSEW, NW,
@@ -606,9 +606,8 @@ def load_font_cfg(self):
font_size = configured_font[1]
font_bold = configured_font[2]=='bold'
- # Set editor font selection list and font_name.
- fonts = list(tkFont.families(self))
- fonts.sort()
+ # Set sorted no-duplicate editor font selection list and font_name.
+ fonts = sorted(set(tkFont.families(self)))
for font in fonts:
self.fontlist.insert(END, font)
self.font_name.set(font_name)
diff --git a/Lib/idlelib/editor.py b/Lib/idlelib/editor.py
index 04c786dc5234c2..b0f88b5463d1b6 100644
--- a/Lib/idlelib/editor.py
+++ b/Lib/idlelib/editor.py
@@ -499,6 +499,7 @@ def handle_yview(self, event, *args):
rmenu = None
def right_menu_event(self, event):
+ self.text.tag_remove("sel", "1.0", "end")
self.text.mark_set("insert", "@%d,%d" % (event.x, event.y))
if not self.rmenu:
self.make_rmenu()
@@ -671,15 +672,16 @@ def replace_event(self, event):
def goto_line_event(self, event):
text = self.text
- lineno = tkSimpleDialog.askinteger("Goto",
- "Go to line number:",parent=text)
- if lineno is None:
- return "break"
- if lineno <= 0:
- text.bell()
- return "break"
- text.mark_set("insert", "%d.0" % lineno)
- text.see("insert")
+ lineno = query.Goto(
+ text, "Go To Line",
+ "Enter a positive integer\n"
+ "('big' = end of file):"
+ ).result
+ if lineno is not None:
+ text.tag_remove("sel", "1.0", "end")
+ text.mark_set("insert", f'{lineno}.0')
+ text.see("insert")
+ self.set_line_and_column()
return "break"
def open_module(self):
diff --git a/Lib/idlelib/help.html b/Lib/idlelib/help.html
index 0b2bdd2e174ccf..424c6b50f339e1 100644
--- a/Lib/idlelib/help.html
+++ b/Lib/idlelib/help.html
@@ -4,7 +4,7 @@
- IDLE — Python 3.9.0a1 documentation
+ IDLE — Python 3.9.0a4 documentation
@@ -17,7 +17,7 @@
@@ -71,7 +71,7 @@