diff --git a/.coveragerc b/.coveragerc index 18bf2f40fe523f..b5d94317e8aa8b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -7,6 +7,11 @@ exclude_lines = # Don't complain if non-runnable code isn't run: if 0: if __name__ == .__main__.: + raise AssertionError\( + + # Empty bodies in protocols or abstract methods + ^\s*def [a-zA-Z0-9_]+\(.*\)(\s*->.*)?:\s*\.\.\.(\s*#.*)?$ + ^\s*\.\.\.(\s*#.*)?$ .*# pragma: no cover .*# pragma: no branch diff --git a/.gitattributes b/.gitattributes index 5d5558da711b17..4a072bc990f0fb 100644 --- a/.gitattributes +++ b/.gitattributes @@ -72,9 +72,12 @@ Doc/library/token-list.inc generated Include/internal/pycore_ast.h generated Include/internal/pycore_ast_state.h generated Include/internal/pycore_opcode.h generated +Include/internal/pycore_opcode_metadata.h generated Include/internal/pycore_*_generated.h generated Include/opcode.h generated +Include/opcode_ids.h generated Include/token.h generated +Lib/_opcode_metadata.py generated Lib/keyword.py generated Lib/test/levenshtein_examples.json generated Lib/test/test_stable_abi_ctypes.py generated diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 882ba9e9c9ebea..578cd71a7bd211 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -69,6 +69,7 @@ Python/traceback.c @iritkatriel # Import (including importlib). **/*import* @brettcannon @ericsnowcurrently @ncoghlan @warsaw +/Python/import.c @kumaraditya303 **/*importlib/resources/* @jaraco @warsaw @FFY00 **/importlib/metadata/* @jaraco @warsaw diff --git a/.github/ISSUE_TEMPLATE/bug.md b/.github/ISSUE_TEMPLATE/bug.md index 1d93e0735e50f3..47037cd319e7e2 100644 --- a/.github/ISSUE_TEMPLATE/bug.md +++ b/.github/ISSUE_TEMPLATE/bug.md @@ -5,28 +5,46 @@ labels: "type-bug" --- # Bug report -A clear and concise description of what the bug is. -Include a minimal, reproducible example (https://stackoverflow.com/help/minimal-reproducible-example), if possible. +## Checklist + + + +- [ ] I am confident this is a bug in CPython, not a bug in a third-party project +- [ ] I have searched the CPython issue tracker, and am confident this bug has not been reported before + +## A clear and concise description of the bug + + + + # Your environment - + - CPython versions tested on: - Operating system and architecture: diff --git a/.github/ISSUE_TEMPLATE/crash.md b/.github/ISSUE_TEMPLATE/crash.md index dad3423db03410..a268249d1c1e65 100644 --- a/.github/ISSUE_TEMPLATE/crash.md +++ b/.github/ISSUE_TEMPLATE/crash.md @@ -5,29 +5,44 @@ labels: "type-crash" --- # Crash report -Tell us what happened, ideally including a minimal, reproducible example (https://stackoverflow.com/help/minimal-reproducible-example). + + + # Error messages -Enter any relevant error message caused by the crash, including a core dump if there is one. + + + # Your environment - + - CPython versions tested on: - Operating system and architecture: diff --git a/.github/ISSUE_TEMPLATE/feature.md b/.github/ISSUE_TEMPLATE/feature.md index ed051e945f8120..7e96bc9df665c2 100644 --- a/.github/ISSUE_TEMPLATE/feature.md +++ b/.github/ISSUE_TEMPLATE/feature.md @@ -4,25 +4,47 @@ about: Submit a proposal for a new CPython feature or enhancement labels: "type-feature" --- + + # Feature or enhancement -(A clear and concise description of your proposal.) + + + # Pitch -(Explain why this feature or enhancement should be implemented and how it would be used. - Add examples, if applicable.) 
+ + + # Previous discussion + diff --git a/.github/workflows/reusable-docs.yml b/.github/workflows/reusable-docs.yml index b39d8cea6421ea..56932c4860573c 100644 --- a/.github/workflows/reusable-docs.yml +++ b/.github/workflows/reusable-docs.yml @@ -28,10 +28,8 @@ jobs: cache-dependency-path: 'Doc/requirements.txt' - name: 'Install build dependencies' run: make -C Doc/ venv - - name: 'Build HTML documentation' - run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html - # Add pull request annotations for Sphinx nitpicks (missing references) + # To annotate PRs with Sphinx nitpicks (missing references) - name: 'Get list of changed files' if: github.event_name == 'pull_request' id: changed_files @@ -39,24 +37,19 @@ jobs: with: filter: "Doc/**" format: csv # works for paths with spaces - - name: 'Build changed files in nit-picky mode' - if: github.event_name == 'pull_request' + - name: 'Build HTML documentation' continue-on-error: true run: | set -Eeuo pipefail - # Mark files the pull request modified - python Doc/tools/touch-clean-files.py --clean '${{ steps.changed_files.outputs.added_modified }}' - # Build docs with the '-n' (nit-picky) option; convert warnings to annotations - make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n --keep-going" html 2>&1 | - python Doc/tools/warnings-to-gh-actions.py - - # Ensure some files always pass Sphinx nit-picky mode (no missing references) - - name: 'Build known-good files in nit-picky mode' + # Build docs with the '-n' (nit-picky) option; write warnings to file + make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going -w sphinx-warnings.txt" html + - name: 'Check warnings' + if: github.event_name == 'pull_request' run: | - # Mark files that must pass nit-picky - python Doc/tools/touch-clean-files.py - # Build docs with the '-n' (nit-picky) option, convert warnings to errors (-W) - make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going" html 2>&1 + python Doc/tools/check-warnings.py \ + --check-and-annotate '${{ steps.changed_files.outputs.added_modified }}' \ + --fail-if-regression \ + --fail-if-improved # This build doesn't use problem matchers or check annotations build_doc_oldest_supported_sphinx: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d62c57c044728f..85a6de4abe0146 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -10,7 +10,7 @@ repos: types_or: [c, python, rst] - repo: https://github.com/sphinx-contrib/sphinx-lint - rev: v0.6.7 + rev: v0.6.8 hooks: - id: sphinx-lint args: [--enable=default-role] diff --git a/Doc/c-api/allocation.rst b/Doc/c-api/allocation.rst index 0a8fcc5ae5fcdf..b3609c233156b6 100644 --- a/Doc/c-api/allocation.rst +++ b/Doc/c-api/allocation.rst @@ -27,22 +27,26 @@ Allocating Objects on the Heap length information for a variable-size object. -.. c:function:: TYPE* PyObject_New(TYPE, PyTypeObject *type) +.. c:macro:: PyObject_New(TYPE, typeobj) - Allocate a new Python object using the C structure type *TYPE* and the - Python type object *type*. Fields not defined by the Python object header - are not initialized; the object's reference count will be one. The size of - the memory allocation is determined from the :c:member:`~PyTypeObject.tp_basicsize` field of - the type object. + Allocate a new Python object using the C structure type *TYPE* + and the Python type object *typeobj* (``PyTypeObject*``). + Fields not defined by the Python object header are not initialized. + The caller will own the only reference to the object + (i.e. 
its reference count will be one). + The size of the memory allocation is determined from the + :c:member:`~PyTypeObject.tp_basicsize` field of the type object. -.. c:function:: TYPE* PyObject_NewVar(TYPE, PyTypeObject *type, Py_ssize_t size) +.. c:macro:: PyObject_NewVar(TYPE, typeobj, size) Allocate a new Python object using the C structure type *TYPE* and the - Python type object *type*. Fields not defined by the Python object header + Python type object *typeobj* (``PyTypeObject*``). + Fields not defined by the Python object header are not initialized. The allocated memory allows for the *TYPE* structure - plus *size* fields of the size given by the :c:member:`~PyTypeObject.tp_itemsize` field of - *type*. This is useful for implementing objects like tuples, which are + plus *size* (``Py_ssize_t``) fields of the size + given by the :c:member:`~PyTypeObject.tp_itemsize` field of + *typeobj*. This is useful for implementing objects like tuples, which are able to determine their size at construction time. Embedding the array of fields into the same allocation decreases the number of allocations, improving the memory management efficiency. @@ -50,8 +54,8 @@ Allocating Objects on the Heap .. c:function:: void PyObject_Del(void *op) - Releases memory allocated to an object using :c:func:`PyObject_New` or - :c:func:`PyObject_NewVar`. This is normally called from the + Releases memory allocated to an object using :c:macro:`PyObject_New` or + :c:macro:`PyObject_NewVar`. This is normally called from the :c:member:`~PyTypeObject.tp_dealloc` handler specified in the object's type. The fields of the object should not be accessed after this call as the memory is no longer a valid Python object. diff --git a/Doc/c-api/apiabiversion.rst b/Doc/c-api/apiabiversion.rst index 62d542966622ce..f6c8284daeacb0 100644 --- a/Doc/c-api/apiabiversion.rst +++ b/Doc/c-api/apiabiversion.rst @@ -60,7 +60,7 @@ See :ref:`stable` for a discussion of API and ABI stability across versions. Use this for numeric comparisons, e.g. ``#if PY_VERSION_HEX >= ...``. - This version is also available via the symbol :data:`Py_Version`. + This version is also available via the symbol :c:var:`Py_Version`. .. c:var:: const unsigned long Py_Version diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst index d2ea490732fe59..c43dd0f4303cd4 100644 --- a/Doc/c-api/arg.rst +++ b/Doc/c-api/arg.rst @@ -293,8 +293,10 @@ Other objects ``O`` (object) [PyObject \*] Store a Python object (without any conversion) in a C object pointer. The C - program thus receives the actual object that was passed. The object's reference - count is not increased. The pointer stored is not ``NULL``. + program thus receives the actual object that was passed. A new + :term:`strong reference` to the object is not created + (i.e. its reference count is not increased). + The pointer stored is not ``NULL``. ``O!`` (object) [*typeobject*, PyObject \*] Store a Python object in a C object pointer. This is similar to ``O``, but @@ -343,7 +345,7 @@ Other objects *items*. Format units for sequences may be nested. It is possible to pass "long" integers (integers whose value exceeds the -platform's :const:`LONG_MAX`) however no proper range checking is done --- the +platform's :c:macro:`LONG_MAX`) however no proper range checking is done --- the most significant bits are silently truncated when the receiving field is too small to receive the value (actually, the semantics are inherited from downcasts in C --- your mileage may vary). @@ -378,7 +380,8 @@ inside nested parentheses. 
They are: mutually exclude each other. Note that any Python object references which are provided to the caller are -*borrowed* references; do not decrement their reference count! +*borrowed* references; do not release them +(i.e. do not decrement their reference count)! Additional arguments passed to these functions must be addresses of variables whose type is determined by the format string; these are used to store values @@ -463,7 +466,7 @@ API Functions A simpler form of parameter retrieval which does not use a format string to specify the types of the arguments. Functions which use this method to retrieve - their parameters should be declared as :const:`METH_VARARGS` in function or + their parameters should be declared as :c:macro:`METH_VARARGS` in function or method tables. The tuple containing the actual parameters should be passed as *args*; it must actually be a tuple. The length of the tuple must be at least *min* and no more than *max*; *min* and *max* may be equal. Additional @@ -477,7 +480,7 @@ API Functions will be set if there was a failure. This is an example of the use of this function, taken from the sources for the - :mod:`_weakref` helper module for weak references:: + :mod:`!_weakref` helper module for weak references:: static PyObject * weakref_ref(PyObject *self, PyObject *args) @@ -555,7 +558,7 @@ Building values Same as ``s#``. ``u`` (:class:`str`) [const wchar_t \*] - Convert a null-terminated :c:expr:`wchar_t` buffer of Unicode (UTF-16 or UCS-4) + Convert a null-terminated :c:type:`wchar_t` buffer of Unicode (UTF-16 or UCS-4) data to a Python Unicode object. If the Unicode buffer pointer is ``NULL``, ``None`` is returned. @@ -621,8 +624,10 @@ Building values Convert a C :c:type:`Py_complex` structure to a Python complex number. ``O`` (object) [PyObject \*] - Pass a Python object untouched (except for its reference count, which is - incremented by one). If the object passed in is a ``NULL`` pointer, it is assumed + Pass a Python object untouched but create a new + :term:`strong reference` to it + (i.e. its reference count is incremented by one). + If the object passed in is a ``NULL`` pointer, it is assumed that this was caused because the call producing the argument found an error and set an exception. Therefore, :c:func:`Py_BuildValue` will return ``NULL`` but won't raise an exception. If no exception has been raised yet, :exc:`SystemError` is @@ -632,7 +637,7 @@ Building values Same as ``O``. ``N`` (object) [PyObject \*] - Same as ``O``, except it doesn't increment the reference count on the object. + Same as ``O``, except it doesn't create a new :term:`strong reference`. Useful when the object is created by a call to an object constructor in the argument list. diff --git a/Doc/c-api/bool.rst b/Doc/c-api/bool.rst index b2d8f2124fc203..b14fa6a0a982e2 100644 --- a/Doc/c-api/bool.rst +++ b/Doc/c-api/bool.rst @@ -11,6 +11,12 @@ creation and deletion functions don't apply to booleans. The following macros are available, however. +.. c:var:: PyTypeObject PyBool_Type + + This instance of :c:type:`PyTypeObject` represents the Python boolean type; it + is the same object as :class:`bool` in the Python layer. + + .. c:function:: int PyBool_Check(PyObject *o) Return true if *o* is of type :c:data:`PyBool_Type`. 
This function always diff --git a/Doc/c-api/buffer.rst b/Doc/c-api/buffer.rst index 91d1edd9b2ec46..ba391a5279f205 100644 --- a/Doc/c-api/buffer.rst +++ b/Doc/c-api/buffer.rst @@ -44,7 +44,7 @@ the elements exposed by an :class:`array.array` can be multi-byte values. An example consumer of the buffer interface is the :meth:`~io.BufferedIOBase.write` method of file objects: any object that can export a series of bytes through -the buffer interface can be written to a file. While :meth:`write` only +the buffer interface can be written to a file. While :meth:`!write` only needs read-only access to the internal contents of the object passed to it, other methods such as :meth:`~io.BufferedIOBase.readinto` need write access to the contents of their argument. The buffer interface allows objects to @@ -102,7 +102,9 @@ a buffer, see :c:func:`PyObject_GetBuffer`. .. c:member:: PyObject *obj A new reference to the exporting object. The reference is owned by - the consumer and automatically decremented and set to ``NULL`` by + the consumer and automatically released + (i.e. reference count decremented) + and set to ``NULL`` by :c:func:`PyBuffer_Release`. The field is the equivalent of the return value of any standard C-API function. @@ -159,10 +161,14 @@ a buffer, see :c:func:`PyObject_GetBuffer`. If it is ``0``, :c:member:`~Py_buffer.buf` points to a single item representing a scalar. In this case, :c:member:`~Py_buffer.shape`, :c:member:`~Py_buffer.strides` and :c:member:`~Py_buffer.suboffsets` MUST be ``NULL``. + The maximum number of dimensions is given by :c:macro:`PyBUF_MAX_NDIM`. - The macro :c:macro:`PyBUF_MAX_NDIM` limits the maximum number of dimensions - to 64. Exporters MUST respect this limit, consumers of multi-dimensional - buffers SHOULD be able to handle up to :c:macro:`PyBUF_MAX_NDIM` dimensions. + .. :c:macro:: PyBUF_MAX_NDIM + + The maximum number of dimensions the memory represents. + Exporters MUST respect this limit, consumers of multi-dimensional + buffers SHOULD be able to handle up to :c:macro:`!PyBUF_MAX_NDIM` dimensions. + Currently set to 64. .. c:member:: Py_ssize_t *shape @@ -225,7 +231,7 @@ object via :c:func:`PyObject_GetBuffer`. Since the complexity of the logical structure of the memory can vary drastically, the consumer uses the *flags* argument to specify the exact buffer type it can handle. -All :c:data:`Py_buffer` fields are unambiguously defined by the request +All :c:type:`Py_buffer` fields are unambiguously defined by the request type. request-independent fields @@ -454,7 +460,8 @@ Buffer-related functions .. c:function:: void PyBuffer_Release(Py_buffer *view) - Release the buffer *view* and decrement the reference count for + Release the buffer *view* and release the :term:`strong reference` + (i.e. decrement the reference count) to the view's supporting object, ``view->obj``. This function MUST be called when the buffer is no longer being used, otherwise reference leaks may occur. @@ -464,7 +471,7 @@ Buffer-related functions .. c:function:: Py_ssize_t PyBuffer_SizeFromFormat(const char *format) - Return the implied :c:data:`~Py_buffer.itemsize` from :c:data:`~Py_buffer.format`. + Return the implied :c:member:`~Py_buffer.itemsize` from :c:member:`~Py_buffer.format`. On error, raise an exception and return -1. .. versionadded:: 3.9 diff --git a/Doc/c-api/bytes.rst b/Doc/c-api/bytes.rst index 9f48f2ffafe170..61a68f52773882 100644 --- a/Doc/c-api/bytes.rst +++ b/Doc/c-api/bytes.rst @@ -64,39 +64,39 @@ called with a non-bytes parameter. 
+-------------------+---------------+--------------------------------+ | Format Characters | Type | Comment | +===================+===============+================================+ - | :attr:`%%` | *n/a* | The literal % character. | + | ``%%`` | *n/a* | The literal % character. | +-------------------+---------------+--------------------------------+ - | :attr:`%c` | int | A single byte, | + | ``%c`` | int | A single byte, | | | | represented as a C int. | +-------------------+---------------+--------------------------------+ - | :attr:`%d` | int | Equivalent to | + | ``%d`` | int | Equivalent to | | | | ``printf("%d")``. [1]_ | +-------------------+---------------+--------------------------------+ - | :attr:`%u` | unsigned int | Equivalent to | + | ``%u`` | unsigned int | Equivalent to | | | | ``printf("%u")``. [1]_ | +-------------------+---------------+--------------------------------+ - | :attr:`%ld` | long | Equivalent to | + | ``%ld`` | long | Equivalent to | | | | ``printf("%ld")``. [1]_ | +-------------------+---------------+--------------------------------+ - | :attr:`%lu` | unsigned long | Equivalent to | + | ``%lu`` | unsigned long | Equivalent to | | | | ``printf("%lu")``. [1]_ | +-------------------+---------------+--------------------------------+ - | :attr:`%zd` | :c:type:`\ | Equivalent to | + | ``%zd`` | :c:type:`\ | Equivalent to | | | Py_ssize_t` | ``printf("%zd")``. [1]_ | +-------------------+---------------+--------------------------------+ - | :attr:`%zu` | size_t | Equivalent to | + | ``%zu`` | size_t | Equivalent to | | | | ``printf("%zu")``. [1]_ | +-------------------+---------------+--------------------------------+ - | :attr:`%i` | int | Equivalent to | + | ``%i`` | int | Equivalent to | | | | ``printf("%i")``. [1]_ | +-------------------+---------------+--------------------------------+ - | :attr:`%x` | int | Equivalent to | + | ``%x`` | int | Equivalent to | | | | ``printf("%x")``. [1]_ | +-------------------+---------------+--------------------------------+ - | :attr:`%s` | const char\* | A null-terminated C character | + | ``%s`` | const char\* | A null-terminated C character | | | | array. | +-------------------+---------------+--------------------------------+ - | :attr:`%p` | const void\* | The hex representation of a C | + | ``%p`` | const void\* | The hex representation of a C | | | | pointer. Mostly equivalent to | | | | ``printf("%p")`` except that | | | | it is guaranteed to start with | @@ -184,8 +184,8 @@ called with a non-bytes parameter. .. c:function:: void PyBytes_ConcatAndDel(PyObject **bytes, PyObject *newpart) Create a new bytes object in *\*bytes* containing the contents of *newpart* - appended to *bytes*. This version decrements the reference count of - *newpart*. + appended to *bytes*. This version releases the :term:`strong reference` + to *newpart* (i.e. decrements its reference count). .. c:function:: int _PyBytes_Resize(PyObject **bytes, Py_ssize_t newsize) diff --git a/Doc/c-api/call.rst b/Doc/c-api/call.rst index ac6242701c5047..aed4ae44c76eea 100644 --- a/Doc/c-api/call.rst +++ b/Doc/c-api/call.rst @@ -59,12 +59,12 @@ This bears repeating: .. versionchanged:: 3.12 - The :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag is now removed from a class + The :c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` flag is now removed from a class when the class's :py:meth:`~object.__call__` method is reassigned. (This internally sets :c:member:`~PyTypeObject.tp_call` only, and thus may make it behave differently than the vectorcall function.) 
In earlier Python versions, vectorcall should only be used with - :const:`immutable ` or static types. + :c:macro:`immutable ` or static types. A class should not implement vectorcall if that would be slower than *tp_call*. For example, if the callee needs to convert @@ -72,7 +72,7 @@ the arguments to an args tuple and kwargs dict anyway, then there is no point in implementing vectorcall. Classes can implement the vectorcall protocol by enabling the -:const:`Py_TPFLAGS_HAVE_VECTORCALL` flag and setting +:c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` flag and setting :c:member:`~PyTypeObject.tp_vectorcall_offset` to the offset inside the object structure where a *vectorcallfunc* appears. This is a pointer to a function with the following signature: @@ -84,7 +84,7 @@ This is a pointer to a function with the following signature: values of the keyword arguments. This can be *NULL* if there are no arguments. - *nargsf* is the number of positional arguments plus possibly the - :const:`PY_VECTORCALL_ARGUMENTS_OFFSET` flag. + :c:macro:`PY_VECTORCALL_ARGUMENTS_OFFSET` flag. To get the actual number of positional arguments from *nargsf*, use :c:func:`PyVectorcall_NARGS`. - *kwnames* is a tuple containing the names of the keyword arguments; @@ -93,7 +93,7 @@ This is a pointer to a function with the following signature: and they must be unique. If there are no keyword arguments, then *kwnames* can instead be *NULL*. -.. data:: PY_VECTORCALL_ARGUMENTS_OFFSET +.. c:macro:: PY_VECTORCALL_ARGUMENTS_OFFSET If this flag is set in a vectorcall *nargsf* argument, the callee is allowed to temporarily change ``args[-1]``. In other words, *args* points to @@ -104,7 +104,7 @@ This is a pointer to a function with the following signature: ``args[0]`` may be changed. Whenever they can do so cheaply (without additional allocation), callers - are encouraged to use :const:`PY_VECTORCALL_ARGUMENTS_OFFSET`. + are encouraged to use :c:macro:`PY_VECTORCALL_ARGUMENTS_OFFSET`. Doing so will allow callables such as bound methods to make their onward calls (which include a prepended *self* argument) very efficiently. @@ -152,7 +152,7 @@ Vectorcall Support API This is mostly useful to check whether or not *op* supports vectorcall, which can be done by checking ``PyVectorcall_Function(op) != NULL``. - .. versionadded:: 3.8 + .. versionadded:: 3.9 .. c:function:: PyObject* PyVectorcall_Call(PyObject *callable, PyObject *tuple, PyObject *dict) @@ -161,7 +161,7 @@ Vectorcall Support API This is a specialized function, intended to be put in the :c:member:`~PyTypeObject.tp_call` slot or be used in an implementation of ``tp_call``. - It does not check the :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag + It does not check the :c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` flag and it does not fall back to ``tp_call``. .. versionadded:: 3.8 @@ -379,11 +379,11 @@ please see individual documentation for details. *args[0]*, and the *args* array starting at *args[1]* represents the arguments of the call. There must be at least one positional argument. *nargsf* is the number of positional arguments including *args[0]*, - plus :const:`PY_VECTORCALL_ARGUMENTS_OFFSET` if the value of ``args[0]`` may + plus :c:macro:`PY_VECTORCALL_ARGUMENTS_OFFSET` if the value of ``args[0]`` may temporarily be changed. Keyword arguments can be passed just like in :c:func:`PyObject_Vectorcall`. 
- If the object has the :const:`Py_TPFLAGS_METHOD_DESCRIPTOR` feature, + If the object has the :c:macro:`Py_TPFLAGS_METHOD_DESCRIPTOR` feature, this will call the unbound method object with the full *args* vector as arguments. diff --git a/Doc/c-api/capsule.rst b/Doc/c-api/capsule.rst index 427ed959c58568..cdb8aa33e9fd32 100644 --- a/Doc/c-api/capsule.rst +++ b/Doc/c-api/capsule.rst @@ -64,7 +64,7 @@ Refer to :ref:`using-capsules` for more information on using these objects. The *name* parameter must compare exactly to the name stored in the capsule. If the name stored in the capsule is ``NULL``, the *name* passed in must also - be ``NULL``. Python uses the C function :c:func:`strcmp` to compare capsule + be ``NULL``. Python uses the C function :c:func:`!strcmp` to compare capsule names. @@ -121,7 +121,7 @@ Refer to :ref:`using-capsules` for more information on using these objects. compared.) In other words, if :c:func:`PyCapsule_IsValid` returns a true value, calls to - any of the accessors (any function starting with :c:func:`PyCapsule_Get`) are + any of the accessors (any function starting with ``PyCapsule_Get``) are guaranteed to succeed. Return a nonzero value if the object is valid and matches the name passed in. diff --git a/Doc/c-api/cell.rst b/Doc/c-api/cell.rst index ac4ef5adc5cc20..f8cd0344fdd1c0 100644 --- a/Doc/c-api/cell.rst +++ b/Doc/c-api/cell.rst @@ -25,7 +25,7 @@ Cell objects are not likely to be useful elsewhere. The type object corresponding to cell objects. -.. c:function:: int PyCell_Check(ob) +.. c:function:: int PyCell_Check(PyObject *ob) Return true if *ob* is a cell object; *ob* must not be ``NULL``. This function always succeeds. diff --git a/Doc/c-api/code.rst b/Doc/c-api/code.rst index a99de9904c0740..5082b0cb6ad3f3 100644 --- a/Doc/c-api/code.rst +++ b/Doc/c-api/code.rst @@ -33,20 +33,20 @@ bound into a function. Return the number of free variables in *co*. -.. c:function:: PyCodeObject* PyUnstable_Code_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *linetable, PyObject *exceptiontable) +.. c:function:: PyCodeObject* PyUnstable_Code_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, PyObject *qualname, int firstlineno, PyObject *linetable, PyObject *exceptiontable) Return a new code object. If you need a dummy code object to create a frame, use :c:func:`PyCode_NewEmpty` instead. Since the definition of the bytecode changes often, calling - :c:func:`PyCode_New` directly can bind you to a precise Python version. + :c:func:`PyUnstable_Code_New` directly can bind you to a precise Python version. The many arguments of this function are inter-dependent in complex ways, meaning that subtle changes to values are likely to result in incorrect execution or VM crashes. Use this function only with extreme care. .. versionchanged:: 3.11 - Added ``exceptiontable`` parameter. + Added ``qualname`` and ``exceptiontable`` parameters. .. index:: single: PyCode_New @@ -56,17 +56,17 @@ bound into a function. The old name is deprecated, but will remain available until the signature changes again. -.. 
c:function:: PyCodeObject* PyUnstable_Code_NewWithPosOnlyArgs(int argcount, int posonlyargcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, int firstlineno, PyObject *linetable, PyObject *exceptiontable) +.. c:function:: PyCodeObject* PyUnstable_Code_NewWithPosOnlyArgs(int argcount, int posonlyargcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, PyObject *qualname, int firstlineno, PyObject *linetable, PyObject *exceptiontable) - Similar to :c:func:`PyCode_New`, but with an extra "posonlyargcount" for positional-only arguments. - The same caveats that apply to ``PyCode_New`` also apply to this function. + Similar to :c:func:`PyUnstable_Code_New`, but with an extra "posonlyargcount" for positional-only arguments. + The same caveats that apply to ``PyUnstable_Code_New`` also apply to this function. .. index:: single: PyCode_NewWithPosOnlyArgs .. versionadded:: 3.8 as ``PyCode_NewWithPosOnlyArgs`` .. versionchanged:: 3.11 - Added ``exceptiontable`` parameter. + Added ``qualname`` and ``exceptiontable`` parameters. .. versionchanged:: 3.12 diff --git a/Doc/c-api/codec.rst b/Doc/c-api/codec.rst index 235c77c945cc5b..8ae5c4fecd6248 100644 --- a/Doc/c-api/codec.rst +++ b/Doc/c-api/codec.rst @@ -7,7 +7,7 @@ Codec registry and support functions Register a new codec search function. - As side effect, this tries to load the :mod:`encodings` package, if not yet + As side effect, this tries to load the :mod:`!encodings` package, if not yet done, to make sure that it is always first in the list of search functions. .. c:function:: int PyCodec_Unregister(PyObject *search_function) diff --git a/Doc/c-api/complex.rst b/Doc/c-api/complex.rst index cb8b270fcbab6e..e3fd001c599c80 100644 --- a/Doc/c-api/complex.rst +++ b/Doc/c-api/complex.rst @@ -64,7 +64,7 @@ pointers. This is consistent throughout the API. representation. If *divisor* is null, this method returns zero and sets - :c:data:`errno` to :c:data:`EDOM`. + :c:data:`errno` to :c:macro:`!EDOM`. .. c:function:: Py_complex _Py_c_pow(Py_complex num, Py_complex exp) @@ -73,7 +73,7 @@ pointers. This is consistent throughout the API. representation. If *num* is null and *exp* is not a positive real number, - this method returns zero and sets :c:data:`errno` to :c:data:`EDOM`. + this method returns zero and sets :c:data:`errno` to :c:macro:`!EDOM`. Complex Numbers as Python Objects diff --git a/Doc/c-api/conversion.rst b/Doc/c-api/conversion.rst index fdb321fe7ab3f2..c5350123dfdfdc 100644 --- a/Doc/c-api/conversion.rst +++ b/Doc/c-api/conversion.rst @@ -119,10 +119,10 @@ The following functions provide locale-independent string to number conversions. .. c:function:: int PyOS_stricmp(const char *s1, const char *s2) Case insensitive comparison of strings. The function works almost - identically to :c:func:`strcmp` except that it ignores the case. + identically to :c:func:`!strcmp` except that it ignores the case. .. c:function:: int PyOS_strnicmp(const char *s1, const char *s2, Py_ssize_t size) Case insensitive comparison of strings. The function works almost - identically to :c:func:`strncmp` except that it ignores the case. + identically to :c:func:`!strncmp` except that it ignores the case. 
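
Editor's note: a minimal usage sketch of the locale-independent, case-insensitive comparison helpers described just above (:c:func:`PyOS_stricmp` / :c:func:`PyOS_strnicmp`). This is an illustration only, not part of the patch; the wrapper functions ``is_utf8_name`` and ``has_utf_prefix`` are hypothetical names chosen for the example::

    #include <Python.h>

    /* Match an encoding name regardless of case: "UTF-8", "utf-8", "Utf-8", ... */
    static int
    is_utf8_name(const char *name)
    {
        return PyOS_stricmp(name, "utf-8") == 0;
    }

    /* Compare only the first three bytes, case-insensitively. */
    static int
    has_utf_prefix(const char *s)
    {
        return PyOS_strnicmp(s, "utf", 3) == 0;
    }
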
diff --git a/Doc/c-api/dict.rst b/Doc/c-api/dict.rst index bd0c36a217e2ce..e4c1d71a413a68 100644 --- a/Doc/c-api/dict.rst +++ b/Doc/c-api/dict.rst @@ -93,10 +93,26 @@ Dictionary Objects Return ``0`` on success or ``-1`` on failure. +.. c:function:: int PyDict_GetItemRef(PyObject *p, PyObject *key, PyObject **result) + + Return a new :term:`strong reference` to the object from dictionary *p* + which has a key *key*: + + * If the key is present, set *\*result* to a new :term:`strong reference` + to the value and return ``1``. + * If the key is missing, set *\*result* to ``NULL`` and return ``0``. + * On error, raise an exception and return ``-1``. + + .. versionadded:: 3.13 + + See also the :c:func:`PyObject_GetItem` function. + + .. c:function:: PyObject* PyDict_GetItem(PyObject *p, PyObject *key) - Return the object from dictionary *p* which has a key *key*. Return ``NULL`` - if the key *key* is not present, but *without* setting an exception. + Return a :term:`borrowed reference` to the object from dictionary *p* which + has a key *key*. Return ``NULL`` if the key *key* is missing *without* + setting an exception. .. note:: @@ -131,6 +147,14 @@ Dictionary Objects :c:func:`PyUnicode_FromString` *key* instead. +.. c:function:: int PyDict_GetItemStringRef(PyObject *p, const char *key, PyObject **result) + + Similar than :c:func:`PyDict_GetItemRef`, but *key* is specified as a + :c:expr:`const char*`, rather than a :c:expr:`PyObject*`. + + .. versionadded:: 3.13 + + .. c:function:: PyObject* PyDict_SetDefault(PyObject *p, PyObject *key, PyObject *defaultobj) This is the same as the Python-level :meth:`dict.setdefault`. If present, it diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst index a24ecac861e76b..f1d6c995188abb 100644 --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -83,7 +83,7 @@ Printing and clearing This utility function prints a warning message to ``sys.stderr`` when an exception has been set but it is impossible for the interpreter to actually raise the exception. It is used, for example, when an exception occurs in an - :meth:`__del__` method. + :meth:`~object.__del__` method. The function is called with a single argument *obj* that identifies the context in which the unraisable exception occurred. If possible, @@ -110,7 +110,8 @@ For convenience, some of these functions will always return a This is the most common way to set the error indicator. The first argument specifies the exception type; it is normally one of the standard exceptions, - e.g. :c:data:`PyExc_RuntimeError`. You need not increment its reference count. + e.g. :c:data:`PyExc_RuntimeError`. You need not create a new + :term:`strong reference` to it (e.g. with :c:func:`Py_INCREF`). The second argument is an error message; it is decoded from ``'utf-8'``. @@ -163,9 +164,9 @@ For convenience, some of these functions will always return a This is a convenience function to raise an exception when a C library function has returned an error and set the C variable :c:data:`errno`. It constructs a tuple object whose first item is the integer :c:data:`errno` value and whose - second item is the corresponding error message (gotten from :c:func:`strerror`), + second item is the corresponding error message (gotten from :c:func:`!strerror`), and then calls ``PyErr_SetObject(type, object)``. 
On Unix, when the - :c:data:`errno` value is :const:`EINTR`, indicating an interrupted system call, + :c:data:`errno` value is :c:macro:`!EINTR`, indicating an interrupted system call, this calls :c:func:`PyErr_CheckSignals`, and if that set the error indicator, leaves it set to that. The function always returns ``NULL``, so a wrapper function around a system call can write ``return PyErr_SetFromErrno(type);`` @@ -177,7 +178,7 @@ For convenience, some of these functions will always return a Similar to :c:func:`PyErr_SetFromErrno`, with the additional behavior that if *filenameObject* is not ``NULL``, it is passed to the constructor of *type* as a third parameter. In the case of :exc:`OSError` exception, - this is used to define the :attr:`filename` attribute of the + this is used to define the :attr:`!filename` attribute of the exception instance. @@ -200,12 +201,12 @@ For convenience, some of these functions will always return a .. c:function:: PyObject* PyErr_SetFromWindowsErr(int ierr) This is a convenience function to raise :exc:`WindowsError`. If called with - *ierr* of ``0``, the error code returned by a call to :c:func:`GetLastError` - is used instead. It calls the Win32 function :c:func:`FormatMessage` to retrieve - the Windows description of error code given by *ierr* or :c:func:`GetLastError`, + *ierr* of ``0``, the error code returned by a call to :c:func:`!GetLastError` + is used instead. It calls the Win32 function :c:func:`!FormatMessage` to retrieve + the Windows description of error code given by *ierr* or :c:func:`!GetLastError`, then it constructs a tuple object whose first item is the *ierr* value and whose second item is the corresponding error message (gotten from - :c:func:`FormatMessage`), and then calls ``PyErr_SetObject(PyExc_WindowsError, + :c:func:`!FormatMessage`), and then calls ``PyErr_SetObject(PyExc_WindowsError, object)``. This function always returns ``NULL``. .. availability:: Windows. @@ -631,7 +632,7 @@ Signal Handling be interruptible by user requests (such as by pressing Ctrl-C). .. note:: - The default Python signal handler for :const:`SIGINT` raises the + The default Python signal handler for :c:macro:`!SIGINT` raises the :exc:`KeyboardInterrupt` exception. @@ -642,7 +643,7 @@ Signal Handling single: SIGINT single: KeyboardInterrupt (built-in exception) - Simulate the effect of a :const:`SIGINT` signal arriving. + Simulate the effect of a :c:macro:`!SIGINT` signal arriving. This is equivalent to ``PyErr_SetInterruptEx(SIGINT)``. .. note:: @@ -666,7 +667,7 @@ Signal Handling to interrupt an operation). If the given signal isn't handled by Python (it was set to - :data:`signal.SIG_DFL` or :data:`signal.SIG_IGN`), it will be ignored. + :py:const:`signal.SIG_DFL` or :py:const:`signal.SIG_IGN`), it will be ignored. If *signum* is outside of the allowed range of signal numbers, ``-1`` is returned. Otherwise, ``0`` is returned. The error indicator is @@ -754,7 +755,7 @@ Exception Objects .. c:function:: PyObject* PyException_GetCause(PyObject *ex) - Return the cause (either an exception instance, or :const:`None`, + Return the cause (either an exception instance, or ``None``, set by ``raise ... from ...``) associated with the exception as a new reference, as accessible from Python through :attr:`__cause__`. @@ -763,7 +764,7 @@ Exception Objects Set the cause associated with the exception to *cause*. Use ``NULL`` to clear it. There is no type check to make sure that *cause* is either an exception - instance or :const:`None`. 
This steals a reference to *cause*. + instance or ``None``. This steals a reference to *cause*. :attr:`__suppress_context__` is implicitly set to ``True`` by this function. @@ -874,7 +875,7 @@ because the :ref:`call protocol ` takes care of recursion handling. Marks a point where a recursive C-level call is about to be performed. - If :const:`USE_STACKCHECK` is defined, this function checks if the OS + If :c:macro:`USE_STACKCHECK` is defined, this function checks if the OS stack overflowed using :c:func:`PyOS_CheckStack`. In this is the case, it sets a :exc:`MemoryError` and returns a nonzero value. diff --git a/Doc/c-api/file.rst b/Doc/c-api/file.rst index f32ecba9f27029..b36c800e00444a 100644 --- a/Doc/c-api/file.rst +++ b/Doc/c-api/file.rst @@ -93,7 +93,7 @@ the :mod:`io` APIs instead. .. index:: single: Py_PRINT_RAW Write object *obj* to file object *p*. The only supported flag for *flags* is - :const:`Py_PRINT_RAW`; if given, the :func:`str` of the object is written + :c:macro:`Py_PRINT_RAW`; if given, the :func:`str` of the object is written instead of the :func:`repr`. Return ``0`` on success or ``-1`` on failure; the appropriate exception will be set. diff --git a/Doc/c-api/float.rst b/Doc/c-api/float.rst index fd0be1108c6300..4f6ac0d8175c6b 100644 --- a/Doc/c-api/float.rst +++ b/Doc/c-api/float.rst @@ -109,7 +109,7 @@ Pack functions The pack routines write 2, 4 or 8 bytes, starting at *p*. *le* is an :c:expr:`int` argument, non-zero if you want the bytes string in little-endian format (exponent last, at ``p+1``, ``p+3``, or ``p+6`` ``p+7``), zero if you -want big-endian format (exponent first, at *p*). The :c:data:`PY_BIG_ENDIAN` +want big-endian format (exponent first, at *p*). The :c:macro:`PY_BIG_ENDIAN` constant can be used to use the native endian: it is equal to ``1`` on big endian processor, or ``0`` on little endian processor. @@ -140,7 +140,7 @@ Unpack functions The unpack routines read 2, 4 or 8 bytes, starting at *p*. *le* is an :c:expr:`int` argument, non-zero if the bytes string is in little-endian format (exponent last, at ``p+1``, ``p+3`` or ``p+6`` and ``p+7``), zero if big-endian -(exponent first, at *p*). The :c:data:`PY_BIG_ENDIAN` constant can be used to +(exponent first, at *p*). The :c:macro:`PY_BIG_ENDIAN` constant can be used to use the native endian: it is equal to ``1`` on big endian processor, or ``0`` on little endian processor. diff --git a/Doc/c-api/gcsupport.rst b/Doc/c-api/gcsupport.rst index c3260a21bc7f8b..6b2494ee4f0ed4 100644 --- a/Doc/c-api/gcsupport.rst +++ b/Doc/c-api/gcsupport.rst @@ -13,22 +13,20 @@ or strings), do not need to provide any explicit support for garbage collection. To create a container type, the :c:member:`~PyTypeObject.tp_flags` field of the type object must -include the :const:`Py_TPFLAGS_HAVE_GC` and provide an implementation of the +include the :c:macro:`Py_TPFLAGS_HAVE_GC` and provide an implementation of the :c:member:`~PyTypeObject.tp_traverse` handler. If instances of the type are mutable, a :c:member:`~PyTypeObject.tp_clear` implementation must also be provided. -.. data:: Py_TPFLAGS_HAVE_GC - :noindex: - +:c:macro:`Py_TPFLAGS_HAVE_GC` Objects with a type with this flag set must conform with the rules documented here. For convenience these objects will be referred to as container objects. Constructors for container types must conform to two rules: -#. The memory for the object must be allocated using :c:func:`PyObject_GC_New` - or :c:func:`PyObject_GC_NewVar`. +#. 
The memory for the object must be allocated using :c:macro:`PyObject_GC_New` + or :c:macro:`PyObject_GC_NewVar`. #. Once all the fields which may contain references to other containers are initialized, it must call :c:func:`PyObject_GC_Track`. @@ -52,21 +50,21 @@ rules: :c:member:`~PyTypeObject.tp_flags`, :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` fields if the type inherits from a class that implements the garbage collector protocol and the child class - does *not* include the :const:`Py_TPFLAGS_HAVE_GC` flag. + does *not* include the :c:macro:`Py_TPFLAGS_HAVE_GC` flag. -.. c:function:: TYPE* PyObject_GC_New(TYPE, PyTypeObject *type) +.. c:macro:: PyObject_GC_New(TYPE, typeobj) - Analogous to :c:func:`PyObject_New` but for container objects with the - :const:`Py_TPFLAGS_HAVE_GC` flag set. + Analogous to :c:macro:`PyObject_New` but for container objects with the + :c:macro:`Py_TPFLAGS_HAVE_GC` flag set. -.. c:function:: TYPE* PyObject_GC_NewVar(TYPE, PyTypeObject *type, Py_ssize_t size) +.. c:macro:: PyObject_GC_NewVar(TYPE, typeobj, size) - Analogous to :c:func:`PyObject_NewVar` but for container objects with the - :const:`Py_TPFLAGS_HAVE_GC` flag set. + Analogous to :c:macro:`PyObject_NewVar` but for container objects with the + :c:macro:`Py_TPFLAGS_HAVE_GC` flag set. .. c:function:: PyObject* PyUnstable_Object_GC_NewWithExtraData(PyTypeObject *type, size_t extra_size) - Analogous to :c:func:`PyObject_GC_New` but allocates *extra_size* + Analogous to :c:macro:`PyObject_GC_New` but allocates *extra_size* bytes at the end of the object (at offset :c:member:`~PyTypeObject.tp_basicsize`). The allocated memory is initialized to zeros, @@ -87,7 +85,7 @@ rules: .. c:function:: TYPE* PyObject_GC_Resize(TYPE, PyVarObject *op, Py_ssize_t newsize) - Resize an object allocated by :c:func:`PyObject_NewVar`. Returns the + Resize an object allocated by :c:macro:`PyObject_NewVar`. Returns the resized object or ``NULL`` on failure. *op* must not be tracked by the collector yet. @@ -130,8 +128,8 @@ rules: .. c:function:: void PyObject_GC_Del(void *op) - Releases memory allocated to an object using :c:func:`PyObject_GC_New` or - :c:func:`PyObject_GC_NewVar`. + Releases memory allocated to an object using :c:macro:`PyObject_GC_New` or + :c:macro:`PyObject_GC_NewVar`. .. c:function:: void PyObject_GC_UnTrack(void *op) @@ -145,7 +143,7 @@ rules: .. versionchanged:: 3.8 - The :c:func:`_PyObject_GC_TRACK` and :c:func:`_PyObject_GC_UNTRACK` macros + The :c:func:`!_PyObject_GC_TRACK` and :c:func:`!_PyObject_GC_UNTRACK` macros have been removed from the public C API. The :c:member:`~PyTypeObject.tp_traverse` handler accepts a function parameter of this type: diff --git a/Doc/c-api/import.rst b/Doc/c-api/import.rst index 7aacc219a2bd61..137780cc359cf9 100644 --- a/Doc/c-api/import.rst +++ b/Doc/c-api/import.rst @@ -142,19 +142,19 @@ Importing Modules read from a Python bytecode file or obtained from the built-in function :func:`compile`, load the module. Return a new reference to the module object, or ``NULL`` with an exception set if an error occurred. *name* - is removed from :attr:`sys.modules` in error cases, even if *name* was already - in :attr:`sys.modules` on entry to :c:func:`PyImport_ExecCodeModule`. Leaving - incompletely initialized modules in :attr:`sys.modules` is dangerous, as imports of + is removed from :data:`sys.modules` in error cases, even if *name* was already + in :data:`sys.modules` on entry to :c:func:`PyImport_ExecCodeModule`. 
Leaving + incompletely initialized modules in :data:`sys.modules` is dangerous, as imports of such modules have no way to know that the module object is an unknown (and probably damaged with respect to the module author's intents) state. The module's :attr:`__spec__` and :attr:`__loader__` will be set, if not set already, with the appropriate values. The spec's loader will be set to the module's ``__loader__`` (if set) and to an instance of - :class:`SourceFileLoader` otherwise. + :class:`~importlib.machinery.SourceFileLoader` otherwise. The module's :attr:`__file__` attribute will be set to the code object's - :c:member:`co_filename`. If applicable, :attr:`__cached__` will also + :attr:`!co_filename`. If applicable, :attr:`__cached__` will also be set. This function will reload the module if it was already imported. See @@ -241,7 +241,7 @@ Importing Modules .. c:function:: PyObject* PyImport_GetImporter(PyObject *path) - Return a finder object for a :data:`sys.path`/:attr:`pkg.__path__` item + Return a finder object for a :data:`sys.path`/:attr:`!pkg.__path__` item *path*, possibly by fetching it from the :data:`sys.path_importer_cache` dict. If it wasn't yet cached, traverse :data:`sys.path_hooks` until a hook is found that can handle the path item. Return ``None`` if no hook could; @@ -310,23 +310,25 @@ Importing Modules .. c:struct:: _inittab - Structure describing a single entry in the list of built-in modules. Each of - these structures gives the name and initialization function for a module built - into the interpreter. The name is an ASCII encoded string. Programs which + Structure describing a single entry in the list of built-in modules. + Programs which embed Python may use an array of these structures in conjunction with :c:func:`PyImport_ExtendInittab` to provide additional built-in modules. - The structure is defined in :file:`Include/import.h` as:: + The structure consists of two members: - struct _inittab { - const char *name; /* ASCII encoded string */ - PyObject* (*initfunc)(void); - }; + .. c:member:: const char *name + + The module name, as an ASCII encoded string. + + .. c: member:: PyObject* (*initfunc)(void) + + Initialization function for a module built into the interpreter. .. c:function:: int PyImport_ExtendInittab(struct _inittab *newtab) Add a collection of modules to the table of built-in modules. The *newtab* - array must end with a sentinel entry which contains ``NULL`` for the :attr:`name` + array must end with a sentinel entry which contains ``NULL`` for the :c:member:`~_inittab.name` field; failure to provide the sentinel value can result in a memory fault. Returns ``0`` on success or ``-1`` if insufficient memory could be allocated to extend the internal table. In the event of failure, no modules are added to the diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index e7b2937d38dcf9..dd53fe2bc95696 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -25,7 +25,7 @@ The following functions can be safely called before Python is initialized: * :c:func:`PyImport_AppendInittab` * :c:func:`PyImport_ExtendInittab` - * :c:func:`PyInitFrozenExtensions` + * :c:func:`!PyInitFrozenExtensions` * :c:func:`PyMem_SetAllocator` * :c:func:`PyMem_SetupDebugHooks` * :c:func:`PyObject_SetArenaAllocator` @@ -151,7 +151,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. :c:member:`PyConfig.use_environment` should be used instead, see :ref:`Python Initialization Configuration `. - Ignore all :envvar:`PYTHON*` environment variables, e.g. 
+ Ignore all :envvar:`!PYTHON*` environment variables, e.g. :envvar:`PYTHONPATH` and :envvar:`PYTHONHOME`, that might be set. Set by the :option:`-E` and :option:`-I` options. @@ -224,7 +224,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. :ref:`Python Initialization Configuration `. If the flag is non-zero, use :class:`io.FileIO` instead of - :class:`WindowsConsoleIO` for :mod:`sys` standard streams. + :class:`!io._WindowsConsoleIO` for :mod:`sys` standard streams. Set to ``1`` if the :envvar:`PYTHONLEGACYWINDOWSSTDIO` environment variable is set to a non-empty string. @@ -393,7 +393,7 @@ Initializing and finalizing the interpreter the application. **Bugs and caveats:** The destruction of modules and objects in modules is done - in random order; this may cause destructors (:meth:`__del__` methods) to fail + in random order; this may cause destructors (:meth:`~object.__del__` methods) to fail when they depend on other objects (even functions) or modules. Dynamically loaded extension modules loaded by Python are not unloaded. Small amounts of memory allocated by the Python interpreter may not be freed (if you find a leak, @@ -417,7 +417,7 @@ Process-wide parameters ======================= -.. c:function:: wchar* Py_GetProgramName() +.. c:function:: wchar_t* Py_GetProgramName() Return the program name set with :c:member:`PyConfig.program_name`, or the default. The returned string points into static storage; the caller should not modify its @@ -785,7 +785,7 @@ the fork, and releasing them afterwards. In addition, it resets any :ref:`lock-objects` in the child. When extending or embedding Python, there is no way to inform Python of additional (non-Python) locks that need to be acquired before or reset after a fork. OS facilities such as -:c:func:`pthread_atfork` would need to be used to accomplish the same thing. +:c:func:`!pthread_atfork` would need to be used to accomplish the same thing. Additionally, when extending or embedding Python, calling :c:func:`fork` directly rather than through :func:`os.fork` (and returning to or calling into Python) may result in a deadlock by one of Python's internal locks @@ -827,8 +827,11 @@ code, or when embedding the Python interpreter: .. c:type:: PyThreadState This data structure represents the state of a single thread. The only public - data member is :attr:`interp` (:c:expr:`PyInterpreterState *`), which points to - this thread's interpreter state. + data member is: + + .. c:member:: PyInterpreterState *interp + + This thread's interpreter state. .. c:function:: PyThreadState* PyEval_SaveThread() @@ -849,7 +852,7 @@ code, or when embedding the Python interpreter: .. note:: Calling this function from a thread when the runtime is finalizing will terminate the thread, even if the thread was not created by Python. - You can use :c:func:`_Py_IsFinalizing` or :func:`sys.is_finalizing` to + You can use :c:func:`!_Py_IsFinalizing` or :func:`sys.is_finalizing` to check if the interpreter is in process of being finalized before calling this function to avoid unwanted termination. @@ -895,7 +898,7 @@ with sub-interpreters: .. note:: Calling this function from a thread when the runtime is finalizing will terminate the thread, even if the thread was not created by Python. - You can use :c:func:`_Py_IsFinalizing` or :func:`sys.is_finalizing` to + You can use :c:func:`!_Py_IsFinalizing` or :func:`sys.is_finalizing` to check if the interpreter is in process of being finalized before calling this function to avoid unwanted termination. 
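
Editor's note: a minimal sketch of the thread-state save/restore pattern around :c:func:`PyEval_SaveThread`, which is documented earlier in this file's changes. This is an illustration only, not part of the patch; ``do_blocking_work`` is a hypothetical C function that must not touch Python objects or call the C API while the GIL is released::

    #include <Python.h>

    void do_blocking_work(void);   /* hypothetical helper: no Python C API calls inside */

    static void
    run_without_gil(void)
    {
        PyThreadState *save = PyEval_SaveThread();  /* release the GIL, detach the thread state */
        do_blocking_work();
        PyEval_RestoreThread(save);                 /* reattach the thread state, reacquire the GIL */
    }
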
@@ -1161,7 +1164,7 @@ All of the following functions must be called after :c:func:`Py_Initialize`. function does not steal any references to *exc*. To prevent naive misuse, you must write your own C extension to call this. Must be called with the GIL held. Returns the number of thread states modified; this is normally one, but will be - zero if the thread id isn't found. If *exc* is :const:`NULL`, the pending + zero if the thread id isn't found. If *exc* is ``NULL``, the pending exception (if any) for the thread is cleared. This raises no exceptions. .. versionchanged:: 3.7 @@ -1177,7 +1180,7 @@ All of the following functions must be called after :c:func:`Py_Initialize`. .. note:: Calling this function from a thread when the runtime is finalizing will terminate the thread, even if the thread was not created by Python. - You can use :c:func:`_Py_IsFinalizing` or :func:`sys.is_finalizing` to + You can use :c:func:`!_Py_IsFinalizing` or :func:`sys.is_finalizing` to check if the interpreter is in process of being finalized before calling this function to avoid unwanted termination. @@ -1223,7 +1226,96 @@ You can switch between sub-interpreters using the :c:func:`PyThreadState_Swap` function. You can create and destroy them using the following functions: -.. c:function:: PyThreadState* Py_NewInterpreter() +.. c:type:: PyInterpreterConfig + + Structure containing most parameters to configure a sub-interpreter. + Its values are used only in :c:func:`Py_NewInterpreterFromConfig` and + never modified by the runtime. + + .. versionadded:: 3.12 + + Structure fields: + + .. c:member:: int use_main_obmalloc + + If this is ``0`` then the sub-interpreter will use its own + "object" allocator state. + Otherwise it will use (share) the main interpreter's. + + If this is ``0`` then + :c:member:`~PyInterpreterConfig.check_multi_interp_extensions` + must be ``1`` (non-zero). + If this is ``1`` then :c:member:`~PyInterpreterConfig.gil` + must not be :c:macro:`PyInterpreterConfig_OWN_GIL`. + + .. c:member:: int allow_fork + + If this is ``0`` then the runtime will not support forking the + process in any thread where the sub-interpreter is currently active. + Otherwise fork is unrestricted. + + Note that the :mod:`subprocess` module still works + when fork is disallowed. + + .. c:member:: int allow_exec + + If this is ``0`` then the runtime will not support replacing the + current process via exec (e.g. :func:`os.execv`) in any thread + where the sub-interpreter is currently active. + Otherwise exec is unrestricted. + + Note that the :mod:`subprocess` module still works + when exec is disallowed. + + .. c:member:: int allow_threads + + If this is ``0`` then the sub-interpreter's :mod:`threading` module + won't create threads. + Otherwise threads are allowed. + + .. c:member:: int allow_daemon_threads + + If this is ``0`` then the sub-interpreter's :mod:`threading` module + won't create daemon threads. + Otherwise daemon threads are allowed (as long as + :c:member:`~PyInterpreterConfig.allow_threads` is non-zero). + + .. c:member:: int check_multi_interp_extensions + + If this is ``0`` then all extension modules may be imported, + including legacy (single-phase init) modules, + in any thread where the sub-interpreter is currently active. + Otherwise only multi-phase init extension modules + (see :pep:`489`) may be imported. + (Also see :c:macro:`Py_mod_multiple_interpreters`.) + + This must be ``1`` (non-zero) if + :c:member:`~PyInterpreterConfig.use_main_obmalloc` is ``0``. + + .. 
c:member:: int gil + + This determines the operation of the GIL for the sub-interpreter. + It may be one of the following: + + .. c:namespace:: NULL + + .. c:macro:: PyInterpreterConfig_DEFAULT_GIL + + Use the default selection (:c:macro:`PyInterpreterConfig_SHARED_GIL`). + + .. c:macro:: PyInterpreterConfig_SHARED_GIL + + Use (share) the main interpreter's GIL. + + .. c:macro:: PyInterpreterConfig_OWN_GIL + + Use the sub-interpreter's own GIL. + + If this is :c:macro:`PyInterpreterConfig_OWN_GIL` then + :c:member:`PyInterpreterConfig.use_main_obmalloc` must be ``0``. + + +.. c:function:: PyStatus Py_NewInterpreterFromConfig(PyThreadState **tstate_p, const PyInterpreterConfig *config) .. index:: pair: module; builtins @@ -1243,16 +1335,47 @@ function. You can create and destroy them using the following functions: ``sys.stdout`` and ``sys.stderr`` (however these refer to the same underlying file descriptors). - The return value points to the first thread state created in the new + The given *config* controls the options with which the interpreter + is initialized. + + Upon success, *tstate_p* will be set to the first thread state + created in the new sub-interpreter. This thread state is made in the current thread state. Note that no actual thread is created; see the discussion of thread states - below. If creation of the new interpreter is unsuccessful, ``NULL`` is - returned; no exception is set since the exception state is stored in the - current thread state and there may not be a current thread state. (Like all - other Python/C API functions, the global interpreter lock must be held before - calling this function and is still held when it returns; however, unlike most - other Python/C API functions, there needn't be a current thread state on - entry.) + below. If creation of the new interpreter is unsuccessful, + *tstate_p* is set to ``NULL``; + no exception is set since the exception state is stored in the + current thread state and there may not be a current thread state. + + Like all other Python/C API functions, the global interpreter lock + must be held before calling this function and is still held when it + returns. Likewise a current thread state must be set on entry. On + success, the returned thread state will be set as current. If the + sub-interpreter is created with its own GIL then the GIL of the + calling interpreter will be released. When the function returns, + the new interpreter's GIL will be held by the current thread and + the previously interpreter's GIL will remain released here. + + .. versionadded:: 3.12 + + Sub-interpreters are most effective when isolated from each other, + with certain functionality restricted:: + + PyInterpreterConfig config = { + .use_main_obmalloc = 0, + .allow_fork = 0, + .allow_exec = 0, + .allow_threads = 1, + .allow_daemon_threads = 0, + .check_multi_interp_extensions = 1, + .gil = PyInterpreterConfig_OWN_GIL, + }; + PyThreadState *tstate = Py_NewInterpreterFromConfig(&config); + + Note that the config is used only briefly and does not get modified. + During initialization the config's values are converted into various + :c:type:`PyInterpreterState` values. A read-only copy of the config + may be stored internally on the :c:type:`PyInterpreterState`. .. index:: single: Py_FinalizeEx() @@ -1287,19 +1410,79 @@ function. You can create and destroy them using the following functions: .. index:: single: close() (in module os) +.. c:function:: PyThreadState* Py_NewInterpreter(void) + + .. 
index:: + pair: module; builtins + pair: module; __main__ + pair: module; sys + single: stdout (in module sys) + single: stderr (in module sys) + single: stdin (in module sys) + + Create a new sub-interpreter. This is essentially just a wrapper + around :c:func:`Py_NewInterpreterFromConfig` with a config that + preserves the existing behavior. The result is an unisolated + sub-interpreter that shares the main interpreter's GIL, allows + fork/exec, allows daemon threads, and allows single-phase init + modules. + + .. c:function:: void Py_EndInterpreter(PyThreadState *tstate) .. index:: single: Py_FinalizeEx() - Destroy the (sub-)interpreter represented by the given thread state. The given - thread state must be the current thread state. See the discussion of thread - states below. When the call returns, the current thread state is ``NULL``. All - thread states associated with this interpreter are destroyed. (The global - interpreter lock must be held before calling this function and is still held - when it returns.) :c:func:`Py_FinalizeEx` will destroy all sub-interpreters that + Destroy the (sub-)interpreter represented by the given thread state. + The given thread state must be the current thread state. See the + discussion of thread states below. When the call returns, + the current thread state is ``NULL``. All thread states associated + with this interpreter are destroyed. The global interpreter lock + used by the target interpreter must be held before calling this + function. No GIL is held when it returns. + + :c:func:`Py_FinalizeEx` will destroy all sub-interpreters that haven't been explicitly destroyed at that point. +A Per-Interpreter GIL +--------------------- + +Using :c:func:`Py_NewInterpreterFromConfig` you can create +a sub-interpreter that is completely isolated from other interpreters, +including having its own GIL. The most important benefit of this +isolation is that such an interpreter can execute Python code without +being blocked by other interpreters or blocking any others. Thus a +single Python process can truly take advantage of multiple CPU cores +when running Python code. The isolation also encourages a different +approach to concurrency than that of just using threads. +(See :pep:`554`.) + +Using an isolated interpreter requires vigilance in preserving that +isolation. That especially means not sharing any objects or mutable +state without guarantees about thread-safety. Even objects that are +otherwise immutable (e.g. ``None``, ``(1, 5)``) can't normally be shared +because of the refcount. One simple but less-efficient approach around +this is to use a global lock around all use of some state (or object). +Alternately, effectively immutable objects (like integers or strings) +can be made safe in spite of their refcounts by making them "immortal". +In fact, this has been done for the builtin singletons, small integers, +and a number of other builtin objects. + +If you preserve isolation then you will have access to proper multi-core +computing without the complications that come with free-threading. +Failure to preserve isolation will expose you to the full consequences +of free-threading, including races and hard-to-debug crashes. + +Aside from that, one of the main challenges of using multiple isolated +interpreters is how to communicate between them safely (not break +isolation) and efficiently. The runtime and stdlib do not provide +any standard approach to this yet. 
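As a rough illustration of the lifecycle described above (the sketch below is
not part of this patch), creating an isolated sub-interpreter, running some
code under its own GIL, and tearing it down might look like the following;
error handling is abbreviated, :c:func:`PyRun_SimpleString` merely stands in
for real work, and the swap-back at the end mirrors the behaviour described
for :c:func:`Py_EndInterpreter` above::

   #include <Python.h>

   static int
   run_isolated(const char *code)
   {
       /* The calling thread must hold its interpreter's GIL and have a
          current thread state, as Py_NewInterpreterFromConfig() requires. */
       PyThreadState *save_tstate = PyThreadState_Get();

       PyInterpreterConfig config = {
           .use_main_obmalloc = 0,
           .allow_fork = 0,
           .allow_exec = 0,
           .allow_threads = 1,
           .allow_daemon_threads = 0,
           .check_multi_interp_extensions = 1,
           .gil = PyInterpreterConfig_OWN_GIL,
       };

       PyThreadState *tstate = NULL;
       PyStatus status = Py_NewInterpreterFromConfig(&tstate, &config);
       if (PyStatus_Exception(status)) {
           /* *tstate was set to NULL; no exception is set. */
           return -1;
       }

       /* The new interpreter, and its own GIL, are now current here. */
       int rc = PyRun_SimpleString(code);

       /* Destroy the sub-interpreter; afterwards the current thread state
          is NULL, so switch back to the saved one. */
       Py_EndInterpreter(tstate);
       PyThreadState_Swap(save_tstate);
       return rc;
   }

Nothing in this sketch is shared with the new interpreter beyond the C-level
arguments; any richer exchange of data between the interpreters is left to
the embedder.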
A future stdlib module would help +mitigate the effort of preserving isolation and expose effective tools +for communicating (and sharing) data between interpreters. + +.. versionadded:: 3.12 + + Bugs and caveats ---------------- @@ -1407,32 +1590,32 @@ Python-level trace functions in previous versions. The type of the trace function registered using :c:func:`PyEval_SetProfile` and :c:func:`PyEval_SetTrace`. The first parameter is the object passed to the registration function as *obj*, *frame* is the frame object to which the event - pertains, *what* is one of the constants :const:`PyTrace_CALL`, - :const:`PyTrace_EXCEPTION`, :const:`PyTrace_LINE`, :const:`PyTrace_RETURN`, - :const:`PyTrace_C_CALL`, :const:`PyTrace_C_EXCEPTION`, :const:`PyTrace_C_RETURN`, - or :const:`PyTrace_OPCODE`, and *arg* depends on the value of *what*: - - +------------------------------+----------------------------------------+ - | Value of *what* | Meaning of *arg* | - +==============================+========================================+ - | :const:`PyTrace_CALL` | Always :c:data:`Py_None`. | - +------------------------------+----------------------------------------+ - | :const:`PyTrace_EXCEPTION` | Exception information as returned by | - | | :func:`sys.exc_info`. | - +------------------------------+----------------------------------------+ - | :const:`PyTrace_LINE` | Always :c:data:`Py_None`. | - +------------------------------+----------------------------------------+ - | :const:`PyTrace_RETURN` | Value being returned to the caller, | - | | or ``NULL`` if caused by an exception. | - +------------------------------+----------------------------------------+ - | :const:`PyTrace_C_CALL` | Function object being called. | - +------------------------------+----------------------------------------+ - | :const:`PyTrace_C_EXCEPTION` | Function object being called. | - +------------------------------+----------------------------------------+ - | :const:`PyTrace_C_RETURN` | Function object being called. | - +------------------------------+----------------------------------------+ - | :const:`PyTrace_OPCODE` | Always :c:data:`Py_None`. | - +------------------------------+----------------------------------------+ + pertains, *what* is one of the constants :c:data:`PyTrace_CALL`, + :c:data:`PyTrace_EXCEPTION`, :c:data:`PyTrace_LINE`, :c:data:`PyTrace_RETURN`, + :c:data:`PyTrace_C_CALL`, :c:data:`PyTrace_C_EXCEPTION`, :c:data:`PyTrace_C_RETURN`, + or :c:data:`PyTrace_OPCODE`, and *arg* depends on the value of *what*: + + +-------------------------------+----------------------------------------+ + | Value of *what* | Meaning of *arg* | + +===============================+========================================+ + | :c:data:`PyTrace_CALL` | Always :c:data:`Py_None`. | + +-------------------------------+----------------------------------------+ + | :c:data:`PyTrace_EXCEPTION` | Exception information as returned by | + | | :func:`sys.exc_info`. | + +-------------------------------+----------------------------------------+ + | :c:data:`PyTrace_LINE` | Always :c:data:`Py_None`. | + +-------------------------------+----------------------------------------+ + | :c:data:`PyTrace_RETURN` | Value being returned to the caller, | + | | or ``NULL`` if caused by an exception. | + +-------------------------------+----------------------------------------+ + | :c:data:`PyTrace_C_CALL` | Function object being called. 
| + +-------------------------------+----------------------------------------+ + | :c:data:`PyTrace_C_EXCEPTION` | Function object being called. | + +-------------------------------+----------------------------------------+ + | :c:data:`PyTrace_C_RETURN` | Function object being called. | + +-------------------------------+----------------------------------------+ + | :c:data:`PyTrace_OPCODE` | Always :c:data:`Py_None`. | + +-------------------------------+----------------------------------------+ .. c:var:: int PyTrace_CALL @@ -1499,8 +1682,8 @@ Python-level trace functions in previous versions. function as its first parameter, and may be any Python object, or ``NULL``. If the profile function needs to maintain state, using a different value for *obj* for each thread provides a convenient and thread-safe place to store it. The - profile function is called for all monitored events except :const:`PyTrace_LINE` - :const:`PyTrace_OPCODE` and :const:`PyTrace_EXCEPTION`. + profile function is called for all monitored events except :c:data:`PyTrace_LINE` + :c:data:`PyTrace_OPCODE` and :c:data:`PyTrace_EXCEPTION`. See also the :func:`sys.setprofile` function. @@ -1525,8 +1708,8 @@ Python-level trace functions in previous versions. :c:func:`PyEval_SetProfile`, except the tracing function does receive line-number events and per-opcode events, but does not receive any event related to C function objects being called. Any trace function registered using :c:func:`PyEval_SetTrace` - will not receive :const:`PyTrace_C_CALL`, :const:`PyTrace_C_EXCEPTION` or - :const:`PyTrace_C_RETURN` as a value for the *what* parameter. + will not receive :c:data:`PyTrace_C_CALL`, :c:data:`PyTrace_C_EXCEPTION` or + :c:data:`PyTrace_C_RETURN` as a value for the *what* parameter. See also the :func:`sys.settrace` function. diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 5f70c45c54f757..3ad2d435665f89 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -135,6 +135,8 @@ PyStatus Name of the function which created an error, can be ``NULL``. + .. c:namespace:: NULL + Functions to create a status: .. c:function:: PyStatus PyStatus_Ok(void) @@ -210,6 +212,8 @@ PyPreConfig Structure used to preinitialize Python. + .. c:namespace:: NULL + Function to initialize a preconfiguration: .. c:function:: void PyPreConfig_InitPythonConfig(PyPreConfig *preconfig) @@ -222,6 +226,8 @@ PyPreConfig Initialize the preconfiguration with :ref:`Isolated Configuration `. + .. c:namespace:: PyPreConfig + Structure fields: .. c:member:: int allocator @@ -429,6 +435,8 @@ PyConfig When done, the :c:func:`PyConfig_Clear` function must be used to release the configuration memory. + .. c:namespace:: NULL + Structure methods: .. c:function:: void PyConfig_InitPythonConfig(PyConfig *config) @@ -522,11 +530,13 @@ PyConfig Moreover, if :c:func:`PyConfig_SetArgv` or :c:func:`PyConfig_SetBytesArgv` is used, this method must be called before other methods, since the preinitialization configuration depends on command line arguments (if - :c:member:`parse_argv` is non-zero). + :c:member:`~PyConfig.parse_argv` is non-zero). The caller of these methods is responsible to handle exceptions (error or exit) using ``PyStatus_Exception()`` and ``Py_ExitStatusException()``. + .. c:namespace:: PyConfig + Structure fields: .. c:member:: PyWideStringList argv @@ -889,7 +899,7 @@ PyConfig .. 
c:member:: int legacy_windows_stdio If non-zero, use :class:`io.FileIO` instead of - :class:`io.WindowsConsoleIO` for :data:`sys.stdin`, :data:`sys.stdout` + :class:`!io._WindowsConsoleIO` for :data:`sys.stdin`, :data:`sys.stdout` and :data:`sys.stderr`. Set to ``1`` if the :envvar:`PYTHONLEGACYWINDOWSSTDIO` environment @@ -938,7 +948,7 @@ PyConfig .. c:member:: wchar_t* pythonpath_env Module search paths (:data:`sys.path`) as a string separated by ``DELIM`` - (:data:`os.path.pathsep`). + (:data:`os.pathsep`). Set by the :envvar:`PYTHONPATH` environment variable. @@ -1118,7 +1128,7 @@ PyConfig .. c:member:: int show_ref_count - Show total reference count at exit? + Show total reference count at exit (excluding immortal objects)? Set to ``1`` by :option:`-X showrefcount <-X>` command line option. @@ -1139,7 +1149,7 @@ PyConfig Set to ``0`` by the :option:`-S` command line option. - :data:`sys.flags.no_site` is set to the inverted value of + :data:`sys.flags.no_site ` is set to the inverted value of :c:member:`~PyConfig.site_import`. Default: ``1``. diff --git a/Doc/c-api/intro.rst b/Doc/c-api/intro.rst index 9014f7e03b3600..5cf9914be8c8bf 100644 --- a/Doc/c-api/intro.rst +++ b/Doc/c-api/intro.rst @@ -287,52 +287,58 @@ true if (and only if) the object pointed to by *a* is a Python list. Reference Counts ---------------- -The reference count is important because today's computers have a finite (and -often severely limited) memory size; it counts how many different places there -are that have a reference to an object. Such a place could be another object, -or a global (or static) C variable, or a local variable in some C function. -When an object's reference count becomes zero, the object is deallocated. If -it contains references to other objects, their reference count is decremented. -Those other objects may be deallocated in turn, if this decrement makes their -reference count become zero, and so on. (There's an obvious problem with -objects that reference each other here; for now, the solution is "don't do -that.") +The reference count is important because today's computers have a finite +(and often severely limited) memory size; it counts how many different +places there are that have a :term:`strong reference` to an object. +Such a place could be another object, or a global (or static) C variable, +or a local variable in some C function. +When the last :term:`strong reference` to an object is released +(i.e. its reference count becomes zero), the object is deallocated. +If it contains references to other objects, those references are released. +Those other objects may be deallocated in turn, if there are no more +references to them, and so on. (There's an obvious problem with +objects that reference each other here; for now, the solution +is "don't do that.") .. index:: single: Py_INCREF() single: Py_DECREF() -Reference counts are always manipulated explicitly. The normal way is to use -the macro :c:func:`Py_INCREF` to increment an object's reference count by one, -and :c:func:`Py_DECREF` to decrement it by one. The :c:func:`Py_DECREF` macro +Reference counts are always manipulated explicitly. The normal way is +to use the macro :c:func:`Py_INCREF` to take a new reference to an +object (i.e. increment its reference count by one), +and :c:func:`Py_DECREF` to release that reference (i.e. decrement the +reference count by one). 
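For instance (a sketch, not part of this patch), an extension that stashes an
object in a static C variable takes a reference when storing it and releases
that reference when the value is replaced or dropped; the NULL-tolerant
variants used here are described further below::

   #include <Python.h>

   static PyObject *cached = NULL;   /* hypothetical module-level cache */

   void
   set_cached(PyObject *obj)
   {
       Py_INCREF(obj);        /* take a strong reference for the cache */
       Py_XDECREF(cached);    /* release the reference to the old value, if any */
       cached = obj;
   }

   void
   clear_cached(void)
   {
       Py_CLEAR(cached);      /* release the reference and reset the cache to NULL */
   }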
The :c:func:`Py_DECREF` macro is considerably more complex than the incref one, since it must check whether the reference count becomes zero and then cause the object's deallocator to be -called. The deallocator is a function pointer contained in the object's type -structure. The type-specific deallocator takes care of decrementing the -reference counts for other objects contained in the object if this is a compound +called. The deallocator is a function pointer contained in the object's type +structure. The type-specific deallocator takes care of releasing references +for other objects contained in the object if this is a compound object type, such as a list, as well as performing any additional finalization that's needed. There's no chance that the reference count can overflow; at least as many bits are used to hold the reference count as there are distinct memory locations in virtual memory (assuming ``sizeof(Py_ssize_t) >= sizeof(void*)``). Thus, the reference count increment is a simple operation. -It is not necessary to increment an object's reference count for every local -variable that contains a pointer to an object. In theory, the object's +It is not necessary to hold a :term:`strong reference` (i.e. increment +the reference count) for every local variable that contains a pointer +to an object. In theory, the object's reference count goes up by one when the variable is made to point to it and it goes down by one when the variable goes out of scope. However, these two cancel each other out, so at the end the reference count hasn't changed. The only real reason to use the reference count is to prevent the object from being deallocated as long as our variable is pointing to it. If we know that there is at least one other reference to the object that lives at least as long as -our variable, there is no need to increment the reference count temporarily. +our variable, there is no need to take a new :term:`strong reference` +(i.e. increment the reference count) temporarily. An important situation where this arises is in objects that are passed as arguments to C functions in an extension module that are called from Python; the call mechanism guarantees to hold a reference to every argument for the duration of the call. However, a common pitfall is to extract an object from a list and hold on to it -for a while without incrementing its reference count. Some other operation might -conceivably remove the object from the list, decrementing its reference count +for a while without taking a new reference. Some other operation might +conceivably remove the object from the list, releasing that reference, and possibly deallocating it. The real danger is that innocent-looking operations may invoke arbitrary Python code which could do this; there is a code path which allows control to flow back to the user from a :c:func:`Py_DECREF`, so @@ -340,7 +346,8 @@ almost any operation is potentially dangerous. A safe approach is to always use the generic operations (functions whose name begins with ``PyObject_``, ``PyNumber_``, ``PySequence_`` or ``PyMapping_``). -These operations always increment the reference count of the object they return. +These operations always create a new :term:`strong reference` +(i.e. increment the reference count) of the object they return. This leaves the caller with the responsibility to call :c:func:`Py_DECREF` when they are done with the result; this soon becomes second nature. @@ -356,7 +363,7 @@ to objects (objects are not owned: they are always shared). 
"Owning a reference" means being responsible for calling Py_DECREF on it when the reference is no longer needed. Ownership can also be transferred, meaning that the code that receives ownership of the reference then becomes responsible for -eventually decref'ing it by calling :c:func:`Py_DECREF` or :c:func:`Py_XDECREF` +eventually releasing it by calling :c:func:`Py_DECREF` or :c:func:`Py_XDECREF` when it's no longer needed---or passing on this responsibility (usually to its caller). When a function passes ownership of a reference on to its caller, the caller is said to receive a *new* reference. When no ownership is transferred, @@ -414,9 +421,9 @@ For example, the above two blocks of code could be replaced by the following It is much more common to use :c:func:`PyObject_SetItem` and friends with items whose references you are only borrowing, like arguments that were passed in to -the function you are writing. In that case, their behaviour regarding reference -counts is much saner, since you don't have to increment a reference count so you -can give a reference away ("have it be stolen"). For example, this function +the function you are writing. In that case, their behaviour regarding references +is much saner, since you don't have to take a new reference just so you +can give that reference away ("have it be stolen"). For example, this function sets all items of a list (actually, any mutable sequence) to a given item:: int @@ -616,7 +623,7 @@ and lose important information about the exact cause of the error. .. index:: single: sum_sequence() A simple example of detecting exceptions and passing them on is shown in the -:c:func:`sum_sequence` example above. It so happens that this example doesn't +:c:func:`!sum_sequence` example above. It so happens that this example doesn't need to clean up any owned references when it detects an error. The following example function shows some error cleanup. First, to remind you why you like Python, we show the equivalent Python code:: diff --git a/Doc/c-api/iterator.rst b/Doc/c-api/iterator.rst index 3fcf099134d4dd..6b7ba8c9979163 100644 --- a/Doc/c-api/iterator.rst +++ b/Doc/c-api/iterator.rst @@ -6,7 +6,7 @@ Iterator Objects ---------------- Python provides two general-purpose iterator objects. The first, a sequence -iterator, works with an arbitrary sequence supporting the :meth:`__getitem__` +iterator, works with an arbitrary sequence supporting the :meth:`~object.__getitem__` method. The second works with a callable object and a sentinel value, calling the callable for each item in the sequence, and ending the iteration when the sentinel value is returned. @@ -19,7 +19,7 @@ sentinel value is returned. types. -.. c:function:: int PySeqIter_Check(op) +.. c:function:: int PySeqIter_Check(PyObject *op) Return true if the type of *op* is :c:data:`PySeqIter_Type`. This function always succeeds. @@ -38,7 +38,7 @@ sentinel value is returned. two-argument form of the :func:`iter` built-in function. -.. c:function:: int PyCallIter_Check(op) +.. c:function:: int PyCallIter_Check(PyObject *op) Return true if the type of *op* is :c:data:`PyCallIter_Type`. This function always succeeds. diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index fe379ffe912391..f1354a34f2b2d5 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -142,8 +142,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. instance of :c:type:`PyLongObject`, first call its :meth:`~object.__index__` method (if present) to convert it to a :c:type:`PyLongObject`. 
- If the value of *obj* is greater than :const:`LONG_MAX` or less than - :const:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, and + If the value of *obj* is greater than :c:macro:`LONG_MAX` or less than + :c:macro:`LONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, and return ``-1``; otherwise, set *\*overflow* to ``0``. If any other exception occurs set *\*overflow* to ``0`` and return ``-1`` as usual. @@ -183,8 +183,8 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. instance of :c:type:`PyLongObject`, first call its :meth:`~object.__index__` method (if present) to convert it to a :c:type:`PyLongObject`. - If the value of *obj* is greater than :const:`LLONG_MAX` or less than - :const:`LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, + If the value of *obj* is greater than :c:macro:`LLONG_MAX` or less than + :c:macro:`LLONG_MIN`, set *\*overflow* to ``1`` or ``-1``, respectively, and return ``-1``; otherwise, set *\*overflow* to ``0``. If any other exception occurs set *\*overflow* to ``0`` and return ``-1`` as usual. diff --git a/Doc/c-api/mapping.rst b/Doc/c-api/mapping.rst index 9176a4652cbf29..82011d9d36a524 100644 --- a/Doc/c-api/mapping.rst +++ b/Doc/c-api/mapping.rst @@ -13,7 +13,7 @@ See also :c:func:`PyObject_GetItem`, :c:func:`PyObject_SetItem` and Return ``1`` if the object provides the mapping protocol or supports slicing, and ``0`` otherwise. Note that it returns ``1`` for Python classes with - a :meth:`__getitem__` method, since in general it is impossible to + a :meth:`~object.__getitem__` method, since in general it is impossible to determine what type of keys the class supports. This function always succeeds. @@ -90,7 +90,7 @@ See also :c:func:`PyObject_GetItem`, :c:func:`PyObject_SetItem` and This is equivalent to the Python expression ``key in o``. This function always succeeds. - Note that exceptions which occur while calling the :meth:`__getitem__` + Note that exceptions which occur while calling the :meth:`~object.__getitem__` method will get suppressed. To get error reporting use :c:func:`PyObject_GetItem()` instead. @@ -101,7 +101,7 @@ See also :c:func:`PyObject_GetItem`, :c:func:`PyObject_SetItem` and This is equivalent to the Python expression ``key in o``. This function always succeeds. - Note that exceptions which occur while calling the :meth:`__getitem__` + Note that exceptions which occur while calling the :meth:`~object.__getitem__` method and creating a temporary string object will get suppressed. To get error reporting use :c:func:`PyMapping_GetItemString()` instead. diff --git a/Doc/c-api/memory.rst b/Doc/c-api/memory.rst index 7041c15d23fb83..1df8c2b911ca8f 100644 --- a/Doc/c-api/memory.rst +++ b/Doc/c-api/memory.rst @@ -136,7 +136,7 @@ need to be held. The :ref:`default raw memory allocator ` uses the following functions: :c:func:`malloc`, :c:func:`calloc`, :c:func:`realloc` -and :c:func:`free`; call ``malloc(1)`` (or ``calloc(1, 1)``) when requesting +and :c:func:`!free`; call ``malloc(1)`` (or ``calloc(1, 1)``) when requesting zero bytes. .. versionadded:: 3.4 @@ -264,14 +264,14 @@ The following type-oriented macros are provided for convenience. Note that *TYPE* refers to any C type. -.. c:function:: TYPE* PyMem_New(TYPE, size_t n) +.. c:macro:: PyMem_New(TYPE, n) Same as :c:func:`PyMem_Malloc`, but allocates ``(n * sizeof(TYPE))`` bytes of memory. Returns a pointer cast to :c:expr:`TYPE*`. The memory will not have been initialized in any way. -.. 
c:function:: TYPE* PyMem_Resize(void *p, TYPE, size_t n) +.. c:macro:: PyMem_Resize(p, TYPE, n) Same as :c:func:`PyMem_Realloc`, but the memory block is resized to ``(n * sizeof(TYPE))`` bytes. Returns a pointer cast to :c:expr:`TYPE*`. On return, @@ -423,7 +423,7 @@ Customize Memory Allocators +----------------------------------------------------------+---------------------------------------+ .. versionchanged:: 3.5 - The :c:type:`PyMemAllocator` structure was renamed to + The :c:type:`!PyMemAllocator` structure was renamed to :c:type:`PyMemAllocatorEx` and a new ``calloc`` field was added. @@ -431,6 +431,8 @@ Customize Memory Allocators Enum used to identify an allocator domain. Domains: + .. c:namespace:: NULL + .. c:macro:: PYMEM_DOMAIN_RAW Functions: @@ -470,10 +472,14 @@ Customize Memory Allocators The new allocator must return a distinct non-``NULL`` pointer when requesting zero bytes. - For the :c:data:`PYMEM_DOMAIN_RAW` domain, the allocator must be + For the :c:macro:`PYMEM_DOMAIN_RAW` domain, the allocator must be thread-safe: the :term:`GIL ` is not held when the allocator is called. + For the remaining domains, the allocator must also be thread-safe: + the allocator may be called in different interpreters that do not + share a ``GIL``. + If the new allocator is not a hook (does not call the previous allocator), the :c:func:`PyMem_SetupDebugHooks` function must be called to reinstall the debug hooks on top on the new allocator. @@ -498,6 +504,8 @@ Customize Memory Allocators **must** wrap the existing allocator. Substituting the current allocator for some other arbitrary one is **not supported**. + .. versionchanged:: 3.12 + All allocators must be thread-safe. .. c:function:: void PyMem_SetupDebugHooks(void) @@ -536,8 +544,8 @@ Runtime checks: - Detect write before the start of the buffer (buffer underflow). - Detect write after the end of the buffer (buffer overflow). - Check that the :term:`GIL ` is held when - allocator functions of :c:data:`PYMEM_DOMAIN_OBJ` (ex: - :c:func:`PyObject_Malloc`) and :c:data:`PYMEM_DOMAIN_MEM` (ex: + allocator functions of :c:macro:`PYMEM_DOMAIN_OBJ` (ex: + :c:func:`PyObject_Malloc`) and :c:macro:`PYMEM_DOMAIN_MEM` (ex: :c:func:`PyMem_Malloc`) domains are called. On error, the debug hooks use the :mod:`tracemalloc` module to get the @@ -557,9 +565,9 @@ that the treatment of negative indices differs from a Python slice): ``p[-S]`` API identifier (ASCII character): - * ``'r'`` for :c:data:`PYMEM_DOMAIN_RAW`. - * ``'m'`` for :c:data:`PYMEM_DOMAIN_MEM`. - * ``'o'`` for :c:data:`PYMEM_DOMAIN_OBJ`. + * ``'r'`` for :c:macro:`PYMEM_DOMAIN_RAW`. + * ``'m'`` for :c:macro:`PYMEM_DOMAIN_MEM`. + * ``'o'`` for :c:macro:`PYMEM_DOMAIN_OBJ`. ``p[-S+1:0]`` Copies of PYMEM_FORBIDDENBYTE. Used to catch under- writes and reads. @@ -581,7 +589,7 @@ that the treatment of negative indices differs from a Python slice): default). A serial number, incremented by 1 on each call to a malloc-like or - realloc-like function. Big-endian ``size_t``. If "bad memory" is detected + realloc-like function. Big-endian :c:type:`size_t`. If "bad memory" is detected later, the serial number gives an excellent way to set a breakpoint on the next run, to capture the instant at which this block was passed out. The static function bumpserialno() in obmalloc.c is the only place the serial @@ -601,7 +609,7 @@ PYMEM_CLEANBYTE (meaning uninitialized memory is getting used). compiled in release mode. 
On error, the debug hooks now use :mod:`tracemalloc` to get the traceback where a memory block was allocated. The debug hooks now also check if the GIL is held when functions of - :c:data:`PYMEM_DOMAIN_OBJ` and :c:data:`PYMEM_DOMAIN_MEM` domains are + :c:macro:`PYMEM_DOMAIN_OBJ` and :c:macro:`PYMEM_DOMAIN_MEM` domains are called. .. versionchanged:: 3.8 @@ -622,13 +630,13 @@ with a fixed size of 256 KiB. It falls back to :c:func:`PyMem_RawMalloc` and :c:func:`PyMem_RawRealloc` for allocations larger than 512 bytes. *pymalloc* is the :ref:`default allocator ` of the -:c:data:`PYMEM_DOMAIN_MEM` (ex: :c:func:`PyMem_Malloc`) and -:c:data:`PYMEM_DOMAIN_OBJ` (ex: :c:func:`PyObject_Malloc`) domains. +:c:macro:`PYMEM_DOMAIN_MEM` (ex: :c:func:`PyMem_Malloc`) and +:c:macro:`PYMEM_DOMAIN_OBJ` (ex: :c:func:`PyObject_Malloc`) domains. The arena allocator uses the following functions: -* :c:func:`VirtualAlloc` and :c:func:`VirtualFree` on Windows, -* :c:func:`mmap` and :c:func:`munmap` if available, +* :c:func:`!VirtualAlloc` and :c:func:`!VirtualFree` on Windows, +* :c:func:`!mmap` and :c:func:`!munmap` if available, * :c:func:`malloc` and :c:func:`free` otherwise. This allocator is disabled if Python is configured with the @@ -732,8 +740,8 @@ allocators operating on different heaps. :: free(buf1); /* Fatal -- should be PyMem_Del() */ In addition to the functions aimed at handling raw memory blocks from the Python -heap, objects in Python are allocated and released with :c:func:`PyObject_New`, -:c:func:`PyObject_NewVar` and :c:func:`PyObject_Del`. +heap, objects in Python are allocated and released with :c:macro:`PyObject_New`, +:c:macro:`PyObject_NewVar` and :c:func:`PyObject_Del`. These will be explained in the next chapter on defining and implementing new object types in C. diff --git a/Doc/c-api/method.rst b/Doc/c-api/method.rst index 93ad30cd4f7a8d..0d75ab8e1af111 100644 --- a/Doc/c-api/method.rst +++ b/Doc/c-api/method.rst @@ -7,8 +7,8 @@ Instance Method Objects .. index:: pair: object; instancemethod -An instance method is a wrapper for a :c:data:`PyCFunction` and the new way -to bind a :c:data:`PyCFunction` to a class object. It replaces the former call +An instance method is a wrapper for a :c:type:`PyCFunction` and the new way +to bind a :c:type:`PyCFunction` to a class object. It replaces the former call ``PyMethod_New(func, NULL, class)``. diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst index d35b302fce6aa6..187f8419d4ee4f 100644 --- a/Doc/c-api/module.rst +++ b/Doc/c-api/module.rst @@ -119,7 +119,7 @@ Module Objects encoded to 'utf-8'. .. deprecated:: 3.2 - :c:func:`PyModule_GetFilename` raises :c:type:`UnicodeEncodeError` on + :c:func:`PyModule_GetFilename` raises :exc:`UnicodeEncodeError` on unencodable filenames, use :c:func:`PyModule_GetFilenameObject` instead. @@ -145,7 +145,7 @@ or request "multi-phase initialization" by returning the definition struct itsel .. c:member:: PyModuleDef_Base m_base - Always initialize this member to :const:`PyModuleDef_HEAD_INIT`. + Always initialize this member to :c:macro:`PyModuleDef_HEAD_INIT`. .. c:member:: const char *m_name @@ -164,7 +164,7 @@ or request "multi-phase initialization" by returning the definition struct itsel This memory area is allocated based on *m_size* on module creation, and freed when the module object is deallocated, after the - :c:member:`m_free` function has been called, if present. + :c:member:`~PyModuleDef.m_free` function has been called, if present. 
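To make the relationship between *m_size* and the per-module memory area
concrete, here is a brief sketch (not taken from this patch); the state
struct, module name and method are illustrative only::

   #include <Python.h>

   typedef struct {
       int call_count;               /* example per-module state */
   } examplemod_state;

   static struct PyModuleDef examplemodule = {
       PyModuleDef_HEAD_INIT,
       .m_name = "examplemod",
       .m_size = sizeof(examplemod_state),   /* allocated for each module object */
       /* m_methods, m_slots, m_traverse, m_clear, m_free omitted */
   };

   /* Inside a module-level method, *self* is the module object and the
      state is retrieved from it; with m_size > 0 it is always allocated. */
   static PyObject *
   examplemod_bump(PyObject *module, PyObject *Py_UNUSED(args))
   {
       examplemod_state *state = PyModule_GetState(module);
       state->call_count++;
       return PyLong_FromLong(state->call_count);
   }

Because all of the state lives in the module object rather than in C globals,
multiple module objects created from this definition stay independent.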
Setting ``m_size`` to ``-1`` means that the module does not support sub-interpreters, because it has global state. @@ -202,7 +202,7 @@ or request "multi-phase initialization" by returning the definition struct itsel This function is not called if the module state was requested but is not allocated yet. This is the case immediately after the module is created and before the module is executed (:c:data:`Py_mod_exec` function). More - precisely, this function is not called if :c:member:`m_size` is greater + precisely, this function is not called if :c:member:`~PyModuleDef.m_size` is greater than 0 and the module state (as returned by :c:func:`PyModule_GetState`) is ``NULL``. @@ -217,7 +217,7 @@ or request "multi-phase initialization" by returning the definition struct itsel This function is not called if the module state was requested but is not allocated yet. This is the case immediately after the module is created and before the module is executed (:c:data:`Py_mod_exec` function). More - precisely, this function is not called if :c:member:`m_size` is greater + precisely, this function is not called if :c:member:`~PyModuleDef.m_size` is greater than 0 and the module state (as returned by :c:func:`PyModule_GetState`) is ``NULL``. @@ -238,7 +238,7 @@ or request "multi-phase initialization" by returning the definition struct itsel This function is not called if the module state was requested but is not allocated yet. This is the case immediately after the module is created and before the module is executed (:c:data:`Py_mod_exec` function). More - precisely, this function is not called if :c:member:`m_size` is greater + precisely, this function is not called if :c:member:`~PyModuleDef.m_size` is greater than 0 and the module state (as returned by :c:func:`PyModule_GetState`) is ``NULL``. @@ -256,7 +256,7 @@ of the following two module creation functions: Create a new module object, given the definition in *def*. This behaves like :c:func:`PyModule_Create2` with *module_api_version* set to - :const:`PYTHON_API_VERSION`. + :c:macro:`PYTHON_API_VERSION`. .. c:function:: PyObject* PyModule_Create2(PyModuleDef *def, int module_api_version) @@ -282,7 +282,7 @@ An alternate way to specify extensions is to request "multi-phase initialization Extension modules created this way behave more like Python modules: the initialization is split between the *creation phase*, when the module object is created, and the *execution phase*, when it is populated. -The distinction is similar to the :py:meth:`__new__` and :py:meth:`__init__` methods +The distinction is similar to the :py:meth:`!__new__` and :py:meth:`!__init__` methods of classes. Unlike modules created using single-phase initialization, these modules are not @@ -293,7 +293,7 @@ By default, multiple modules created from the same definition should be independent: changes to one should not affect the others. This means that all state should be specific to the module object (using e.g. using :c:func:`PyModule_GetState`), or its contents (such as the module's -:attr:`__dict__` or individual classes created with :c:func:`PyType_FromSpec`). +:attr:`~object.__dict__` or individual classes created with :c:func:`PyType_FromSpec`). All modules created using multi-phase initialization are expected to support :ref:`sub-interpreters `. Making sure multiple modules @@ -376,6 +376,37 @@ The available slot types are: If multiple ``Py_mod_exec`` slots are specified, they are processed in the order they appear in the *m_slots* array. +.. 
c:macro:: Py_mod_multiple_interpreters + + Specifies one of the following values: + + .. c:macro:: Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED + + The module does not support being imported in subinterpreters. + + .. c:macro:: Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED + + The module supports being imported in subinterpreters, + but only when they share the main interpreter's GIL. + (See :ref:`isolating-extensions-howto`.) + + .. c:macro:: Py_MOD_PER_INTERPRETER_GIL_SUPPORTED + + The module supports being imported in subinterpreters, + even when they have their own GIL. + (See :ref:`isolating-extensions-howto`.) + + This slot determines whether or not importing this module + in a subinterpreter will fail. + + Multiple ``Py_mod_multiple_interpreters`` slots may not be specified + in one module definition. + + If ``Py_mod_multiple_interpreters`` is not specified, the import + machinery defaults to ``Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED``. + + .. versionadded:: 3.12 + See :PEP:`489` for more details on multi-phase initialization. Low-level module creation functions @@ -390,7 +421,7 @@ objects dynamically. Note that both ``PyModule_FromDefAndSpec`` and Create a new module object, given the definition in *def* and the ModuleSpec *spec*. This behaves like :c:func:`PyModule_FromDefAndSpec2` - with *module_api_version* set to :const:`PYTHON_API_VERSION`. + with *module_api_version* set to :c:macro:`PYTHON_API_VERSION`. .. versionadded:: 3.5 @@ -515,7 +546,7 @@ state: .. note:: Unlike other functions that steal references, ``PyModule_AddObject()`` - only decrements the reference count of *value* **on success**. + only releases the reference to *value* **on success**. This means that its return value must be checked, and calling code must :c:func:`Py_XDECREF` *value* manually on error. @@ -552,7 +583,7 @@ state: ``NULL``-terminated. Return ``-1`` on error, ``0`` on success. -.. c:function:: int PyModule_AddIntMacro(PyObject *module, macro) +.. c:macro:: PyModule_AddIntMacro(module, macro) Add an int constant to *module*. The name and the value are taken from *macro*. For example ``PyModule_AddIntMacro(module, AF_INET)`` adds the int @@ -560,7 +591,7 @@ state: Return ``-1`` on error, ``0`` on success. -.. c:function:: int PyModule_AddStringMacro(PyObject *module, macro) +.. c:macro:: PyModule_AddStringMacro(module, macro) Add a string constant to *module*. diff --git a/Doc/c-api/none.rst b/Doc/c-api/none.rst index 1a497652ac5655..dd8bfb56104251 100644 --- a/Doc/c-api/none.rst +++ b/Doc/c-api/none.rst @@ -9,7 +9,7 @@ The ``None`` Object Note that the :c:type:`PyTypeObject` for ``None`` is not directly exposed in the Python/C API. Since ``None`` is a singleton, testing for object identity (using -``==`` in C) is sufficient. There is no :c:func:`PyNone_Check` function for the +``==`` in C) is sufficient. There is no :c:func:`!PyNone_Check` function for the same reason. diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 6fc5b2d14dd327..6d9fc7f50604da 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -15,15 +15,15 @@ Object Protocol .. c:macro:: Py_RETURN_NOTIMPLEMENTED Properly handle returning :c:data:`Py_NotImplemented` from within a C - function (that is, increment the reference count of NotImplemented and - return it). + function (that is, create a new :term:`strong reference` + to NotImplemented and return it). .. c:function:: int PyObject_Print(PyObject *o, FILE *fp, int flags) Print an object *o*, on file *fp*. Returns ``-1`` on error. 
The flags argument is used to enable certain printing options. The only option currently supported - is :const:`Py_PRINT_RAW`; if given, the :func:`str` of the object is written + is :c:macro:`Py_PRINT_RAW`; if given, the :func:`str` of the object is written instead of the :func:`repr`. @@ -199,8 +199,8 @@ Object Protocol .. c:function:: PyObject* PyObject_RichCompare(PyObject *o1, PyObject *o2, int opid) Compare the values of *o1* and *o2* using the operation specified by *opid*, - which must be one of :const:`Py_LT`, :const:`Py_LE`, :const:`Py_EQ`, - :const:`Py_NE`, :const:`Py_GT`, or :const:`Py_GE`, corresponding to ``<``, + which must be one of :c:macro:`Py_LT`, :c:macro:`Py_LE`, :c:macro:`Py_EQ`, + :c:macro:`Py_NE`, :c:macro:`Py_GT`, or :c:macro:`Py_GE`, corresponding to ``<``, ``<=``, ``==``, ``!=``, ``>``, or ``>=`` respectively. This is the equivalent of the Python expression ``o1 op o2``, where ``op`` is the operator corresponding to *opid*. Returns the value of the comparison on success, or ``NULL`` on failure. @@ -209,8 +209,8 @@ Object Protocol .. c:function:: int PyObject_RichCompareBool(PyObject *o1, PyObject *o2, int opid) Compare the values of *o1* and *o2* using the operation specified by *opid*, - which must be one of :const:`Py_LT`, :const:`Py_LE`, :const:`Py_EQ`, - :const:`Py_NE`, :const:`Py_GT`, or :const:`Py_GE`, corresponding to ``<``, + which must be one of :c:macro:`Py_LT`, :c:macro:`Py_LE`, :c:macro:`Py_EQ`, + :c:macro:`Py_NE`, :c:macro:`Py_GT`, or :c:macro:`Py_GE`, corresponding to ``<``, ``<=``, ``==``, ``!=``, ``>``, or ``>=`` respectively. Returns ``-1`` on error, ``0`` if the result is false, ``1`` otherwise. This is the equivalent of the Python expression ``o1 op o2``, where ``op`` is the operator corresponding to @@ -218,7 +218,7 @@ Object Protocol .. note:: If *o1* and *o2* are the same object, :c:func:`PyObject_RichCompareBool` - will always return ``1`` for :const:`Py_EQ` and ``0`` for :const:`Py_NE`. + will always return ``1`` for :c:macro:`Py_EQ` and ``0`` for :c:macro:`Py_NE`. .. c:function:: PyObject* PyObject_Format(PyObject *obj, PyObject *format_spec) @@ -293,7 +293,7 @@ Object Protocol Normally only class objects, i.e. instances of :class:`type` or a derived class, are considered classes. However, objects can override this by having - a :attr:`__bases__` attribute (which must be a tuple of base classes). + a :attr:`~class.__bases__` attribute (which must be a tuple of base classes). .. c:function:: int PyObject_IsInstance(PyObject *inst, PyObject *cls) @@ -310,10 +310,10 @@ Object Protocol is an instance of *cls* if its class is a subclass of *cls*. An instance *inst* can override what is considered its class by having a - :attr:`__class__` attribute. + :attr:`~instance.__class__` attribute. An object *cls* can override if it is considered a class, and what its base - classes are, by having a :attr:`__bases__` attribute (which must be a tuple + classes are, by having a :attr:`~class.__bases__` attribute (which must be a tuple of base classes). @@ -357,11 +357,12 @@ Object Protocol When *o* is non-``NULL``, returns a type object corresponding to the object type of object *o*. On failure, raises :exc:`SystemError` and returns ``NULL``. This - is equivalent to the Python expression ``type(o)``. This function increments the - reference count of the return value. There's really no reason to use this + is equivalent to the Python expression ``type(o)``. + This function creates a new :term:`strong reference` to the return value. 
+ There's really no reason to use this function instead of the :c:func:`Py_TYPE()` function, which returns a - pointer of type :c:expr:`PyTypeObject*`, except when the incremented reference - count is needed. + pointer of type :c:expr:`PyTypeObject*`, except when a new + :term:`strong reference` is needed. .. c:function:: int PyObject_TypeCheck(PyObject *o, PyTypeObject *type) @@ -468,10 +469,10 @@ Object Protocol .. c:function:: void *PyObject_GetItemData(PyObject *o) Get a pointer to per-item data for a class with - :const:`Py_TPFLAGS_ITEMS_AT_END`. + :c:macro:`Py_TPFLAGS_ITEMS_AT_END`. On error, set an exception and return ``NULL``. :py:exc:`TypeError` is raised if *o* does not have - :const:`Py_TPFLAGS_ITEMS_AT_END` set. + :c:macro:`Py_TPFLAGS_ITEMS_AT_END` set. .. versionadded:: 3.12 diff --git a/Doc/c-api/refcounting.rst b/Doc/c-api/refcounting.rst index d8e9c2da6f3ff3..118af7a1a8cf90 100644 --- a/Doc/c-api/refcounting.rst +++ b/Doc/c-api/refcounting.rst @@ -15,6 +15,12 @@ of Python objects. Get the reference count of the Python object *o*. + Note that the returned value may not actually reflect how many + references to the object are actually held. For example, some + objects are "immortal" and have a very high refcount that does not + reflect the actual number of references. Consequently, do not rely + on the returned value to be accurate, other than a value of 0 or 1. + Use the :c:func:`Py_SET_REFCNT()` function to set an object reference count. .. versionchanged:: 3.11 @@ -28,36 +34,53 @@ of Python objects. Set the object *o* reference counter to *refcnt*. + Note that this function has no effect on + `immortal `_ + objects. + .. versionadded:: 3.9 + .. versionchanged:: 3.12 + Immortal objects are not modified. + .. c:function:: void Py_INCREF(PyObject *o) - Increment the reference count for object *o*. + Indicate taking a new :term:`strong reference` to object *o*, + indicating it is in use and should not be destroyed. This function is usually used to convert a :term:`borrowed reference` to a :term:`strong reference` in-place. The :c:func:`Py_NewRef` function can be used to create a new :term:`strong reference`. + When done using the object, release is by calling :c:func:`Py_DECREF`. + The object must not be ``NULL``; if you aren't sure that it isn't ``NULL``, use :c:func:`Py_XINCREF`. + Do not expect this function to actually modify *o* in any way. + For at least `some objects `_, + this function has no effect. + + .. versionchanged:: 3.12 + Immortal objects are not modified. + .. c:function:: void Py_XINCREF(PyObject *o) - Increment the reference count for object *o*. The object may be ``NULL``, in - which case the macro has no effect. + Similar to :c:func:`Py_INCREF`, but the object *o* can be ``NULL``, + in which case this has no effect. See also :c:func:`Py_XNewRef`. .. c:function:: PyObject* Py_NewRef(PyObject *o) - Create a new :term:`strong reference` to an object: increment the reference - count of the object *o* and return the object *o*. + Create a new :term:`strong reference` to an object: + call :c:func:`Py_INCREF` on *o* and return the object *o*. When the :term:`strong reference` is no longer needed, :c:func:`Py_DECREF` - should be called on it to decrement the object reference count. + should be called on it to release the reference. The object *o* must not be ``NULL``; use :c:func:`Py_XNewRef` if *o* can be ``NULL``. @@ -87,9 +110,12 @@ of Python objects. .. c:function:: void Py_DECREF(PyObject *o) - Decrement the reference count for object *o*. 
+ Release a :term:`strong reference` to object *o*, indicating the + reference is no longer used. - If the reference count reaches zero, the object's type's deallocation + Once the last :term:`strong reference` is released + (i.e. the object's reference count reaches 0), + the object's type's deallocation function (which must not be ``NULL``) is invoked. This function is usually used to delete a :term:`strong reference` before @@ -98,10 +124,14 @@ of Python objects. The object must not be ``NULL``; if you aren't sure that it isn't ``NULL``, use :c:func:`Py_XDECREF`. + Do not expect this function to actually modify *o* in any way. + For at least `some objects `_, + this function has no effect. + .. warning:: The deallocation function can cause arbitrary Python code to be invoked (e.g. - when a class instance with a :meth:`__del__` method is deallocated). While + when a class instance with a :meth:`~object.__del__` method is deallocated). While exceptions in such code are not propagated, the executed code has free access to all Python global variables. This means that any object that is reachable from a global variable should be in a consistent state before :c:func:`Py_DECREF` is @@ -109,25 +139,29 @@ of Python objects. reference to the deleted object in a temporary variable, update the list data structure, and then call :c:func:`Py_DECREF` for the temporary variable. + .. versionchanged:: 3.12 + Immortal objects are not modified. + .. c:function:: void Py_XDECREF(PyObject *o) - Decrement the reference count for object *o*. The object may be ``NULL``, in - which case the macro has no effect; otherwise the effect is the same as for - :c:func:`Py_DECREF`, and the same warning applies. + Similar to :c:func:`Py_DECREF`, but the object *o* can be ``NULL``, + in which case this has no effect. + The same warning from :c:func:`Py_DECREF` applies here as well. .. c:function:: void Py_CLEAR(PyObject *o) - Decrement the reference count for object *o*. The object may be ``NULL``, in + Release a :term:`strong reference` for object *o*. + The object may be ``NULL``, in which case the macro has no effect; otherwise the effect is the same as for :c:func:`Py_DECREF`, except that the argument is also set to ``NULL``. The warning for :c:func:`Py_DECREF` does not apply with respect to the object passed because the macro carefully uses a temporary variable and sets the argument to ``NULL`` - before decrementing its reference count. + before releasing the reference. - It is a good idea to use this macro whenever decrementing the reference - count of an object that might be traversed during garbage collection. + It is a good idea to use this macro whenever releasing a reference + to an object that might be traversed during garbage collection. .. versionchanged:: 3.12 The macro argument is now only evaluated once. If the argument has side @@ -136,20 +170,22 @@ of Python objects. .. c:function:: void Py_IncRef(PyObject *o) - Increment the reference count for object *o*. A function version of :c:func:`Py_XINCREF`. + Indicate taking a new :term:`strong reference` to object *o*. + A function version of :c:func:`Py_XINCREF`. It can be used for runtime dynamic embedding of Python. .. c:function:: void Py_DecRef(PyObject *o) - Decrement the reference count for object *o*. A function version of :c:func:`Py_XDECREF`. + Release a :term:`strong reference` to object *o*. + A function version of :c:func:`Py_XDECREF`. It can be used for runtime dynamic embedding of Python. .. 
c:macro:: Py_SETREF(dst, src) - Macro safely decrementing the `dst` reference count and setting `dst` to - `src`. + Macro safely releasing a :term:`strong reference` to object *dst* + and setting *dst* to *src*. As in case of :c:func:`Py_CLEAR`, "the obvious" code can be deadly:: @@ -160,9 +196,10 @@ of Python objects. Py_SETREF(dst, src); - That arranges to set `dst` to `src` _before_ decrementing reference count of - *dst* old value, so that any code triggered as a side-effect of `dst` - getting torn down no longer believes `dst` points to a valid object. + That arranges to set *dst* to *src* _before_ releasing the reference + to the old value of *dst*, so that any code triggered as a side-effect + of *dst* getting torn down no longer believes *dst* points + to a valid object. .. versionadded:: 3.6 diff --git a/Doc/c-api/sequence.rst b/Doc/c-api/sequence.rst index 402a3e5e09ff56..ce28839f5ba739 100644 --- a/Doc/c-api/sequence.rst +++ b/Doc/c-api/sequence.rst @@ -9,7 +9,7 @@ Sequence Protocol .. c:function:: int PySequence_Check(PyObject *o) Return ``1`` if the object provides the sequence protocol, and ``0`` otherwise. - Note that it returns ``1`` for Python classes with a :meth:`__getitem__` + Note that it returns ``1`` for Python classes with a :meth:`~object.__getitem__` method, unless they are :class:`dict` subclasses, since in general it is impossible to determine what type of keys the class supports. This function always succeeds. diff --git a/Doc/c-api/set.rst b/Doc/c-api/set.rst index d642a5f1902e2e..1e8a09509032f5 100644 --- a/Doc/c-api/set.rst +++ b/Doc/c-api/set.rst @@ -110,7 +110,7 @@ or :class:`frozenset` or instances of their subtypes. .. index:: pair: built-in function; len Return the length of a :class:`set` or :class:`frozenset` object. Equivalent to - ``len(anyset)``. Raises a :exc:`PyExc_SystemError` if *anyset* is not a + ``len(anyset)``. Raises a :exc:`SystemError` if *anyset* is not a :class:`set`, :class:`frozenset`, or an instance of a subtype. @@ -122,9 +122,9 @@ or :class:`frozenset` or instances of their subtypes. .. c:function:: int PySet_Contains(PyObject *anyset, PyObject *key) Return ``1`` if found, ``0`` if not found, and ``-1`` if an error is encountered. Unlike - the Python :meth:`__contains__` method, this function does not automatically + the Python :meth:`~object.__contains__` method, this function does not automatically convert unhashable sets into temporary frozensets. Raise a :exc:`TypeError` if - the *key* is unhashable. Raise :exc:`PyExc_SystemError` if *anyset* is not a + the *key* is unhashable. Raise :exc:`SystemError` if *anyset* is not a :class:`set`, :class:`frozenset`, or an instance of a subtype. @@ -149,7 +149,7 @@ subtypes but not for instances of :class:`frozenset` or its subtypes. error is encountered. Does not raise :exc:`KeyError` for missing keys. Raise a :exc:`TypeError` if the *key* is unhashable. Unlike the Python :meth:`~set.discard` method, this function does not automatically convert unhashable sets into - temporary frozensets. Raise :exc:`PyExc_SystemError` if *set* is not an + temporary frozensets. Raise :exc:`SystemError` if *set* is not an instance of :class:`set` or its subtype. diff --git a/Doc/c-api/slice.rst b/Doc/c-api/slice.rst index c54a659cf2ffd8..9e880c6b7f25ad 100644 --- a/Doc/c-api/slice.rst +++ b/Doc/c-api/slice.rst @@ -34,7 +34,7 @@ Slice Objects *length* as errors. 
Returns ``0`` on success and ``-1`` on error with no exception set (unless one of - the indices was not :const:`None` and failed to be converted to an integer, + the indices was not ``None`` and failed to be converted to an integer, in which case ``-1`` is returned with an exception set). You probably do not want to use this function. diff --git a/Doc/c-api/stable.rst b/Doc/c-api/stable.rst index 149d4d6bac3ee4..c66b296d304adc 100644 --- a/Doc/c-api/stable.rst +++ b/Doc/c-api/stable.rst @@ -74,7 +74,7 @@ Contents of the Limited API are :ref:`listed below `. Define this macro before including ``Python.h`` to opt in to only use the Limited API, and to select the Limited API version. - Define ``Py_LIMITED_API`` to the value of :c:data:`PY_VERSION_HEX` + Define ``Py_LIMITED_API`` to the value of :c:macro:`PY_VERSION_HEX` corresponding to the lowest Python version your extension supports. The extension will work without recompilation with all Python 3 releases from the specified one onward, and can use Limited API introduced up to that diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index 766f881463c00f..747cfa62294c21 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -35,7 +35,7 @@ under :ref:`reference counting `. .. c:type:: PyVarObject - This is an extension of :c:type:`PyObject` that adds the :attr:`ob_size` + This is an extension of :c:type:`PyObject` that adds the :c:member:`~PyVarObject.ob_size` field. This is only used for objects that have some notion of *length*. This type does not often appear in the Python/C API. Access to the members must be done by using the macros @@ -152,7 +152,7 @@ under :ref:`reference counting `. .. c:macro:: PyVarObject_HEAD_INIT(type, size) This is a macro which expands to initialization values for a new - :c:type:`PyVarObject` type, including the :attr:`ob_size` field. + :c:type:`PyVarObject` type, including the :c:member:`~PyVarObject.ob_size` field. This macro expands to:: _PyObject_EXTRA_INIT @@ -179,7 +179,7 @@ Implementing functions and methods .. c:type:: PyCFunctionWithKeywords Type of the functions used to implement Python callables in C - with signature :const:`METH_VARARGS | METH_KEYWORDS`. + with signature :ref:`METH_VARARGS | METH_KEYWORDS `. The function signature is:: PyObject *PyCFunctionWithKeywords(PyObject *self, @@ -190,7 +190,7 @@ Implementing functions and methods .. c:type:: _PyCFunctionFast Type of the functions used to implement Python callables in C - with signature :const:`METH_FASTCALL`. + with signature :c:macro:`METH_FASTCALL`. The function signature is:: PyObject *_PyCFunctionFast(PyObject *self, @@ -200,7 +200,7 @@ Implementing functions and methods .. c:type:: _PyCFunctionFastWithKeywords Type of the functions used to implement Python callables in C - with signature :const:`METH_FASTCALL | METH_KEYWORDS`. + with signature :ref:`METH_FASTCALL | METH_KEYWORDS `. The function signature is:: PyObject *_PyCFunctionFastWithKeywords(PyObject *self, @@ -211,7 +211,7 @@ Implementing functions and methods .. c:type:: PyCMethod Type of the functions used to implement Python callables in C - with signature :const:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS`. + with signature :ref:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS `. The function signature is:: PyObject *PyCMethod(PyObject *self, @@ -228,36 +228,38 @@ Implementing functions and methods Structure used to describe a method of an extension type. This structure has four fields: - .. c:member:: const char* ml_name + .. 
c:member:: const char *ml_name - name of the method + Name of the method. .. c:member:: PyCFunction ml_meth - pointer to the C implementation + Pointer to the C implementation. .. c:member:: int ml_flags - flags bits indicating how the call should be constructed + Flags bits indicating how the call should be constructed. - .. c:member:: const char* ml_doc + .. c:member:: const char *ml_doc - points to the contents of the docstring + Points to the contents of the docstring. -The :c:member:`ml_meth` is a C function pointer. The functions may be of different +The :c:member:`~PyMethodDef.ml_meth` is a C function pointer. +The functions may be of different types, but they always return :c:expr:`PyObject*`. If the function is not of the :c:type:`PyCFunction`, the compiler will require a cast in the method table. Even though :c:type:`PyCFunction` defines the first parameter as :c:expr:`PyObject*`, it is common that the method implementation uses the specific C type of the *self* object. -The :c:member:`ml_flags` field is a bitfield which can include the following flags. +The :c:member:`~PyMethodDef.ml_flags` field is a bitfield which can include +the following flags. The individual flags indicate either a calling convention or a binding convention. There are these calling conventions: -.. data:: METH_VARARGS +.. c:macro:: METH_VARARGS This is the typical calling convention, where the methods have the type :c:type:`PyCFunction`. The function expects two :c:expr:`PyObject*` values. @@ -267,8 +269,17 @@ There are these calling conventions: using :c:func:`PyArg_ParseTuple` or :c:func:`PyArg_UnpackTuple`. -.. data:: METH_VARARGS | METH_KEYWORDS +.. c:macro:: METH_KEYWORDS + Can only be used in certain combinations with other flags: + :ref:`METH_VARARGS | METH_KEYWORDS `, + :ref:`METH_FASTCALL | METH_KEYWORDS ` and + :ref:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS `. + + +.. _METH_VARARGS-METH_KEYWORDS: + +:c:expr:`METH_VARARGS | METH_KEYWORDS` Methods with these flags must be of type :c:type:`PyCFunctionWithKeywords`. The function expects three parameters: *self*, *args*, *kwargs* where *kwargs* is a dictionary of all the keyword arguments or possibly ``NULL`` @@ -276,7 +287,7 @@ There are these calling conventions: using :c:func:`PyArg_ParseTupleAndKeywords`. -.. data:: METH_FASTCALL +.. c:macro:: METH_FASTCALL Fast calling convention supporting only positional arguments. The methods have the type :c:type:`_PyCFunctionFast`. @@ -291,9 +302,10 @@ There are these calling conventions: ``METH_FASTCALL`` is now part of the :ref:`stable ABI `. -.. data:: METH_FASTCALL | METH_KEYWORDS +.. _METH_FASTCALL-METH_KEYWORDS: - Extension of :const:`METH_FASTCALL` supporting also keyword arguments, +:c:expr:`METH_FASTCALL | METH_KEYWORDS` + Extension of :c:macro:`METH_FASTCALL` supporting also keyword arguments, with methods of type :c:type:`_PyCFunctionFastWithKeywords`. Keyword arguments are passed the same way as in the :ref:`vectorcall protocol `: @@ -306,10 +318,18 @@ There are these calling conventions: .. versionadded:: 3.7 -.. data:: METH_METHOD | METH_FASTCALL | METH_KEYWORDS +.. c:macro:: METH_METHOD + + Can only be used in the combination with other flags: + :ref:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS `. + + +.. _METH_METHOD-METH_FASTCALL-METH_KEYWORDS: - Extension of :const:`METH_FASTCALL | METH_KEYWORDS` supporting the *defining - class*, that is, the class that contains the method in question. 
+:c:expr:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS` + Extension of :ref:`METH_FASTCALL | METH_KEYWORDS ` + supporting the *defining class*, that is, + the class that contains the method in question. The defining class might be a superclass of ``Py_TYPE(self)``. The method needs to be of type :c:type:`PyCMethod`, the same as for @@ -319,10 +339,10 @@ There are these calling conventions: .. versionadded:: 3.9 -.. data:: METH_NOARGS +.. c:macro:: METH_NOARGS Methods without parameters don't need to check whether arguments are given if - they are listed with the :const:`METH_NOARGS` flag. They need to be of type + they are listed with the :c:macro:`METH_NOARGS` flag. They need to be of type :c:type:`PyCFunction`. The first parameter is typically named *self* and will hold a reference to the module or object instance. In all cases the second parameter will be ``NULL``. @@ -331,9 +351,9 @@ There are these calling conventions: :c:macro:`Py_UNUSED` can be used to prevent a compiler warning. -.. data:: METH_O +.. c:macro:: METH_O - Methods with a single object argument can be listed with the :const:`METH_O` + Methods with a single object argument can be listed with the :c:macro:`METH_O` flag, instead of invoking :c:func:`PyArg_ParseTuple` with a ``"O"`` argument. They have the type :c:type:`PyCFunction`, with the *self* parameter, and a :c:expr:`PyObject*` parameter representing the single argument. @@ -345,7 +365,7 @@ defined for modules. At most one of these flags may be set for any given method. -.. data:: METH_CLASS +.. c:macro:: METH_CLASS .. index:: pair: built-in function; classmethod @@ -355,7 +375,7 @@ method. function. -.. data:: METH_STATIC +.. c:macro:: METH_STATIC .. index:: pair: built-in function; staticmethod @@ -367,13 +387,13 @@ One other constant controls whether a method is loaded in place of another definition with the same method name. -.. data:: METH_COEXIST +.. c:macro:: METH_COEXIST The method will be loaded in place of existing definitions. Without *METH_COEXIST*, the default is to skip repeated definitions. Since slot wrappers are loaded before the method table, the existence of a *sq_contains* slot, for example, would generate a wrapped method named - :meth:`__contains__` and preclude the loading of a corresponding + :meth:`~object.__contains__` and preclude the loading of a corresponding PyCFunction with the same name. With the flag defined, the PyCFunction will be loaded in place of the wrapper object and will co-exist with the slot. This is helpful because calls to PyCFunctions are optimized more @@ -414,7 +434,7 @@ Accessing attributes of extension types The string should be static, no copy is made of it. Typically, it is defined using :c:macro:`PyDoc_STR`. - By default (when :c:member:`flags` is ``0``), members allow + By default (when :c:member:`~PyMemberDef.flags` is ``0``), members allow both read and write access. Use the :c:macro:`Py_READONLY` flag for read-only access. Certain types, like :c:macro:`Py_T_STRING`, imply :c:macro:`Py_READONLY`. @@ -440,8 +460,8 @@ Accessing attributes of extension types The legacy offsets :c:member:`~PyTypeObject.tp_dictoffset` and :c:member:`~PyTypeObject.tp_weaklistoffset` can be defined similarly using ``"__dictoffset__"`` and ``"__weaklistoffset__"`` members, but extensions - are strongly encouraged to use :const:`Py_TPFLAGS_MANAGED_DICT` and - :const:`Py_TPFLAGS_MANAGED_WEAKREF` instead. + are strongly encouraged to use :c:macro:`Py_TPFLAGS_MANAGED_DICT` and + :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` instead. .. 
versionchanged:: 3.12 @@ -494,7 +514,7 @@ The following flags can be used with :c:member:`PyMemberDef.flags`: Can only be used as part of :c:member:`Py_tp_members ` :c:type:`slot ` when creating a class using negative - :c:member:`~PyTypeDef.basicsize`. + :c:member:`~PyType_Spec.basicsize`. It is mandatory in that case. This flag is only used in :c:type:`PyTypeSlot`. @@ -509,19 +529,19 @@ The following flags can be used with :c:member:`PyMemberDef.flags`: .. versionchanged:: 3.10 - The :const:`!RESTRICTED`, :const:`!READ_RESTRICTED` and - :const:`!WRITE_RESTRICTED` macros available with + The :c:macro:`!RESTRICTED`, :c:macro:`!READ_RESTRICTED` and + :c:macro:`!WRITE_RESTRICTED` macros available with ``#include "structmember.h"`` are deprecated. - :const:`!READ_RESTRICTED` and :const:`!RESTRICTED` are equivalent to - :const:`Py_AUDIT_READ`; :const:`!WRITE_RESTRICTED` does nothing. + :c:macro:`!READ_RESTRICTED` and :c:macro:`!RESTRICTED` are equivalent to + :c:macro:`Py_AUDIT_READ`; :c:macro:`!WRITE_RESTRICTED` does nothing. .. index:: single: READONLY .. versionchanged:: 3.12 - The :const:`!READONLY` macro was renamed to :const:`Py_READONLY`. - The :const:`!PY_AUDIT_READ` macro was renamed with the ``Py_`` prefix. + The :c:macro:`!READONLY` macro was renamed to :c:macro:`Py_READONLY`. + The :c:macro:`!PY_AUDIT_READ` macro was renamed with the ``Py_`` prefix. The new names are now always available. Previously, these required ``#include "structmember.h"``. The header is still available and it provides the old names. diff --git a/Doc/c-api/sys.rst b/Doc/c-api/sys.rst index c4077b2a5620d6..aed625c5f6cdae 100644 --- a/Doc/c-api/sys.rst +++ b/Doc/c-api/sys.rst @@ -8,8 +8,9 @@ Operating System Utilities .. c:function:: PyObject* PyOS_FSPath(PyObject *path) Return the file system representation for *path*. If the object is a - :class:`str` or :class:`bytes` object, then its reference count is - incremented. If the object implements the :class:`os.PathLike` interface, + :class:`str` or :class:`bytes` object, then a new + :term:`strong reference` is returned. + If the object implements the :class:`os.PathLike` interface, then :meth:`~os.PathLike.__fspath__` is returned as long as it is a :class:`str` or :class:`bytes` object. Otherwise :exc:`TypeError` is raised and ``NULL`` is returned. @@ -97,16 +98,16 @@ Operating System Utilities .. c:function:: int PyOS_CheckStack() Return true when the interpreter runs out of stack space. This is a reliable - check, but is only available when :const:`USE_STACKCHECK` is defined (currently + check, but is only available when :c:macro:`USE_STACKCHECK` is defined (currently on certain versions of Windows using the Microsoft Visual C++ compiler). - :const:`USE_STACKCHECK` will be defined automatically; you should never + :c:macro:`USE_STACKCHECK` will be defined automatically; you should never change the definition in your own code. .. c:function:: PyOS_sighandler_t PyOS_getsig(int i) Return the current signal handler for signal *i*. This is a thin wrapper around - either :c:func:`sigaction` or :c:func:`signal`. Do not call those functions + either :c:func:`!sigaction` or :c:func:`!signal`. Do not call those functions directly! :c:type:`PyOS_sighandler_t` is a typedef alias for :c:expr:`void (\*)(int)`. @@ -114,7 +115,7 @@ Operating System Utilities .. c:function:: PyOS_sighandler_t PyOS_setsig(int i, PyOS_sighandler_t h) Set the signal handler for signal *i* to be *h*; return the old signal handler. 
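As an illustrative sketch of the save/restore pattern these thin wrappers support (``my_handler`` is a hypothetical handler, not something defined in this patch)::

   #include <signal.h>

   static void
   my_handler(int signum)
   {
       (void)signum;   /* hypothetical handler body */
   }

   static void
   install_and_restore(void)
   {
       /* Save the current SIGINT handler, install ours, restore it later. */
       PyOS_sighandler_t old = PyOS_getsig(SIGINT);
       PyOS_setsig(SIGINT, my_handler);
       /* ... */
       PyOS_setsig(SIGINT, old);
   }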
- This is a thin wrapper around either :c:func:`sigaction` or :c:func:`signal`. Do + This is a thin wrapper around either :c:func:`!sigaction` or :c:func:`!signal`. Do not call those functions directly! :c:type:`PyOS_sighandler_t` is a typedef alias for :c:expr:`void (\*)(int)`. @@ -167,7 +168,7 @@ Operating System Utilities .. versionchanged:: 3.8 The function now uses the UTF-8 encoding on Windows if - :c:member:`PyConfig.legacy_windows_fs_encoding` is zero; + :c:member:`PyPreConfig.legacy_windows_fs_encoding` is zero; .. c:function:: char* Py_EncodeLocale(const wchar_t *text, size_t *error_pos) @@ -209,7 +210,7 @@ Operating System Utilities .. versionchanged:: 3.8 The function now uses the UTF-8 encoding on Windows if - :c:member:`PyConfig.legacy_windows_fs_encoding` is zero. + :c:member:`PyPreConfig.legacy_windows_fs_encoding` is zero. .. _systemfunctions: @@ -363,7 +364,7 @@ Process Control This function should only be invoked when a condition is detected that would make it dangerous to continue using the Python interpreter; e.g., when the object administration appears to be corrupted. On Unix, the standard C library - function :c:func:`abort` is called which will attempt to produce a :file:`core` + function :c:func:`!abort` is called which will attempt to produce a :file:`core` file. The ``Py_FatalError()`` function is replaced with a macro which logs diff --git a/Doc/c-api/tuple.rst b/Doc/c-api/tuple.rst index 3fe1062aa8539a..b3710560ebe7ac 100644 --- a/Doc/c-api/tuple.rst +++ b/Doc/c-api/tuple.rst @@ -114,6 +114,8 @@ Tuple Objects raises :exc:`MemoryError` or :exc:`SystemError`. +.. _struct-sequence-objects: + Struct Sequence Objects ----------------------- @@ -145,39 +147,39 @@ type. Contains the meta information of a struct sequence type to create. - +-------------------+------------------------------+--------------------------------------+ - | Field | C Type | Meaning | - +===================+==============================+======================================+ - | ``name`` | ``const char *`` | name of the struct sequence type | - +-------------------+------------------------------+--------------------------------------+ - | ``doc`` | ``const char *`` | pointer to docstring for the type | - | | | or ``NULL`` to omit | - +-------------------+------------------------------+--------------------------------------+ - | ``fields`` | ``PyStructSequence_Field *`` | pointer to ``NULL``-terminated array | - | | | with field names of the new type | - +-------------------+------------------------------+--------------------------------------+ - | ``n_in_sequence`` | ``int`` | number of fields visible to the | - | | | Python side (if used as tuple) | - +-------------------+------------------------------+--------------------------------------+ + .. c:member:: const char *name + + Name of the struct sequence type. + + .. c:member:: const char *doc + + Pointer to docstring for the type or ``NULL`` to omit. + + .. c:member:: PyStructSequence_Field *fields + + Pointer to ``NULL``-terminated array with field names of the new type. + + .. c:member:: int n_in_sequence + + Number of fields visible to the Python side (if used as tuple). .. c:type:: PyStructSequence_Field Describes a field of a struct sequence. As a struct sequence is modeled as a tuple, all fields are typed as :c:expr:`PyObject*`. 
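To show how the struct sequence description structures documented above fit together, a minimal sketch (the type name, field names and docstrings are invented)::

   static PyStructSequence_Field point_fields[] = {
       {"x", "x coordinate"},
       {"y", "y coordinate"},
       {NULL, NULL}                    /* sentinel */
   };

   static PyStructSequence_Desc point_desc = {
       "mymod.Point",                  /* name */
       "A simple 2D point.",           /* doc */
       point_fields,                   /* fields */
       2,                              /* n_in_sequence */
   };

   static PyTypeObject *PointType;

   static int
   register_point_type(void)
   {
       /* Typically called once, e.g. during module initialization. */
       PointType = PyStructSequence_NewType(&point_desc);
       return (PointType == NULL) ? -1 : 0;
   }

Instances can then be created with :c:func:`PyStructSequence_New` and filled with :c:func:`PyStructSequence_SetItem`, which steals a reference to the stored value.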
The index in the - :attr:`fields` array of the :c:type:`PyStructSequence_Desc` determines which + :c:member:`~PyStructSequence_Desc.fields` array of + the :c:type:`PyStructSequence_Desc` determines which field of the struct sequence is described. - +-----------+------------------+-----------------------------------------+ - | Field | C Type | Meaning | - +===========+==================+=========================================+ - | ``name`` | ``const char *`` | name for the field or ``NULL`` to end | - | | | the list of named fields, set to | - | | | :c:data:`PyStructSequence_UnnamedField` | - | | | to leave unnamed | - +-----------+------------------+-----------------------------------------+ - | ``doc`` | ``const char *`` | field docstring or ``NULL`` to omit | - +-----------+------------------+-----------------------------------------+ + .. c:member:: const char *name + + Name for the field or ``NULL`` to end the list of named fields, + set to :c:data:`PyStructSequence_UnnamedField` to leave unnamed. + + .. c:member:: const char *doc + + Field docstring or ``NULL`` to omit. .. c:var:: const char * const PyStructSequence_UnnamedField diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index a5f333e2a31e03..0f58326f6c06b7 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -103,7 +103,7 @@ Type Objects :c:func:`PyType_AddWatcher` will be called whenever :c:func:`PyType_Modified` reports a change to *type*. (The callback may be called only once for a series of consecutive modifications to *type*, if - :c:func:`PyType_Lookup` is not called on *type* between the modifications; + :c:func:`!_PyType_Lookup` is not called on *type* between the modifications; this is an implementation detail and subject to change.) An extension should never call ``PyType_Watch`` with a *watcher_id* that was @@ -132,7 +132,7 @@ Type Objects .. c:function:: int PyType_IS_GC(PyTypeObject *o) Return true if the type object includes support for the cycle detector; this - tests the type flag :const:`Py_TPFLAGS_HAVE_GC`. + tests the type flag :c:macro:`Py_TPFLAGS_HAVE_GC`. .. c:function:: int PyType_IsSubtype(PyTypeObject *a, PyTypeObject *b) @@ -165,10 +165,10 @@ Type Objects .. note:: If some of the base classes implements the GC protocol and the provided - type does not include the :const:`Py_TPFLAGS_HAVE_GC` in its flags, then + type does not include the :c:macro:`Py_TPFLAGS_HAVE_GC` in its flags, then the GC protocol will be automatically implemented from its parents. On the contrary, if the type being created does include - :const:`Py_TPFLAGS_HAVE_GC` in its flags then it **must** implement the + :c:macro:`Py_TPFLAGS_HAVE_GC` in its flags then it **must** implement the GC protocol itself by at least implementing the :c:member:`~PyTypeObject.tp_traverse` handle. @@ -215,7 +215,7 @@ Type Objects ``Py_TYPE(self)`` may be a *subclass* of the intended class, and subclasses are not necessarily defined in the same module as their superclass. See :c:type:`PyCMethod` to get the class that defines the method. - See :c:func:`PyType_GetModuleByDef` for cases when ``PyCMethod`` cannot + See :c:func:`PyType_GetModuleByDef` for cases when :c:type:`!PyCMethod` cannot be used. .. versionadded:: 3.9 @@ -268,7 +268,7 @@ The following functions and structs are used to create .. c:function:: PyObject* PyType_FromMetaclass(PyTypeObject *metaclass, PyObject *module, PyType_Spec *spec, PyObject *bases) Create and return a :ref:`heap type ` from the *spec* - (see :const:`Py_TPFLAGS_HEAPTYPE`). 
+ (see :c:macro:`Py_TPFLAGS_HEAPTYPE`). The metaclass *metaclass* is used to construct the resulting type object. When *metaclass* is ``NULL``, the metaclass is derived from *bases* @@ -420,7 +420,7 @@ The following functions and structs are used to create - The requested :c:member:`PyType_Spec.basicsize` is zero, suggesting that the subclass does not access the instance's memory directly. - - With the :const:`Py_TPFLAGS_ITEMS_AT_END` flag. + - With the :c:macro:`Py_TPFLAGS_ITEMS_AT_END` flag. .. c:member:: unsigned int flags @@ -471,16 +471,16 @@ The following functions and structs are used to create * :c:member:`~PyTypeObject.tp_weaklist` * :c:member:`~PyTypeObject.tp_vectorcall` * :c:member:`~PyTypeObject.tp_weaklistoffset` - (use :const:`Py_TPFLAGS_MANAGED_WEAKREF` instead) + (use :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` instead) * :c:member:`~PyTypeObject.tp_dictoffset` - (use :const:`Py_TPFLAGS_MANAGED_DICT` instead) + (use :c:macro:`Py_TPFLAGS_MANAGED_DICT` instead) * :c:member:`~PyTypeObject.tp_vectorcall_offset` (see :ref:`PyMemberDef `) Setting :c:data:`Py_tp_bases` or :c:data:`Py_tp_base` may be problematic on some platforms. To avoid issues, use the *bases* argument of - :py:func:`PyType_FromSpecWithBases` instead. + :c:func:`PyType_FromSpecWithBases` instead. .. versionchanged:: 3.9 diff --git a/Doc/c-api/typehints.rst b/Doc/c-api/typehints.rst index 4c1957a2a1dbca..98fe68737deb81 100644 --- a/Doc/c-api/typehints.rst +++ b/Doc/c-api/typehints.rst @@ -35,7 +35,7 @@ two types exist -- :ref:`GenericAlias ` and ... } - .. seealso:: The data model method :meth:`__class_getitem__`. + .. seealso:: The data model method :meth:`~object.__class_getitem__`. .. versionadded:: 3.9 diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst index 7249cfe79c32e9..faa183e27fcfa2 100644 --- a/Doc/c-api/typeobj.rst +++ b/Doc/c-api/typeobj.rst @@ -147,7 +147,7 @@ Quick Reference +------------------------------------------------+-----------------------------------+-------------------+---+---+---+---+ | :c:member:`~PyTypeObject.tp_vectorcall` | :c:type:`vectorcallfunc` | | | | | | +------------------------------------------------+-----------------------------------+-------------------+---+---+---+---+ - | [:c:member:`~PyTypeObject.tp_watched`] | char | | | | | | + | [:c:member:`~PyTypeObject.tp_watched`] | unsigned char | | | | | | +------------------------------------------------+-----------------------------------+-------------------+---+---+---+---+ .. [#slots] @@ -163,9 +163,9 @@ Quick Reference .. [#cols] Columns: - **"O"**: set on :c:type:`PyBaseObject_Type` + **"O"**: set on :c:data:`PyBaseObject_Type` - **"T"**: set on :c:type:`PyType_Type` + **"T"**: set on :c:data:`PyType_Type` **"D"**: default (if slot is set to ``NULL``) @@ -485,17 +485,17 @@ PyObject Slots -------------- The type object structure extends the :c:type:`PyVarObject` structure. The -:attr:`ob_size` field is used for dynamic types (created by :func:`type_new`, +:c:member:`~PyVarObject.ob_size` field is used for dynamic types (created by :c:func:`!type_new`, usually called from a class statement). Note that :c:data:`PyType_Type` (the metatype) initializes :c:member:`~PyTypeObject.tp_itemsize`, which means that its instances (i.e. -type objects) *must* have the :attr:`ob_size` field. +type objects) *must* have the :c:member:`~PyVarObject.ob_size` field. .. c:member:: Py_ssize_t PyObject.ob_refcnt This is the type object's reference count, initialized to ``1`` by the ``PyObject_HEAD_INIT`` macro. 
Note that for :ref:`statically allocated type - objects `, the type's instances (objects whose :attr:`ob_type` + objects `, the type's instances (objects whose :c:member:`~PyObject.ob_type` points back to the type) do *not* count as references. But for :ref:`dynamically allocated type objects `, the instances *do* count as references. @@ -519,8 +519,8 @@ type objects) *must* have the :attr:`ob_size` field. Foo_Type.ob_type = &PyType_Type; This should be done before any instances of the type are created. - :c:func:`PyType_Ready` checks if :attr:`ob_type` is ``NULL``, and if so, - initializes it to the :attr:`ob_type` field of the base class. + :c:func:`PyType_Ready` checks if :c:member:`~PyObject.ob_type` is ``NULL``, and if so, + initializes it to the :c:member:`~PyObject.ob_type` field of the base class. :c:func:`PyType_Ready` will not change this field if it is non-zero. **Inheritance:** @@ -569,8 +569,8 @@ PyTypeObject Slots Each slot has a section describing inheritance. If :c:func:`PyType_Ready` may set a value when the field is set to ``NULL`` then there will also be -a "Default" section. (Note that many fields set on :c:type:`PyBaseObject_Type` -and :c:type:`PyType_Type` effectively act as defaults.) +a "Default" section. (Note that many fields set on :c:data:`PyBaseObject_Type` +and :c:data:`PyType_Type` effectively act as defaults.) .. c:member:: const char* PyTypeObject.tp_name @@ -579,7 +579,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) name, followed by a dot, followed by the type name; for built-in types, it should be just the type name. If the module is a submodule of a package, the full package name is part of the full module name. For example, a type named - :class:`T` defined in module :mod:`M` in subpackage :mod:`Q` in package :mod:`P` + :class:`!T` defined in module :mod:`!M` in subpackage :mod:`!Q` in package :mod:`!P` should have the :c:member:`~PyTypeObject.tp_name` initializer ``"P.Q.M.T"``. For :ref:`dynamically allocated type objects `, @@ -619,20 +619,20 @@ and :c:type:`PyType_Type` effectively act as defaults.) instances have the same size, given in :c:member:`~PyTypeObject.tp_basicsize`. For a type with variable-length instances, the instances must have an - :attr:`ob_size` field, and the instance size is :c:member:`~PyTypeObject.tp_basicsize` plus N + :c:member:`~PyVarObject.ob_size` field, and the instance size is :c:member:`~PyTypeObject.tp_basicsize` plus N times :c:member:`~PyTypeObject.tp_itemsize`, where N is the "length" of the object. The value of - N is typically stored in the instance's :attr:`ob_size` field. There are - exceptions: for example, ints use a negative :attr:`ob_size` to indicate a + N is typically stored in the instance's :c:member:`~PyVarObject.ob_size` field. There are + exceptions: for example, ints use a negative :c:member:`~PyVarObject.ob_size` to indicate a negative number, and N is ``abs(ob_size)`` there. Also, the presence of an - :attr:`ob_size` field in the instance layout doesn't mean that the instance + :c:member:`~PyVarObject.ob_size` field in the instance layout doesn't mean that the instance structure is variable-length (for example, the structure for the list type has - fixed-length instances, yet those instances have a meaningful :attr:`ob_size` + fixed-length instances, yet those instances have a meaningful :c:member:`~PyVarObject.ob_size` field). 
The basic size includes the fields in the instance declared by the macro :c:macro:`PyObject_HEAD` or :c:macro:`PyObject_VAR_HEAD` (whichever is used to - declare the instance struct) and this in turn includes the :attr:`_ob_prev` and - :attr:`_ob_next` fields if they are present. This means that the only correct + declare the instance struct) and this in turn includes the :c:member:`~PyObject._ob_prev` and + :c:member:`~PyObject._ob_next` fields if they are present. This means that the only correct way to get an initializer for the :c:member:`~PyTypeObject.tp_basicsize` is to use the ``sizeof`` operator on the struct used to declare the instance layout. The basic size does not include the GC header size. @@ -669,15 +669,15 @@ and :c:type:`PyType_Type` effectively act as defaults.) memory buffers owned by the instance (using the freeing function corresponding to the allocation function used to allocate the buffer), and call the type's :c:member:`~PyTypeObject.tp_free` function. If the type is not subtypable - (doesn't have the :const:`Py_TPFLAGS_BASETYPE` flag bit set), it is + (doesn't have the :c:macro:`Py_TPFLAGS_BASETYPE` flag bit set), it is permissible to call the object deallocator directly instead of via :c:member:`~PyTypeObject.tp_free`. The object deallocator should be the one used to allocate the instance; this is normally :c:func:`PyObject_Del` if the instance was allocated - using :c:func:`PyObject_New` or :c:func:`PyObject_VarNew`, or + using :c:macro:`PyObject_New` or :c:macro:`PyObject_NewVar`, or :c:func:`PyObject_GC_Del` if the instance was allocated using - :c:func:`PyObject_GC_New` or :c:func:`PyObject_GC_NewVar`. + :c:macro:`PyObject_GC_New` or :c:macro:`PyObject_GC_NewVar`. - If the type supports garbage collection (has the :const:`Py_TPFLAGS_HAVE_GC` + If the type supports garbage collection (has the :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit set), the destructor should call :c:func:`PyObject_GC_UnTrack` before clearing any member fields. @@ -689,8 +689,9 @@ and :c:type:`PyType_Type` effectively act as defaults.) Py_TYPE(self)->tp_free((PyObject *)self); } - Finally, if the type is heap allocated (:const:`Py_TPFLAGS_HEAPTYPE`), the - deallocator should decrement the reference count for its type object after + Finally, if the type is heap allocated (:c:macro:`Py_TPFLAGS_HEAPTYPE`), the + deallocator should release the owned reference to its type object + (via :c:func:`Py_DECREF`) after calling the type deallocator. In order to avoid dangling pointers, the recommended way to achieve this is: @@ -716,12 +717,12 @@ and :c:type:`PyType_Type` effectively act as defaults.) a more efficient alternative of the simpler :c:member:`~PyTypeObject.tp_call`. - This field is only used if the flag :const:`Py_TPFLAGS_HAVE_VECTORCALL` + This field is only used if the flag :c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` is set. If so, this must be a positive integer containing the offset in the instance of a :c:type:`vectorcallfunc` pointer. The *vectorcallfunc* pointer may be ``NULL``, in which case the instance behaves - as if :const:`Py_TPFLAGS_HAVE_VECTORCALL` was not set: calling the instance + as if :c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` was not set: calling the instance falls back to :c:member:`~PyTypeObject.tp_call`. Any class that sets ``Py_TPFLAGS_HAVE_VECTORCALL`` must also set @@ -740,15 +741,15 @@ and :c:type:`PyType_Type` effectively act as defaults.) Before version 3.12, it was not recommended for :ref:`mutable heap types ` to implement the vectorcall protocol. 
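A hedged sketch of the vectorcall machinery just described, assuming a hypothetical ``MyCallableObject``: the instance stores a :c:type:`vectorcallfunc` pointer, the type records its offset, and keyword arguments are rejected for simplicity::

   typedef struct {
       PyObject_HEAD
       vectorcallfunc vectorcall;
   } MyCallableObject;

   static PyObject *
   mycallable_vectorcall(PyObject *callable, PyObject *const *args,
                         size_t nargsf, PyObject *kwnames)
   {
       (void)callable;   /* unused in this toy example */
       if (kwnames != NULL && PyTuple_GET_SIZE(kwnames)) {
           PyErr_SetString(PyExc_TypeError, "no keyword arguments expected");
           return NULL;
       }
       /* Toy behaviour: return the number of positional arguments. */
       return PyLong_FromSsize_t(PyVectorcall_NARGS(nargsf));
   }

   /* In the PyTypeObject (each new instance must also point its
      ``vectorcall`` member at ``mycallable_vectorcall``, e.g. in tp_new):
          .tp_vectorcall_offset = offsetof(MyCallableObject, vectorcall),
          .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_VECTORCALL,
          .tp_call = PyVectorcall_Call,
   */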
- When a user sets :attr:`~type.__call__` in Python code, only *tp_call* is + When a user sets :attr:`~object.__call__` in Python code, only *tp_call* is updated, likely making it inconsistent with the vectorcall function. Since 3.12, setting ``__call__`` will disable vectorcall optimization - by clearing the :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag. + by clearing the :c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` flag. **Inheritance:** This field is always inherited. - However, the :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag is not + However, the :c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` flag is not always inherited. If it's not set, then the subclass won't use :ref:`vectorcall `, except when :c:func:`PyVectorcall_Call` is explicitly called. @@ -764,7 +765,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** - Group: :attr:`tp_getattr`, :attr:`tp_getattro` + Group: :c:member:`~PyTypeObject.tp_getattr`, :c:member:`~PyTypeObject.tp_getattro` This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_getattro`: a subtype inherits both :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` from its base type when @@ -781,7 +782,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** - Group: :attr:`tp_setattr`, :attr:`tp_setattro` + Group: :c:member:`~PyTypeObject.tp_setattr`, :c:member:`~PyTypeObject.tp_setattro` This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_setattro`: a subtype inherits both :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` from its base type when @@ -883,7 +884,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) normal return value; when an error occurs during the computation of the hash value, the function should set an exception and return ``-1``. - When this field is not set (*and* :attr:`tp_richcompare` is not set), + When this field is not set (*and* :c:member:`~PyTypeObject.tp_richcompare` is not set), an attempt to take the hash of the object raises :exc:`TypeError`. This is the same as setting it to :c:func:`PyObject_HashNotImplemented`. @@ -897,7 +898,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** - Group: :attr:`tp_hash`, :attr:`tp_richcompare` + Group: :c:member:`~PyTypeObject.tp_hash`, :c:member:`~PyTypeObject.tp_richcompare` This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_richcompare`: a subtype inherits both of @@ -956,7 +957,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** - Group: :attr:`tp_getattr`, :attr:`tp_getattro` + Group: :c:member:`~PyTypeObject.tp_getattr`, :c:member:`~PyTypeObject.tp_getattro` This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_getattr`: a subtype inherits both :c:member:`~PyTypeObject.tp_getattr` and :c:member:`~PyTypeObject.tp_getattro` from its base type when @@ -964,7 +965,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Default:** - :c:type:`PyBaseObject_Type` uses :c:func:`PyObject_GenericGetAttr`. + :c:data:`PyBaseObject_Type` uses :c:func:`PyObject_GenericGetAttr`. .. c:member:: setattrofunc PyTypeObject.tp_setattro @@ -982,7 +983,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) 
**Inheritance:** - Group: :attr:`tp_setattr`, :attr:`tp_setattro` + Group: :c:member:`~PyTypeObject.tp_setattr`, :c:member:`~PyTypeObject.tp_setattro` This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_setattr`: a subtype inherits both :c:member:`~PyTypeObject.tp_setattr` and :c:member:`~PyTypeObject.tp_setattro` from its base type when @@ -990,7 +991,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Default:** - :c:type:`PyBaseObject_Type` uses :c:func:`PyObject_GenericSetAttr`. + :c:data:`PyBaseObject_Type` uses :c:func:`PyObject_GenericSetAttr`. .. c:member:: PyBufferProcs* PyTypeObject.tp_as_buffer @@ -1022,30 +1023,32 @@ and :c:type:`PyType_Type` effectively act as defaults.) this flag bit. The flag bits that pertain to extension structures are strictly inherited if the extension structure is inherited, i.e. the base type's value of the flag bit is copied into the subtype together with a pointer to the extension - structure. The :const:`Py_TPFLAGS_HAVE_GC` flag bit is inherited together with + structure. The :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit is inherited together with the :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` fields, i.e. if the - :const:`Py_TPFLAGS_HAVE_GC` flag bit is clear in the subtype and the + :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit is clear in the subtype and the :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` fields in the subtype exist and have ``NULL`` values. .. XXX are most flag bits *really* inherited individually? **Default:** - :c:type:`PyBaseObject_Type` uses + :c:data:`PyBaseObject_Type` uses ``Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE``. **Bit Masks:** + .. c:namespace:: NULL + The following bit masks are currently defined; these can be ORed together using the ``|`` operator to form the value of the :c:member:`~PyTypeObject.tp_flags` field. The macro :c:func:`PyType_HasFeature` takes a type and a flags value, *tp* and *f*, and checks whether ``tp->tp_flags & f`` is non-zero. - .. data:: Py_TPFLAGS_HEAPTYPE + .. c:macro:: Py_TPFLAGS_HEAPTYPE This bit is set when the type object itself is allocated on the heap, for example, types created dynamically using :c:func:`PyType_FromSpec`. In this - case, the :attr:`ob_type` field of its instances is considered a reference to + case, the :c:member:`~PyObject.ob_type` field of its instances is considered a reference to the type, and the type object is INCREF'ed when a new instance is created, and DECREF'ed when an instance is destroyed (this does not apply to instances of subtypes; only the type referenced by the instance's ob_type gets INCREF'ed or @@ -1056,7 +1059,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) ??? - .. data:: Py_TPFLAGS_BASETYPE + .. c:macro:: Py_TPFLAGS_BASETYPE This bit is set when the type can be used as the base type of another type. If this bit is clear, the type cannot be subtyped (similar to a "final" class in @@ -1067,7 +1070,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) ??? - .. data:: Py_TPFLAGS_READY + .. c:macro:: Py_TPFLAGS_READY This bit is set when the type object has been fully initialized by :c:func:`PyType_Ready`. @@ -1077,7 +1080,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) ??? - .. data:: Py_TPFLAGS_READYING + .. c:macro:: Py_TPFLAGS_READYING This bit is set while :c:func:`PyType_Ready` is in the process of initializing the type object. @@ -1087,10 +1090,10 @@ and :c:type:`PyType_Type` effectively act as defaults.) ??? - .. 
data:: Py_TPFLAGS_HAVE_GC + .. c:macro:: Py_TPFLAGS_HAVE_GC This bit is set when the object supports garbage collection. If this bit - is set, instances must be created using :c:func:`PyObject_GC_New` and + is set, instances must be created using :c:macro:`PyObject_GC_New` and destroyed using :c:func:`PyObject_GC_Del`. More information in section :ref:`supporting-cycle-detection`. This bit also implies that the GC-related fields :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` are present in @@ -1098,28 +1101,28 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** - Group: :const:`Py_TPFLAGS_HAVE_GC`, :attr:`tp_traverse`, :attr:`tp_clear` + Group: :c:macro:`Py_TPFLAGS_HAVE_GC`, :c:member:`~PyTypeObject.tp_traverse`, :c:member:`~PyTypeObject.tp_clear` - The :const:`Py_TPFLAGS_HAVE_GC` flag bit is inherited - together with the :attr:`tp_traverse` and :attr:`tp_clear` - fields, i.e. if the :const:`Py_TPFLAGS_HAVE_GC` flag bit is - clear in the subtype and the :attr:`tp_traverse` and - :attr:`tp_clear` fields in the subtype exist and have ``NULL`` + The :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit is inherited + together with the :c:member:`~PyTypeObject.tp_traverse` and :c:member:`~PyTypeObject.tp_clear` + fields, i.e. if the :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit is + clear in the subtype and the :c:member:`~PyTypeObject.tp_traverse` and + :c:member:`~PyTypeObject.tp_clear` fields in the subtype exist and have ``NULL`` values. - .. data:: Py_TPFLAGS_DEFAULT + .. c:macro:: Py_TPFLAGS_DEFAULT This is a bitmask of all the bits that pertain to the existence of certain fields in the type object and its extension structures. Currently, it includes - the following bits: :const:`Py_TPFLAGS_HAVE_STACKLESS_EXTENSION`. + the following bits: :c:macro:`Py_TPFLAGS_HAVE_STACKLESS_EXTENSION`. **Inheritance:** ??? - .. data:: Py_TPFLAGS_METHOD_DESCRIPTOR + .. c:macro:: Py_TPFLAGS_METHOD_DESCRIPTOR This bit indicates that objects behave like unbound methods. @@ -1140,15 +1143,15 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** This flag is never inherited by types without the - :const:`Py_TPFLAGS_IMMUTABLETYPE` flag set. For extension types, it is + :c:macro:`Py_TPFLAGS_IMMUTABLETYPE` flag set. For extension types, it is inherited whenever :c:member:`~PyTypeObject.tp_descr_get` is inherited. - .. data:: Py_TPFLAGS_MANAGED_DICT + .. c:macro:: Py_TPFLAGS_MANAGED_DICT This bit indicates that instances of the class have a ``__dict__`` attribute, and that the space for the dictionary is managed by the VM. - If this flag is set, :const:`Py_TPFLAGS_HAVE_GC` should also be set. + If this flag is set, :c:macro:`Py_TPFLAGS_HAVE_GC` should also be set. .. versionadded:: 3.12 @@ -1158,7 +1161,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) :c:member:`~PyTypeObject.tp_dictoffset` field is set in a superclass. - .. data:: Py_TPFLAGS_MANAGED_WEAKREF + .. c:macro:: Py_TPFLAGS_MANAGED_WEAKREF This bit indicates that instances of the class should be weakly referenceable. @@ -1171,14 +1174,14 @@ and :c:type:`PyType_Type` effectively act as defaults.) :c:member:`~PyTypeObject.tp_weaklistoffset` field is set in a superclass. - .. data:: Py_TPFLAGS_ITEMS_AT_END + .. c:macro:: Py_TPFLAGS_ITEMS_AT_END Only usable with variable-size types, i.e. ones with non-zero - :c:member:`~PyObject.tp_itemsize`. + :c:member:`~PyTypeObject.tp_itemsize`. 
Indicates that the variable-sized portion of an instance of this type is at the end of the instance's memory area, at an offset of - :c:expr:`Py_TYPE(obj)->tp_basicsize` (which may be different in each + ``Py_TYPE(obj)->tp_basicsize`` (which may be different in each subclass). When setting this flag, be sure that all superclasses either @@ -1194,14 +1197,14 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. XXX Document more flags here? - .. data:: Py_TPFLAGS_LONG_SUBCLASS - .. data:: Py_TPFLAGS_LIST_SUBCLASS - .. data:: Py_TPFLAGS_TUPLE_SUBCLASS - .. data:: Py_TPFLAGS_BYTES_SUBCLASS - .. data:: Py_TPFLAGS_UNICODE_SUBCLASS - .. data:: Py_TPFLAGS_DICT_SUBCLASS - .. data:: Py_TPFLAGS_BASE_EXC_SUBCLASS - .. data:: Py_TPFLAGS_TYPE_SUBCLASS + .. c:macro:: Py_TPFLAGS_LONG_SUBCLASS + .. c:macro:: Py_TPFLAGS_LIST_SUBCLASS + .. c:macro:: Py_TPFLAGS_TUPLE_SUBCLASS + .. c:macro:: Py_TPFLAGS_BYTES_SUBCLASS + .. c:macro:: Py_TPFLAGS_UNICODE_SUBCLASS + .. c:macro:: Py_TPFLAGS_DICT_SUBCLASS + .. c:macro:: Py_TPFLAGS_BASE_EXC_SUBCLASS + .. c:macro:: Py_TPFLAGS_TYPE_SUBCLASS These flags are used by functions such as :c:func:`PyLong_Check` to quickly determine if a type is a subclass @@ -1212,7 +1215,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) will behave differently depending on what kind of check is used. - .. data:: Py_TPFLAGS_HAVE_FINALIZE + .. c:macro:: Py_TPFLAGS_HAVE_FINALIZE This bit is set when the :c:member:`~PyTypeObject.tp_finalize` slot is present in the type structure. @@ -1225,7 +1228,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) type structure. - .. data:: Py_TPFLAGS_HAVE_VECTORCALL + .. c:macro:: Py_TPFLAGS_HAVE_VECTORCALL This bit is set when the class implements the :ref:`vectorcall protocol `. @@ -1245,7 +1248,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) This flag can now be inherited by mutable classes. - .. data:: Py_TPFLAGS_IMMUTABLETYPE + .. c:macro:: Py_TPFLAGS_IMMUTABLETYPE This bit is set for type objects that are immutable: type attributes cannot be set nor deleted. @@ -1258,7 +1261,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. versionadded:: 3.10 - .. data:: Py_TPFLAGS_DISALLOW_INSTANTIATION + .. c:macro:: Py_TPFLAGS_DISALLOW_INSTANTIATION Disallow creating instances of the type: set :c:member:`~PyTypeObject.tp_new` to NULL and don't create the ``__new__`` @@ -1289,7 +1292,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. versionadded:: 3.10 - .. data:: Py_TPFLAGS_MAPPING + .. c:macro:: Py_TPFLAGS_MAPPING This bit indicates that instances of the class may match mapping patterns when used as the subject of a :keyword:`match` block. It is automatically @@ -1298,20 +1301,20 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. note:: - :const:`Py_TPFLAGS_MAPPING` and :const:`Py_TPFLAGS_SEQUENCE` are + :c:macro:`Py_TPFLAGS_MAPPING` and :c:macro:`Py_TPFLAGS_SEQUENCE` are mutually exclusive; it is an error to enable both flags simultaneously. **Inheritance:** This flag is inherited by types that do not already set - :const:`Py_TPFLAGS_SEQUENCE`. + :c:macro:`Py_TPFLAGS_SEQUENCE`. .. seealso:: :pep:`634` -- Structural Pattern Matching: Specification .. versionadded:: 3.10 - .. data:: Py_TPFLAGS_SEQUENCE + .. c:macro:: Py_TPFLAGS_SEQUENCE This bit indicates that instances of the class may match sequence patterns when used as the subject of a :keyword:`match` block. It is automatically @@ -1320,20 +1323,20 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. 
note:: - :const:`Py_TPFLAGS_MAPPING` and :const:`Py_TPFLAGS_SEQUENCE` are + :c:macro:`Py_TPFLAGS_MAPPING` and :c:macro:`Py_TPFLAGS_SEQUENCE` are mutually exclusive; it is an error to enable both flags simultaneously. **Inheritance:** This flag is inherited by types that do not already set - :const:`Py_TPFLAGS_MAPPING`. + :c:macro:`Py_TPFLAGS_MAPPING`. .. seealso:: :pep:`634` -- Structural Pattern Matching: Specification .. versionadded:: 3.10 - .. data:: Py_TPFLAGS_VALID_VERSION_TAG + .. c:macro:: Py_TPFLAGS_VALID_VERSION_TAG Internal. Do not set or unset this flag. To indicate that a class has changed call :c:func:`PyType_Modified` @@ -1357,7 +1360,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. c:member:: traverseproc PyTypeObject.tp_traverse An optional pointer to a traversal function for the garbage collector. This is - only used if the :const:`Py_TPFLAGS_HAVE_GC` flag bit is set. The signature is:: + only used if the :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit is set. The signature is:: int tp_traverse(PyObject *self, visitproc visit, void *arg); @@ -1367,8 +1370,8 @@ and :c:type:`PyType_Type` effectively act as defaults.) The :c:member:`~PyTypeObject.tp_traverse` pointer is used by the garbage collector to detect reference cycles. A typical implementation of a :c:member:`~PyTypeObject.tp_traverse` function simply calls :c:func:`Py_VISIT` on each of the instance's members that are Python - objects that the instance owns. For example, this is function :c:func:`local_traverse` from the - :mod:`_thread` extension module:: + objects that the instance owns. For example, this is function :c:func:`!local_traverse` from the + :mod:`!_thread` extension module:: static int local_traverse(localobject *self, visitproc visit, void *arg) @@ -1419,10 +1422,10 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** - Group: :const:`Py_TPFLAGS_HAVE_GC`, :attr:`tp_traverse`, :attr:`tp_clear` + Group: :c:macro:`Py_TPFLAGS_HAVE_GC`, :c:member:`~PyTypeObject.tp_traverse`, :c:member:`~PyTypeObject.tp_clear` This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_clear` and the - :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and + :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and :c:member:`~PyTypeObject.tp_clear` are all inherited from the base type if they are all zero in the subtype. @@ -1430,7 +1433,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. c:member:: inquiry PyTypeObject.tp_clear An optional pointer to a clear function for the garbage collector. This is only - used if the :const:`Py_TPFLAGS_HAVE_GC` flag bit is set. The signature is:: + used if the :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit is set. The signature is:: int tp_clear(PyObject *); @@ -1459,9 +1462,10 @@ and :c:type:`PyType_Type` effectively act as defaults.) } The :c:func:`Py_CLEAR` macro should be used, because clearing references is - delicate: the reference to the contained object must not be decremented until + delicate: the reference to the contained object must not be released + (via :c:func:`Py_DECREF`) until after the pointer to the contained object is set to ``NULL``. 
This is because - decrementing the reference count may cause the contained object to become trash, + releasing the reference may cause the contained object to become trash, triggering a chain of reclamation activity that may include invoking arbitrary Python code (due to finalizers, or weakref callbacks, associated with the contained object). If it's possible for such code to reference *self* again, @@ -1486,10 +1490,10 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** - Group: :const:`Py_TPFLAGS_HAVE_GC`, :attr:`tp_traverse`, :attr:`tp_clear` + Group: :c:macro:`Py_TPFLAGS_HAVE_GC`, :c:member:`~PyTypeObject.tp_traverse`, :c:member:`~PyTypeObject.tp_clear` This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_traverse` and the - :const:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and + :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit: the flag bit, :c:member:`~PyTypeObject.tp_traverse`, and :c:member:`~PyTypeObject.tp_clear` are all inherited from the base type if they are all zero in the subtype. @@ -1511,21 +1515,23 @@ and :c:type:`PyType_Type` effectively act as defaults.) The following constants are defined to be used as the third argument for :c:member:`~PyTypeObject.tp_richcompare` and for :c:func:`PyObject_RichCompare`: - +----------------+------------+ - | Constant | Comparison | - +================+============+ - | :const:`Py_LT` | ``<`` | - +----------------+------------+ - | :const:`Py_LE` | ``<=`` | - +----------------+------------+ - | :const:`Py_EQ` | ``==`` | - +----------------+------------+ - | :const:`Py_NE` | ``!=`` | - +----------------+------------+ - | :const:`Py_GT` | ``>`` | - +----------------+------------+ - | :const:`Py_GE` | ``>=`` | - +----------------+------------+ + .. c:namespace:: NULL + + +--------------------+------------+ + | Constant | Comparison | + +====================+============+ + | .. c:macro:: Py_LT | ``<`` | + +--------------------+------------+ + | .. c:macro:: Py_LE | ``<=`` | + +--------------------+------------+ + | .. c:macro:: Py_EQ | ``==`` | + +--------------------+------------+ + | .. c:macro:: Py_NE | ``!=`` | + +--------------------+------------+ + | .. c:macro:: Py_GT | ``>`` | + +--------------------+------------+ + | .. c:macro:: Py_GE | ``>=`` | + +--------------------+------------+ The following macro is defined to ease writing rich comparison functions: @@ -1537,7 +1543,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) they may be C ints or floats). The third argument specifies the requested operation, as for :c:func:`PyObject_RichCompare`. - The return value's reference count is properly incremented. + The returned value is a new :term:`strong reference`. On error, sets an exception and returns ``NULL`` from the function. @@ -1545,7 +1551,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Inheritance:** - Group: :attr:`tp_hash`, :attr:`tp_richcompare` + Group: :c:member:`~PyTypeObject.tp_hash`, :c:member:`~PyTypeObject.tp_richcompare` This field is inherited by subtypes together with :c:member:`~PyTypeObject.tp_hash`: a subtype inherits :c:member:`~PyTypeObject.tp_richcompare` and :c:member:`~PyTypeObject.tp_hash` when @@ -1554,16 +1560,16 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Default:** - :c:type:`PyBaseObject_Type` provides a :attr:`tp_richcompare` + :c:data:`PyBaseObject_Type` provides a :c:member:`~PyTypeObject.tp_richcompare` implementation, which may be inherited. 
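As a sketch of a :c:member:`~PyTypeObject.tp_richcompare` implementation built on the :c:macro:`Py_RETURN_RICHCOMPARE` helper described above; the ``MyObject`` struct and its ``value`` field are hypothetical::

   typedef struct {
       PyObject_HEAD
       long value;                 /* hypothetical C-level state */
   } MyObject;

   static PyObject *
   mytype_richcompare(PyObject *self, PyObject *other, int op)
   {
       if (!PyObject_TypeCheck(other, Py_TYPE(self))) {
           Py_RETURN_NOTIMPLEMENTED;
       }
       /* Delegate equality and ordering to the C-level values; the macro
          returns Py_True or Py_False as appropriate for *op*. */
       Py_RETURN_RICHCOMPARE(((MyObject *)self)->value,
                             ((MyObject *)other)->value, op);
   }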
However, if only - :attr:`tp_hash` is defined, not even the inherited function is used + :c:member:`~PyTypeObject.tp_hash` is defined, not even the inherited function is used and instances of the type will not be able to participate in any comparisons. .. c:member:: Py_ssize_t PyTypeObject.tp_weaklistoffset - While this field is still supported, :const:`Py_TPFLAGS_MANAGED_WEAKREF` + While this field is still supported, :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` should be used instead, if at all possible. If the instances of this type are weakly referenceable, this field is greater @@ -1576,7 +1582,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) Do not confuse this field with :c:member:`~PyTypeObject.tp_weaklist`; that is the list head for weak references to the type object itself. - It is an error to set both the :const:`Py_TPFLAGS_MANAGED_WEAKREF` bit and + It is an error to set both the :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` bit and :c:member:`~PyTypeObject.tp_weaklist`. **Inheritance:** @@ -1588,7 +1594,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Default:** - If the :const:`Py_TPFLAGS_MANAGED_WEAKREF` bit is set in the + If the :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` bit is set in the :c:member:`~PyTypeObject.tp_dict` field, then :c:member:`~PyTypeObject.tp_weaklistoffset` will be set to a negative value, to indicate that it is unsafe to use this field. @@ -1717,7 +1723,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) called; it may also be initialized to a dictionary containing initial attributes for the type. Once :c:func:`PyType_Ready` has initialized the type, extra attributes for the type may be added to this dictionary only if they don't - correspond to overloaded operations (like :meth:`__add__`). Once + correspond to overloaded operations (like :meth:`~object.__add__`). Once initialization for the type has finished, this field should be treated as read-only. @@ -1782,7 +1788,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. c:member:: Py_ssize_t PyTypeObject.tp_dictoffset - While this field is still supported, :const:`Py_TPFLAGS_MANAGED_DICT` should be + While this field is still supported, :c:macro:`Py_TPFLAGS_MANAGED_DICT` should be used instead, if at all possible. If the instances of this type have a dictionary containing instance variables, @@ -1801,7 +1807,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) dictionary, so it is may be more efficient to call :c:func:`PyObject_GetAttr` when accessing an attribute on the object. - It is an error to set both the :const:`Py_TPFLAGS_MANAGED_WEAKREF` bit and + It is an error to set both the :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` bit and :c:member:`~PyTypeObject.tp_dictoffset`. **Inheritance:** @@ -1809,14 +1815,14 @@ and :c:type:`PyType_Type` effectively act as defaults.) This field is inherited by subtypes. A subtype should not override this offset; doing so could be unsafe, if C code tries to access the dictionary at the previous offset. - To properly support inheritance, use :const:`Py_TPFLAGS_MANAGED_DICT`. + To properly support inheritance, use :c:macro:`Py_TPFLAGS_MANAGED_DICT`. **Default:** This slot has no default. For :ref:`static types `, if the - field is ``NULL`` then no :attr:`__dict__` gets created for instances. + field is ``NULL`` then no :attr:`~object.__dict__` gets created for instances. 
- If the :const:`Py_TPFLAGS_MANAGED_DICT` bit is set in the + If the :c:macro:`Py_TPFLAGS_MANAGED_DICT` bit is set in the :c:member:`~PyTypeObject.tp_dict` field, then :c:member:`~PyTypeObject.tp_dictoffset` will be set to ``-1``, to indicate that it is unsafe to use this field. @@ -1826,10 +1832,10 @@ and :c:type:`PyType_Type` effectively act as defaults.) An optional pointer to an instance initialization function. - This function corresponds to the :meth:`__init__` method of classes. Like - :meth:`__init__`, it is possible to create an instance without calling - :meth:`__init__`, and it is possible to reinitialize an instance by calling its - :meth:`__init__` method again. + This function corresponds to the :meth:`~object.__init__` method of classes. Like + :meth:`!__init__`, it is possible to create an instance without calling + :meth:`!__init__`, and it is possible to reinitialize an instance by calling its + :meth:`!__init__` method again. The function signature is:: @@ -1837,7 +1843,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) The self argument is the instance to be initialized; the *args* and *kwds* arguments represent positional and keyword arguments of the call to - :meth:`__init__`. + :meth:`~object.__init__`. The :c:member:`~PyTypeObject.tp_init` function, if not ``NULL``, is called when an instance is created normally by calling its type, after the type's :c:member:`~PyTypeObject.tp_new` function @@ -1876,7 +1882,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) :c:func:`PyType_GenericAlloc`, to force a standard heap allocation strategy. - For static subtypes, :c:type:`PyBaseObject_Type` uses + For static subtypes, :c:data:`PyBaseObject_Type` uses :c:func:`PyType_GenericAlloc`. That is the recommended value for all statically defined types. @@ -1903,7 +1909,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) in :c:member:`~PyTypeObject.tp_new`, while for mutable types, most initialization should be deferred to :c:member:`~PyTypeObject.tp_init`. - Set the :const:`Py_TPFLAGS_DISALLOW_INSTANTIATION` flag to disallow creating + Set the :c:macro:`Py_TPFLAGS_DISALLOW_INSTANTIATION` flag to disallow creating instances of the type in Python. **Inheritance:** @@ -1937,9 +1943,9 @@ and :c:type:`PyType_Type` effectively act as defaults.) In dynamic subtypes, this field is set to a deallocator suitable to match :c:func:`PyType_GenericAlloc` and the value of the - :const:`Py_TPFLAGS_HAVE_GC` flag bit. + :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit. - For static subtypes, :c:type:`PyBaseObject_Type` uses PyObject_Del. + For static subtypes, :c:data:`PyBaseObject_Type` uses :c:func:`PyObject_Del`. .. c:member:: inquiry PyTypeObject.tp_is_gc @@ -1948,7 +1954,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) The garbage collector needs to know whether a particular object is collectible or not. Normally, it is sufficient to look at the object's type's - :c:member:`~PyTypeObject.tp_flags` field, and check the :const:`Py_TPFLAGS_HAVE_GC` flag bit. But + :c:member:`~PyTypeObject.tp_flags` field, and check the :c:macro:`Py_TPFLAGS_HAVE_GC` flag bit. But some types have a mixture of statically and dynamically allocated instances, and the statically allocated instances are not collectible. Such types should define this function; it should return ``1`` for a collectible instance, and @@ -1967,7 +1973,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) **Default:** This slot has no default. 
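For the ``tp_init`` slot described above, a minimal sketch (the ``value`` keyword argument is invented); it returns ``0`` on success and ``-1`` with an exception set on failure::

   /* Assumes the same hypothetical instance struct as above:
      typedef struct { PyObject_HEAD  long value; } MyObject; */

   static int
   mytype_init(PyObject *self, PyObject *args, PyObject *kwds)
   {
       static char *kwlist[] = {"value", NULL};
       long value = 0;

       if (!PyArg_ParseTupleAndKeywords(args, kwds, "|l", kwlist, &value)) {
           return -1;
       }
       ((MyObject *)self)->value = value;
       return 0;
   }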
If this field is ``NULL``, - :const:`Py_TPFLAGS_HAVE_GC` is used as the functional equivalent. + :c:macro:`Py_TPFLAGS_HAVE_GC` is used as the functional equivalent. .. c:member:: PyObject* PyTypeObject.tp_bases @@ -2114,7 +2120,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. versionchanged:: 3.8 Before version 3.8 it was necessary to set the - :const:`Py_TPFLAGS_HAVE_FINALIZE` flags bit in order for this field to be + :c:macro:`Py_TPFLAGS_HAVE_FINALIZE` flags bit in order for this field to be used. This is no longer required. .. seealso:: "Safe object finalization" (:pep:`442`) @@ -2126,7 +2132,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) In other words, it is used to implement :ref:`vectorcall ` for ``type.__call__``. If ``tp_vectorcall`` is ``NULL``, the default call implementation - using :attr:`__new__` and :attr:`__init__` is used. + using :meth:`~object.__new__` and :meth:`~object.__init__` is used. **Inheritance:** @@ -2135,7 +2141,7 @@ and :c:type:`PyType_Type` effectively act as defaults.) .. versionadded:: 3.9 (the field exists since 3.8 but it's only used since 3.9) -.. c:member:: char PyTypeObject.tp_watched +.. c:member:: unsigned char PyTypeObject.tp_watched Internal. Do not use. @@ -2173,7 +2179,7 @@ Heap Types An alternative to :ref:`static types ` is *heap-allocated types*, or *heap types* for short, which correspond closely to classes created by -Python's ``class`` statement. Heap types have the :const:`Py_TPFLAGS_HEAPTYPE` +Python's ``class`` statement. Heap types have the :c:macro:`Py_TPFLAGS_HEAPTYPE` flag set. This is done by filling a :c:type:`PyType_Spec` structure and calling @@ -2253,8 +2259,8 @@ Number Object Structures .. note:: - The :c:data:`nb_reserved` field should always be ``NULL``. It - was previously called :c:data:`nb_long`, and was renamed in + The :c:member:`~PyNumberMethods.nb_reserved` field should always be ``NULL``. It + was previously called :c:member:`!nb_long`, and was renamed in Python 3.0.1. .. c:member:: binaryfunc PyNumberMethods.nb_add @@ -2325,8 +2331,8 @@ Mapping Object Structures .. c:member:: objobjargproc PyMappingMethods.mp_ass_subscript This function is used by :c:func:`PyObject_SetItem`, - :c:func:`PyObject_DelItem`, :c:func:`PyObject_SetSlice` and - :c:func:`PyObject_DelSlice`. It has the same signature as + :c:func:`PyObject_DelItem`, :c:func:`PySequence_SetSlice` and + :c:func:`PySequence_DelSlice`. It has the same signature as :c:func:`!PyObject_SetItem`, but *v* can also be set to ``NULL`` to delete an item. If this slot is ``NULL``, the object does not support item assignment and deletion. @@ -2372,9 +2378,9 @@ Sequence Object Structures This slot must be filled for the :c:func:`PySequence_Check` function to return ``1``, it can be ``NULL`` otherwise. - Negative indexes are handled as follows: if the :attr:`sq_length` slot is + Negative indexes are handled as follows: if the :c:member:`~PySequenceMethods.sq_length` slot is filled, it is called and the sequence length is used to compute a positive - index which is passed to :attr:`sq_item`. If :attr:`sq_length` is ``NULL``, + index which is passed to :c:member:`~PySequenceMethods.sq_item`. If :c:member:`!sq_length` is ``NULL``, the index is passed as is to the function. .. c:member:: ssizeobjargproc PySequenceMethods.sq_ass_item @@ -2548,7 +2554,7 @@ Async Object Structures PyObject *am_aiter(PyObject *self); Must return an :term:`asynchronous iterator` object. - See :meth:`__anext__` for details. + See :meth:`~object.__anext__` for details. 
This slot may be set to ``NULL`` if an object does not implement asynchronous iteration protocol. @@ -2559,7 +2565,8 @@ Async Object Structures PyObject *am_anext(PyObject *self); - Must return an :term:`awaitable` object. See :meth:`__anext__` for details. + Must return an :term:`awaitable` object. + See :meth:`~object.__anext__` for details. This slot may be set to ``NULL``. .. c:member:: sendfunc PyAsyncMethods.am_send @@ -2584,8 +2591,8 @@ Slot Type typedefs The purpose of this function is to separate memory allocation from memory initialization. It should return a pointer to a block of memory of adequate length for the instance, suitably aligned, and initialized to zeros, but with - :attr:`ob_refcnt` set to ``1`` and :attr:`ob_type` set to the type argument. If - the type's :c:member:`~PyTypeObject.tp_itemsize` is non-zero, the object's :attr:`ob_size` field + :c:member:`~PyObject.ob_refcnt` set to ``1`` and :c:member:`~PyObject.ob_type` set to the type argument. If + the type's :c:member:`~PyTypeObject.tp_itemsize` is non-zero, the object's :c:member:`~PyVarObject.ob_size` field should be initialized to *nitems* and the length of the allocated memory block should be ``tp_basicsize + nitems*tp_itemsize``, rounded up to a multiple of ``sizeof(void*)``; otherwise, *nitems* is not used and the length of the block @@ -2781,7 +2788,7 @@ A type that supports weakrefs, instance dicts, and hashing:: A str subclass that cannot be subclassed and cannot be called to create instances (e.g. uses a separate factory func) using -:c:data:`Py_TPFLAGS_DISALLOW_INSTANTIATION` flag:: +:c:macro:`Py_TPFLAGS_DISALLOW_INSTANTIATION` flag:: typedef struct { PyUnicodeObject raw; diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index 64dcea785d0c68..cf965fa1676524 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -44,7 +44,7 @@ Python: .. c:type:: Py_UNICODE - This is a typedef of :c:expr:`wchar_t`, which is a 16-bit type or 32-bit type + This is a typedef of :c:type:`wchar_t`, which is a 16-bit type or 32-bit type depending on the platform. .. versionchanged:: 3.3 @@ -437,11 +437,11 @@ APIs: +----------+-----------------------------------------------------+ | ``ll`` | :c:expr:`long long` or :c:expr:`unsigned long long` | +----------+-----------------------------------------------------+ - | ``j`` | :c:expr:`intmax_t` or :c:expr:`uintmax_t` | + | ``j`` | :c:type:`intmax_t` or :c:type:`uintmax_t` | +----------+-----------------------------------------------------+ - | ``z`` | :c:expr:`size_t` or :c:expr:`ssize_t` | + | ``z`` | :c:type:`size_t` or :c:type:`ssize_t` | +----------+-----------------------------------------------------+ - | ``t`` | :c:expr:`ptrdiff_t` | + | ``t`` | :c:type:`ptrdiff_t` | +----------+-----------------------------------------------------+ The length modifier ``l`` for following conversions ``s`` or ``V`` specify @@ -520,7 +520,7 @@ APIs: .. note:: The width formatter unit is number of characters rather than bytes. - The precision formatter unit is number of bytes or :c:expr:`wchar_t` + The precision formatter unit is number of bytes or :c:type:`wchar_t` items (if the length modifier ``l`` is used) for ``"%s"`` and ``"%V"`` (if the ``PyObject*`` argument is ``NULL``), and a number of characters for ``"%A"``, ``"%U"``, ``"%S"``, ``"%R"`` and ``"%V"`` @@ -564,7 +564,7 @@ APIs: Copy an instance of a Unicode subtype to a new true Unicode object if necessary. If *obj* is already a true Unicode object (not a subtype), - return the reference with incremented refcount. 
+ return a new :term:`strong reference` to the object. Objects other than Unicode or its subtypes will cause a :exc:`TypeError`. @@ -601,7 +601,7 @@ APIs: Py_ssize_t how_many) Copy characters from one Unicode object into another. This function performs - character conversion when necessary and falls back to :c:func:`memcpy` if + character conversion when necessary and falls back to :c:func:`!memcpy` if possible. Returns ``-1`` and sets an exception on error, otherwise returns the number of copied characters. @@ -714,7 +714,7 @@ system. .. c:function:: PyObject* PyUnicode_DecodeLocale(const char *str, const char *errors) Similar to :c:func:`PyUnicode_DecodeLocaleAndSize`, but compute the string - length using :c:func:`strlen`. + length using :c:func:`!strlen`. .. versionadded:: 3.3 @@ -839,11 +839,11 @@ conversion function: wchar_t Support """"""""""""""" -:c:expr:`wchar_t` support for platforms which support it: +:c:type:`wchar_t` support for platforms which support it: .. c:function:: PyObject* PyUnicode_FromWideChar(const wchar_t *w, Py_ssize_t size) - Create a Unicode object from the :c:expr:`wchar_t` buffer *w* of the given *size*. + Create a Unicode object from the :c:type:`wchar_t` buffer *w* of the given *size*. Passing ``-1`` as the *size* indicates that the function must itself compute the length, using wcslen. Return ``NULL`` on failure. @@ -851,9 +851,9 @@ wchar_t Support .. c:function:: Py_ssize_t PyUnicode_AsWideChar(PyObject *unicode, wchar_t *w, Py_ssize_t size) - Copy the Unicode object contents into the :c:expr:`wchar_t` buffer *w*. At most - *size* :c:expr:`wchar_t` characters are copied (excluding a possibly trailing - null termination character). Return the number of :c:expr:`wchar_t` characters + Copy the Unicode object contents into the :c:type:`wchar_t` buffer *w*. At most + *size* :c:type:`wchar_t` characters are copied (excluding a possibly trailing + null termination character). Return the number of :c:type:`wchar_t` characters copied or ``-1`` in case of an error. Note that the resulting :c:expr:`wchar_t*` string may or may not be null-terminated. It is the responsibility of the caller to make sure that the :c:expr:`wchar_t*` string is null-terminated in case this is @@ -867,12 +867,12 @@ wchar_t Support Convert the Unicode object to a wide character string. The output string always ends with a null character. If *size* is not ``NULL``, write the number of wide characters (excluding the trailing null termination character) into - *\*size*. Note that the resulting :c:expr:`wchar_t` string might contain + *\*size*. Note that the resulting :c:type:`wchar_t` string might contain null characters, which would cause the string to be truncated when used with most C functions. If *size* is ``NULL`` and the :c:expr:`wchar_t*` string contains null characters a :exc:`ValueError` is raised. - Returns a buffer allocated by :c:func:`PyMem_New` (use + Returns a buffer allocated by :c:macro:`PyMem_New` (use :c:func:`PyMem_Free` to free it) on success. On error, returns ``NULL`` and *\*size* is undefined. Raises a :exc:`MemoryError` if memory allocation is failed. @@ -1205,9 +1205,9 @@ Character Map Codecs This codec is special in that it can be used to implement many different codecs (and this is in fact what was done to obtain most of the standard codecs -included in the :mod:`encodings` package). The codec uses mappings to encode and +included in the :mod:`!encodings` package). The codec uses mappings to encode and decode characters. 
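A minimal sketch of the ``wchar_t`` conversion functions described above, assuming ``text`` is an existing ``PyObject*`` pointing to a str object (the variable names are illustrative)::

   Py_ssize_t size;
   wchar_t *buf = PyUnicode_AsWideCharString(text, &size);
   if (buf == NULL) {
       return NULL;                  /* an exception is already set */
   }
   PyObject *roundtrip = PyUnicode_FromWideChar(buf, size);
   PyMem_Free(buf);                  /* the buffer was allocated with PyMem_New */
   if (roundtrip == NULL) {
       return NULL;
   }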
The mapping objects provided must support the -:meth:`__getitem__` mapping interface; dictionaries and sequences work well. +:meth:`~object.__getitem__` mapping interface; dictionaries and sequences work well. These are the mapping codec APIs: @@ -1250,7 +1250,7 @@ The following codec API is special in that maps Unicode to Unicode. The mapping table must map Unicode ordinal integers to Unicode ordinal integers or ``None`` (causing deletion of the character). - Mapping tables need only provide the :meth:`__getitem__` interface; dictionaries + Mapping tables need only provide the :meth:`~object.__getitem__` interface; dictionaries and sequences work well. Unmapped character ordinals (ones which cause a :exc:`LookupError`) are left untouched and are copied as-is. @@ -1292,7 +1292,7 @@ the user settings on the machine running the codec. Encode the Unicode object using the specified code page and return a Python bytes object. Return ``NULL`` if an exception was raised by the codec. Use - :c:data:`CP_ACP` code page to get the MBCS encoder. + :c:macro:`CP_ACP` code page to get the MBCS encoder. .. versionadded:: 3.3 @@ -1411,11 +1411,11 @@ They all return ``NULL`` or ``-1`` if an exception occurs. Rich compare two Unicode strings and return one of the following: * ``NULL`` in case an exception was raised - * :const:`Py_True` or :const:`Py_False` for successful comparisons - * :const:`Py_NotImplemented` in case the type combination is unknown + * :c:data:`Py_True` or :c:data:`Py_False` for successful comparisons + * :c:data:`Py_NotImplemented` in case the type combination is unknown - Possible values for *op* are :const:`Py_GT`, :const:`Py_GE`, :const:`Py_EQ`, - :const:`Py_NE`, :const:`Py_LT`, and :const:`Py_LE`. + Possible values for *op* are :c:macro:`Py_GT`, :c:macro:`Py_GE`, :c:macro:`Py_EQ`, + :c:macro:`Py_NE`, :c:macro:`Py_LT`, and :c:macro:`Py_LE`. .. c:function:: PyObject* PyUnicode_Format(PyObject *format, PyObject *args) @@ -1438,11 +1438,11 @@ They all return ``NULL`` or ``-1`` if an exception occurs. Intern the argument *\*string* in place. The argument must be the address of a pointer variable pointing to a Python Unicode string object. If there is an existing interned string that is the same as *\*string*, it sets *\*string* to - it (decrementing the reference count of the old string object and incrementing - the reference count of the interned string object), otherwise it leaves - *\*string* alone and interns it (incrementing its reference count). - (Clarification: even though there is a lot of talk about reference counts, think - of this function as reference-count-neutral; you own the object after the call + it (releasing the reference to the old string object and creating a new + :term:`strong reference` to the interned string object), otherwise it leaves + *\*string* alone and interns it (creating a new :term:`strong reference`). + (Clarification: even though there is a lot of talk about references, think + of this function as reference-neutral; you own the object after the call if and only if you owned it before the call.) diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst index 000a2d3d8790bb..324518c035096b 100644 --- a/Doc/c-api/veryhigh.rst +++ b/Doc/c-api/veryhigh.rst @@ -12,12 +12,12 @@ file or a buffer, but they will not let you interact in a more detailed way with the interpreter. Several of these functions accept a start symbol from the grammar as a -parameter. 
The available start symbols are :const:`Py_eval_input`, -:const:`Py_file_input`, and :const:`Py_single_input`. These are described +parameter. The available start symbols are :c:data:`Py_eval_input`, +:c:data:`Py_file_input`, and :c:data:`Py_single_input`. These are described following the functions which accept them as parameters. Note also that several of these functions take :c:expr:`FILE*` parameters. One -particular issue which needs to be handled carefully is that the :c:expr:`FILE` +particular issue which needs to be handled carefully is that the :c:type:`FILE` structure for different C libraries can be different and incompatible. Under Windows (at least), it is possible for dynamically linked extensions to actually use different libraries, so care should be taken that :c:expr:`FILE*` parameters @@ -256,8 +256,8 @@ the same library that the Python runtime is using. Parse and compile the Python source code in *str*, returning the resulting code object. The start token is given by *start*; this can be used to constrain the - code which can be compiled and should be :const:`Py_eval_input`, - :const:`Py_file_input`, or :const:`Py_single_input`. The filename specified by + code which can be compiled and should be :c:data:`Py_eval_input`, + :c:data:`Py_file_input`, or :c:data:`Py_single_input`. The filename specified by *filename* is used to construct the code object and may appear in tracebacks or :exc:`SyntaxError` exception messages. This returns ``NULL`` if the code cannot be parsed or compiled. @@ -353,7 +353,7 @@ the same library that the Python runtime is using. executed, it is passed as ``PyCompilerFlags *flags``. In this case, ``from __future__ import`` can modify *flags*. - Whenever ``PyCompilerFlags *flags`` is ``NULL``, :attr:`cf_flags` is treated as + Whenever ``PyCompilerFlags *flags`` is ``NULL``, :c:member:`~PyCompilerFlags.cf_flags` is treated as equal to ``0``, and any modification due to ``from __future__ import`` is discarded. @@ -367,7 +367,7 @@ the same library that the Python runtime is using. initialized to ``PY_MINOR_VERSION``. The field is ignored by default, it is used if and only if - ``PyCF_ONLY_AST`` flag is set in *cf_flags*. + ``PyCF_ONLY_AST`` flag is set in :c:member:`~PyCompilerFlags.cf_flags`. .. versionchanged:: 3.8 Added *cf_feature_version* field. 
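As a hedged sketch of how the start symbols above are typically used, the following helper compiles and runs a single expression in the ``__main__`` namespace (the name ``run_expression`` is hypothetical, not an API)::

   static PyObject *
   run_expression(const char *expr)
   {
       PyObject *code = Py_CompileString(expr, "<string>", Py_eval_input);
       if (code == NULL) {
           return NULL;                               /* SyntaxError is set */
       }
       PyObject *main_module = PyImport_AddModule("__main__");   /* borrowed */
       if (main_module == NULL) {
           Py_DECREF(code);
           return NULL;
       }
       PyObject *globals = PyModule_GetDict(main_module);        /* borrowed */
       PyObject *result = PyEval_EvalCode(code, globals, globals);
       Py_DECREF(code);
       return result;                  /* NULL if evaluation raised */
   }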
diff --git a/Doc/conf.py b/Doc/conf.py index 09e12e245891d2..19e05e1aa8fe19 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -77,12 +77,101 @@ exclude_patterns.append(venvdir + '/*') nitpick_ignore = [ + # Standard C functions + ('c:func', 'calloc'), + ('c:func', 'dlopen'), + ('c:func', 'exec'), + ('c:func', 'fcntl'), + ('c:func', 'fork'), + ('c:func', 'free'), + ('c:func', 'gmtime'), + ('c:func', 'localtime'), + ('c:func', 'main'), + ('c:func', 'malloc'), + ('c:func', 'printf'), + ('c:func', 'realloc'), + ('c:func', 'snprintf'), + ('c:func', 'sprintf'), + ('c:func', 'stat'), + ('c:func', 'system'), + ('c:func', 'time'), + ('c:func', 'vsnprintf'), + # Standard C types + ('c:type', 'FILE'), + ('c:type', 'int64_t'), + ('c:type', 'intmax_t'), + ('c:type', 'off_t'), + ('c:type', 'ptrdiff_t'), + ('c:type', 'siginfo_t'), + ('c:type', 'size_t'), + ('c:type', 'ssize_t'), + ('c:type', 'time_t'), + ('c:type', 'uint64_t'), + ('c:type', 'uintmax_t'), + ('c:type', 'uintptr_t'), + ('c:type', 'va_list'), + ('c:type', 'wchar_t'), + ('c:type', '__int64'), + ('c:type', 'unsigned __int64'), + # Standard C structures + ('c:struct', 'in6_addr'), + ('c:struct', 'in_addr'), + ('c:struct', 'stat'), + ('c:struct', 'statvfs'), + # Standard C macros + ('c:macro', 'LLONG_MAX'), + ('c:macro', 'LLONG_MIN'), + ('c:macro', 'LONG_MAX'), + ('c:macro', 'LONG_MIN'), + # Standard C variables + ('c:data', 'errno'), + # Standard environment variables + ('envvar', 'BROWSER'), + ('envvar', 'COLUMNS'), + ('envvar', 'COMSPEC'), + ('envvar', 'DISPLAY'), + ('envvar', 'HOME'), + ('envvar', 'HOMEDRIVE'), + ('envvar', 'HOMEPATH'), + ('envvar', 'IDLESTARTUP'), + ('envvar', 'LANG'), + ('envvar', 'LANGUAGE'), + ('envvar', 'LC_ALL'), + ('envvar', 'LC_CTYPE'), + ('envvar', 'LC_COLLATE'), + ('envvar', 'LC_MESSAGES'), + ('envvar', 'LC_MONETARY'), + ('envvar', 'LC_NUMERIC'), + ('envvar', 'LC_TIME'), + ('envvar', 'LINES'), + ('envvar', 'LOGNAME'), + ('envvar', 'PAGER'), + ('envvar', 'PATH'), + ('envvar', 'PATHEXT'), + ('envvar', 'SOURCE_DATE_EPOCH'), + ('envvar', 'TEMP'), + ('envvar', 'TERM'), + ('envvar', 'TMP'), + ('envvar', 'TMPDIR'), + ('envvar', 'TZ'), + ('envvar', 'USER'), + ('envvar', 'USERNAME'), + ('envvar', 'USERPROFILE'), # Do not error nit-picky mode builds when _SubParsersAction.add_parser cannot # be resolved, as the method is currently undocumented. For context, see # https://github.com/python/cpython/pull/103289. ('py:meth', '_SubParsersAction.add_parser'), ] +# gh-106948: Copy standard C types declared in the "c:type" domain to the +# "c:identifier" domain, since "c:function" markup looks for types in the +# "c:identifier" domain. 
Use list() to not iterate on items which are being +# added +for role, name in list(nitpick_ignore): + if role == 'c:type': + nitpick_ignore.append(('c:identifier', name)) +del role, name + # Disable Docutils smartquotes for several translations smartquotes_excludes = { 'languages': ['ja', 'fr', 'zh_TW', 'zh_CN'], 'builders': ['man', 'text'], diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat index aa1edf54637058..ed415a4dc644a4 100644 --- a/Doc/data/stable_abi.dat +++ b/Doc/data/stable_abi.dat @@ -111,7 +111,9 @@ function,PyDict_Copy,3.2,, function,PyDict_DelItem,3.2,, function,PyDict_DelItemString,3.2,, function,PyDict_GetItem,3.2,, +function,PyDict_GetItemRef,3.13,, function,PyDict_GetItemString,3.2,, +function,PyDict_GetItemStringRef,3.13,, function,PyDict_GetItemWithError,3.2,, function,PyDict_Items,3.2,, function,PyDict_Keys,3.2,, diff --git a/Doc/extending/embedding.rst b/Doc/extending/embedding.rst index bd1abe36cbb80e..20397dc5add5db 100644 --- a/Doc/extending/embedding.rst +++ b/Doc/extending/embedding.rst @@ -269,7 +269,7 @@ following two statements before the call to :c:func:`Py_Initialize`:: PyImport_AppendInittab("emb", &PyInit_emb); These two lines initialize the ``numargs`` variable, and make the -:func:`emb.numargs` function accessible to the embedded Python interpreter. +:func:`!emb.numargs` function accessible to the embedded Python interpreter. With these extensions, the Python script can do things like .. code-block:: python diff --git a/Doc/extending/extending.rst b/Doc/extending/extending.rst index 7d08bb9f6b8dd8..f58b4f28113e8c 100644 --- a/Doc/extending/extending.rst +++ b/Doc/extending/extending.rst @@ -197,7 +197,7 @@ The choice of which exception to raise is entirely yours. There are predeclared C objects corresponding to all built-in Python exceptions, such as :c:data:`PyExc_ZeroDivisionError`, which you can use directly. Of course, you should choose exceptions wisely --- don't use :c:data:`PyExc_TypeError` to mean -that a file couldn't be opened (that should probably be :c:data:`PyExc_IOError`). +that a file couldn't be opened (that should probably be :c:data:`PyExc_OSError`). If something's wrong with the argument list, the :c:func:`PyArg_ParseTuple` function usually raises :c:data:`PyExc_TypeError`. If you have an argument whose value must be in a particular range or must satisfy other conditions, @@ -208,7 +208,7 @@ usually declare a static object variable at the beginning of your file:: static PyObject *SpamError; -and initialize it in your module's initialization function (:c:func:`PyInit_spam`) +and initialize it in your module's initialization function (:c:func:`!PyInit_spam`) with an exception object:: PyMODINIT_FUNC @@ -221,9 +221,7 @@ with an exception object:: return NULL; SpamError = PyErr_NewException("spam.error", NULL, NULL); - Py_XINCREF(SpamError); - if (PyModule_AddObject(m, "error", SpamError) < 0) { - Py_XDECREF(SpamError); + if (PyModule_AddObjectRef(m, "error", SpamError) < 0) { Py_CLEAR(SpamError); Py_DECREF(m); return NULL; @@ -232,22 +230,22 @@ with an exception object:: return m; } -Note that the Python name for the exception object is :exc:`spam.error`. The +Note that the Python name for the exception object is :exc:`!spam.error`. The :c:func:`PyErr_NewException` function may create a class with the base class being :exc:`Exception` (unless another class is passed in instead of ``NULL``), described in :ref:`bltin-exceptions`. 
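Unlike ``PyModule_AddObject``, the ``PyModule_AddObjectRef`` function used above does not steal a reference, so a temporary object can be added and then released unconditionally. A minimal sketch, reusing the module object ``m`` from the initialization function above (the ``__version__`` attribute is only an illustration)::

   PyObject *version = PyUnicode_FromString("1.0");
   if (version == NULL) {
       Py_DECREF(m);
       return NULL;
   }
   int rc = PyModule_AddObjectRef(m, "__version__", version);
   Py_DECREF(version);   /* PyModule_AddObjectRef did not steal this reference */
   if (rc < 0) {
       Py_DECREF(m);
       return NULL;
   }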
-Note also that the :c:data:`SpamError` variable retains a reference to the newly +Note also that the :c:data:`!SpamError` variable retains a reference to the newly created exception class; this is intentional! Since the exception could be removed from the module by external code, an owned reference to the class is -needed to ensure that it will not be discarded, causing :c:data:`SpamError` to +needed to ensure that it will not be discarded, causing :c:data:`!SpamError` to become a dangling pointer. Should it become a dangling pointer, C code which raises the exception could cause a core dump or other unintended side effects. We discuss the use of ``PyMODINIT_FUNC`` as a function return type later in this sample. -The :exc:`spam.error` exception can be raised in your extension module using a +The :exc:`!spam.error` exception can be raised in your extension module using a call to :c:func:`PyErr_SetString` as shown below:: static PyObject * @@ -281,9 +279,9 @@ statement:: It returns ``NULL`` (the error indicator for functions returning object pointers) if an error is detected in the argument list, relying on the exception set by :c:func:`PyArg_ParseTuple`. Otherwise the string value of the argument has been -copied to the local variable :c:data:`command`. This is a pointer assignment and +copied to the local variable :c:data:`!command`. This is a pointer assignment and you are not supposed to modify the string to which it points (so in Standard C, -the variable :c:data:`command` should properly be declared as ``const char +the variable :c:data:`!command` should properly be declared as ``const char *command``). The next statement is a call to the Unix function :c:func:`system`, passing it @@ -291,7 +289,7 @@ the string we just got from :c:func:`PyArg_ParseTuple`:: sts = system(command); -Our :func:`spam.system` function must return the value of :c:data:`sts` as a +Our :func:`!spam.system` function must return the value of :c:data:`!sts` as a Python object. This is done using the function :c:func:`PyLong_FromLong`. :: return PyLong_FromLong(sts); @@ -317,7 +315,7 @@ contexts, as we have seen. The Module's Method Table and Initialization Function ===================================================== -I promised to show how :c:func:`spam_system` is called from Python programs. +I promised to show how :c:func:`!spam_system` is called from Python programs. First, we need to list its name and address in a "method table":: static PyMethodDef SpamMethods[] = { @@ -337,7 +335,7 @@ When using only ``METH_VARARGS``, the function should expect the Python-level parameters to be passed in as a tuple acceptable for parsing via :c:func:`PyArg_ParseTuple`; more information on this function is provided below. -The :const:`METH_KEYWORDS` bit may be set in the third field if keyword +The :c:macro:`METH_KEYWORDS` bit may be set in the third field if keyword arguments should be passed to the function. In this case, the C function should accept a third ``PyObject *`` parameter which will be a dictionary of keywords. Use :c:func:`PyArg_ParseTupleAndKeywords` to parse the arguments to such a @@ -356,7 +354,7 @@ The method table must be referenced in the module definition structure:: This structure, in turn, must be passed to the interpreter in the module's initialization function. 
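For the ``METH_KEYWORDS`` case mentioned earlier, the C function receives a third ``PyObject *`` argument holding the keyword dictionary and parses it with ``PyArg_ParseTupleAndKeywords``. A minimal sketch (the ``spam_repeat`` function and its parameters are hypothetical)::

   static PyObject *
   spam_repeat(PyObject *self, PyObject *args, PyObject *kwargs)
   {
       static char *kwlist[] = {"text", "count", NULL};
       const char *text;
       int count = 1;

       if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|i", kwlist,
                                        &text, &count)) {
           return NULL;
       }
       return PyUnicode_FromFormat("%s * %d", text, count);
   }

Such a function would be listed in the method table with the ``METH_VARARGS | METH_KEYWORDS`` flags.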
The initialization function must be named -:c:func:`PyInit_name`, where *name* is the name of the module, and should be the +:c:func:`!PyInit_name`, where *name* is the name of the module, and should be the only non-\ ``static`` item defined in the module file:: PyMODINIT_FUNC @@ -369,8 +367,8 @@ Note that PyMODINIT_FUNC declares the function as ``PyObject *`` return type, declares any special linkage declarations required by the platform, and for C++ declares the function as ``extern "C"``. -When the Python program imports module :mod:`spam` for the first time, -:c:func:`PyInit_spam` is called. (See below for comments about embedding Python.) +When the Python program imports module :mod:`!spam` for the first time, +:c:func:`!PyInit_spam` is called. (See below for comments about embedding Python.) It calls :c:func:`PyModule_Create`, which returns a module object, and inserts built-in function objects into the newly created module based upon the table (an array of :c:type:`PyMethodDef` structures) found in the module definition. @@ -380,7 +378,7 @@ certain errors, or return ``NULL`` if the module could not be initialized satisfactorily. The init function must return the module object to its caller, so that it then gets inserted into ``sys.modules``. -When embedding Python, the :c:func:`PyInit_spam` function is not called +When embedding Python, the :c:func:`!PyInit_spam` function is not called automatically unless there's an entry in the :c:data:`PyImport_Inittab` table. To add the module to the initialization table, use :c:func:`PyImport_AppendInittab`, optionally followed by an import of the module:: @@ -540,7 +538,7 @@ be part of a module definition:: } This function must be registered with the interpreter using the -:const:`METH_VARARGS` flag; this is described in section :ref:`methodtable`. The +:c:macro:`METH_VARARGS` flag; this is described in section :ref:`methodtable`. The :c:func:`PyArg_ParseTuple` function and its arguments are documented in section :ref:`parsetuple`. @@ -1043,13 +1041,13 @@ Let's follow the control flow into :c:func:`PyList_SetItem`. The list owns references to all its items, so when item 1 is replaced, it has to dispose of the original item 1. Now let's suppose the original item 1 was an instance of a user-defined class, and let's further suppose that the class defined a -:meth:`__del__` method. If this class instance has a reference count of 1, -disposing of it will call its :meth:`__del__` method. +:meth:`!__del__` method. If this class instance has a reference count of 1, +disposing of it will call its :meth:`!__del__` method. -Since it is written in Python, the :meth:`__del__` method can execute arbitrary +Since it is written in Python, the :meth:`!__del__` method can execute arbitrary Python code. Could it perhaps do something to invalidate the reference to -``item`` in :c:func:`bug`? You bet! Assuming that the list passed into -:c:func:`bug` is accessible to the :meth:`__del__` method, it could execute a +``item`` in :c:func:`!bug`? You bet! Assuming that the list passed into +:c:func:`!bug` is accessible to the :meth:`!__del__` method, it could execute a statement to the effect of ``del list[0]``, and assuming this was the last reference to that object, it would free the memory associated with it, thereby invalidating ``item``. @@ -1070,7 +1068,7 @@ increment the reference count. The correct version of the function reads:: This is a true story. 
An older version of Python contained variants of this bug and someone spent a considerable amount of time in a C debugger to figure out -why his :meth:`__del__` methods would fail... +why his :meth:`!__del__` methods would fail... The second case of problems with a borrowed reference is a variant involving threads. Normally, multiple threads in the Python interpreter can't get in each @@ -1221,14 +1219,14 @@ file corresponding to the module provides a macro that takes care of importing the module and retrieving its C API pointers; client modules only have to call this macro before accessing the C API. -The exporting module is a modification of the :mod:`spam` module from section -:ref:`extending-simpleexample`. The function :func:`spam.system` does not call +The exporting module is a modification of the :mod:`!spam` module from section +:ref:`extending-simpleexample`. The function :func:`!spam.system` does not call the C library function :c:func:`system` directly, but a function -:c:func:`PySpam_System`, which would of course do something more complicated in +:c:func:`!PySpam_System`, which would of course do something more complicated in reality (such as adding "spam" to every command). This function -:c:func:`PySpam_System` is also exported to other extension modules. +:c:func:`!PySpam_System` is also exported to other extension modules. -The function :c:func:`PySpam_System` is a plain C function, declared +The function :c:func:`!PySpam_System` is a plain C function, declared ``static`` like everything else:: static int @@ -1237,7 +1235,7 @@ The function :c:func:`PySpam_System` is a plain C function, declared return system(command); } -The function :c:func:`spam_system` is modified in a trivial way:: +The function :c:func:`!spam_system` is modified in a trivial way:: static PyObject * spam_system(PyObject *self, PyObject *args) @@ -1281,8 +1279,7 @@ function must take care of initializing the C API pointer array:: /* Create a Capsule containing the API pointer array's address */ c_api_object = PyCapsule_New((void *)PySpam_API, "spam._C_API", NULL); - if (PyModule_AddObject(m, "_C_API", c_api_object) < 0) { - Py_XDECREF(c_api_object); + if (PyModule_Add(m, "_C_API", c_api_object) < 0) { Py_DECREF(m); return NULL; } @@ -1291,7 +1288,7 @@ function must take care of initializing the C API pointer array:: } Note that ``PySpam_API`` is declared ``static``; otherwise the pointer -array would disappear when :func:`PyInit_spam` terminates! +array would disappear when :c:func:`!PyInit_spam` terminates! The bulk of the work is in the header file :file:`spammodule.h`, which looks like this:: @@ -1345,8 +1342,8 @@ like this:: #endif /* !defined(Py_SPAMMODULE_H) */ All that a client module must do in order to have access to the function -:c:func:`PySpam_System` is to call the function (or rather macro) -:c:func:`import_spam` in its initialization function:: +:c:func:`!PySpam_System` is to call the function (or rather macro) +:c:func:`!import_spam` in its initialization function:: PyMODINIT_FUNC PyInit_client(void) diff --git a/Doc/extending/newtypes.rst b/Doc/extending/newtypes.rst index 7f8f8ddaaaccd6..386b3c8f4452c3 100644 --- a/Doc/extending/newtypes.rst +++ b/Doc/extending/newtypes.rst @@ -270,7 +270,7 @@ structure:: One entry should be defined for each method provided by the type; no entries are needed for methods inherited from a base type. One additional entry is needed at the end; it is a sentinel that marks the end of the array. The -:attr:`ml_name` field of the sentinel must be ``NULL``. 
+:c:member:`~PyMethodDef.ml_name` field of the sentinel must be ``NULL``. The second table is used to define attributes which map directly to data stored in the instance. A variety of primitive C types are supported, and access may @@ -286,9 +286,9 @@ be read-only or read-write. The structures in the table are defined as:: For each entry in the table, a :term:`descriptor` will be constructed and added to the type which will be able to extract a value from the instance structure. The -:attr:`type` field should contain a type code like :c:macro:`Py_T_INT` or +:c:member:`~PyMemberDef.type` field should contain a type code like :c:macro:`Py_T_INT` or :c:macro:`Py_T_DOUBLE`; the value will be used to determine how to -convert Python values to and from C values. The :attr:`flags` field is used to +convert Python values to and from C values. The :c:member:`~PyMemberDef.flags` field is used to store flags which control how the attribute can be accessed: you can set it to :c:macro:`Py_READONLY` to prevent Python code from setting it. @@ -298,7 +298,7 @@ have an associated doc string simply by providing the text in the table. An application can use the introspection API to retrieve the descriptor from the class object, and get the doc string using its :attr:`__doc__` attribute. -As with the :c:member:`~PyTypeObject.tp_methods` table, a sentinel entry with a :attr:`name` value +As with the :c:member:`~PyTypeObject.tp_methods` table, a sentinel entry with a :c:member:`~PyMethodDef.name` value of ``NULL`` is required. .. XXX Descriptors need to be explained in more detail somewhere, but not here. @@ -323,7 +323,7 @@ called, so that if you do need to extend their functionality, you'll understand what needs to be done. The :c:member:`~PyTypeObject.tp_getattr` handler is called when the object requires an attribute -look-up. It is called in the same situations where the :meth:`__getattr__` +look-up. It is called in the same situations where the :meth:`~object.__getattr__` method of a class would be called. Here is an example:: @@ -342,8 +342,8 @@ Here is an example:: return NULL; } -The :c:member:`~PyTypeObject.tp_setattr` handler is called when the :meth:`__setattr__` or -:meth:`__delattr__` method of a class instance would be called. When an +The :c:member:`~PyTypeObject.tp_setattr` handler is called when the :meth:`~object.__setattr__` or +:meth:`~object.__delattr__` method of a class instance would be called. When an attribute should be deleted, the third parameter will be ``NULL``. Here is an example that simply raises an exception; if this were really all you wanted, the :c:member:`~PyTypeObject.tp_setattr` handler should be set to ``NULL``. :: @@ -364,7 +364,7 @@ Object Comparison The :c:member:`~PyTypeObject.tp_richcompare` handler is called when comparisons are needed. It is analogous to the :ref:`rich comparison methods `, like -:meth:`__lt__`, and also called by :c:func:`PyObject_RichCompare` and +:meth:`!__lt__`, and also called by :c:func:`PyObject_RichCompare` and :c:func:`PyObject_RichCompareBool`. This function is called with two Python objects and the operator as arguments, @@ -505,7 +505,7 @@ These functions provide support for the iterator protocol. Both handlers take exactly one parameter, the instance for which they are being called, and return a new reference. In the case of an error, they should set an exception and return ``NULL``. 
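A minimal sketch of these two handlers for a hypothetical self-iterating ``Counter`` type whose C struct holds ``long current`` and ``long limit`` fields (all of these names are illustrative)::

   static PyObject *
   Counter_iter(PyObject *op)
   {
       return Py_NewRef(op);            /* the object is its own iterator */
   }

   static PyObject *
   Counter_iternext(PyObject *op)
   {
       CounterObject *self = (CounterObject *) op;
       if (self->current >= self->limit) {
           return NULL;                 /* end of iteration; no exception is set */
       }
       return PyLong_FromLong(self->current++);
   }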
:c:member:`~PyTypeObject.tp_iter` corresponds -to the Python :meth:`__iter__` method, while :c:member:`~PyTypeObject.tp_iternext` +to the Python :meth:`~object.__iter__` method, while :c:member:`~PyTypeObject.tp_iternext` corresponds to the Python :meth:`~iterator.__next__` method. Any :term:`iterable` object must implement the :c:member:`~PyTypeObject.tp_iter` diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst index f89934a11f12a8..209a4ab76d226d 100644 --- a/Doc/extending/newtypes_tutorial.rst +++ b/Doc/extending/newtypes_tutorial.rst @@ -36,8 +36,8 @@ So, if you want to define a new extension type, you need to create a new type object. This sort of thing can only be explained by example, so here's a minimal, but -complete, module that defines a new type named :class:`Custom` inside a C -extension module :mod:`custom`: +complete, module that defines a new type named :class:`!Custom` inside a C +extension module :mod:`!custom`: .. note:: What we're showing here is the traditional way of defining *static* @@ -50,12 +50,12 @@ extension module :mod:`custom`: Now that's quite a bit to take in at once, but hopefully bits will seem familiar from the previous chapter. This file defines three things: -#. What a :class:`Custom` **object** contains: this is the ``CustomObject`` - struct, which is allocated once for each :class:`Custom` instance. -#. How the :class:`Custom` **type** behaves: this is the ``CustomType`` struct, +#. What a :class:`!Custom` **object** contains: this is the ``CustomObject`` + struct, which is allocated once for each :class:`!Custom` instance. +#. How the :class:`!Custom` **type** behaves: this is the ``CustomType`` struct, which defines a set of flags and function pointers that the interpreter inspects when specific operations are requested. -#. How to initialize the :mod:`custom` module: this is the ``PyInit_custom`` +#. How to initialize the :mod:`!custom` module: this is the ``PyInit_custom`` function and the associated ``custommodule`` struct. The first bit is:: @@ -127,8 +127,8 @@ our objects and in some error messages, for example: TypeError: can only concatenate str (not "custom.Custom") to str Note that the name is a dotted name that includes both the module name and the -name of the type within the module. The module in this case is :mod:`custom` and -the type is :class:`Custom`, so we set the type name to :class:`custom.Custom`. +name of the type within the module. The module in this case is :mod:`!custom` and +the type is :class:`!Custom`, so we set the type name to :class:`!custom.Custom`. Using the real dotted import path is important to make your type compatible with the :mod:`pydoc` and :mod:`pickle` modules. :: @@ -136,7 +136,7 @@ with the :mod:`pydoc` and :mod:`pickle` modules. :: .tp_itemsize = 0, This is so that Python knows how much memory to allocate when creating -new :class:`Custom` instances. :c:member:`~PyTypeObject.tp_itemsize` is +new :class:`!Custom` instances. :c:member:`~PyTypeObject.tp_itemsize` is only used for variable-sized objects and should otherwise be zero. .. note:: @@ -145,13 +145,13 @@ only used for variable-sized objects and should otherwise be zero. :c:member:`~PyTypeObject.tp_basicsize` as its base type, you may have problems with multiple inheritance. A Python subclass of your type will have to list your type first in its :attr:`~class.__bases__`, or else it will not be able to call your type's - :meth:`__new__` method without getting an error. 
You can avoid this problem by + :meth:`~object.__new__` method without getting an error. You can avoid this problem by ensuring that your type has a larger value for :c:member:`~PyTypeObject.tp_basicsize` than its base type does. Most of the time, this will be true anyway, because either your base type will be :class:`object`, or else you will be adding data members to your base type, and therefore increasing its size. -We set the class flags to :const:`Py_TPFLAGS_DEFAULT`. :: +We set the class flags to :c:macro:`Py_TPFLAGS_DEFAULT`. :: .tp_flags = Py_TPFLAGS_DEFAULT, @@ -164,31 +164,29 @@ We provide a doc string for the type in :c:member:`~PyTypeObject.tp_doc`. :: .tp_doc = PyDoc_STR("Custom objects"), To enable object creation, we have to provide a :c:member:`~PyTypeObject.tp_new` -handler. This is the equivalent of the Python method :meth:`__new__`, but +handler. This is the equivalent of the Python method :meth:`~object.__new__`, but has to be specified explicitly. In this case, we can just use the default implementation provided by the API function :c:func:`PyType_GenericNew`. :: .tp_new = PyType_GenericNew, Everything else in the file should be familiar, except for some code in -:c:func:`PyInit_custom`:: +:c:func:`!PyInit_custom`:: if (PyType_Ready(&CustomType) < 0) return; -This initializes the :class:`Custom` type, filling in a number of members -to the appropriate default values, including :attr:`ob_type` that we initially +This initializes the :class:`!Custom` type, filling in a number of members +to the appropriate default values, including :c:member:`~PyObject.ob_type` that we initially set to ``NULL``. :: - Py_INCREF(&CustomType); - if (PyModule_AddObject(m, "Custom", (PyObject *) &CustomType) < 0) { - Py_DECREF(&CustomType); + if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) { Py_DECREF(m); return NULL; } This adds the type to the module dictionary. This allows us to create -:class:`Custom` instances by calling the :class:`Custom` class: +:class:`!Custom` instances by calling the :class:`!Custom` class: .. code-block:: pycon @@ -220,7 +218,7 @@ Of course, the current Custom type is pretty uninteresting. It has no data and doesn't do anything. It can't even be subclassed. .. note:: - While this documentation showcases the standard :mod:`distutils` module + While this documentation showcases the standard :mod:`!distutils` module for building C extensions, it is recommended in real-world use cases to use the newer and better-maintained ``setuptools`` library. Documentation on how to do this is out of scope for this document and can be found in @@ -231,7 +229,7 @@ Adding data and methods to the Basic example ============================================ Let's extend the basic example to add some data and methods. Let's also make -the type usable as a base class. We'll create a new module, :mod:`custom2` that +the type usable as a base class. We'll create a new module, :mod:`!custom2` that adds these capabilities: .. literalinclude:: ../includes/custom2.c @@ -239,7 +237,7 @@ adds these capabilities: This version of the module has a number of changes. -The :class:`Custom` type now has three data attributes in its C struct, +The :class:`!Custom` type now has three data attributes in its C struct, *first*, *last*, and *number*. The *first* and *last* variables are Python strings containing first and last names. The *number* attribute is a C integer. @@ -272,7 +270,7 @@ This method first clears the reference counts of the two Python attributes. 
``NULL`` (which might happen here if ``tp_new`` failed midway). It then calls the :c:member:`~PyTypeObject.tp_free` member of the object's type (computed by ``Py_TYPE(self)``) to free the object's memory. Note that -the object's type might not be :class:`CustomType`, because the object may +the object's type might not be :class:`!CustomType`, because the object may be an instance of a subclass. .. note:: @@ -311,10 +309,10 @@ and install it in the :c:member:`~PyTypeObject.tp_new` member:: .tp_new = Custom_new, The ``tp_new`` handler is responsible for creating (as opposed to initializing) -objects of the type. It is exposed in Python as the :meth:`__new__` method. +objects of the type. It is exposed in Python as the :meth:`~object.__new__` method. It is not required to define a ``tp_new`` member, and indeed many extension types will simply reuse :c:func:`PyType_GenericNew` as done in the first -version of the ``Custom`` type above. In this case, we use the ``tp_new`` +version of the :class:`!Custom` type above. In this case, we use the ``tp_new`` handler to initialize the ``first`` and ``last`` attributes to non-``NULL`` default values. @@ -345,7 +343,7 @@ result against ``NULL`` before proceeding. .. note:: If you are creating a co-operative :c:member:`~PyTypeObject.tp_new` (one - that calls a base type's :c:member:`~PyTypeObject.tp_new` or :meth:`__new__`), + that calls a base type's :c:member:`~PyTypeObject.tp_new` or :meth:`~object.__new__`), you must *not* try to determine what method to call using method resolution order at runtime. Always statically determine what type you are going to call, and call its :c:member:`~PyTypeObject.tp_new` directly, or via @@ -388,14 +386,14 @@ by filling the :c:member:`~PyTypeObject.tp_init` slot. :: .tp_init = (initproc) Custom_init, The :c:member:`~PyTypeObject.tp_init` slot is exposed in Python as the -:meth:`__init__` method. It is used to initialize an object after it's +:meth:`~object.__init__` method. It is used to initialize an object after it's created. Initializers always accept positional and keyword arguments, and they should return either ``0`` on success or ``-1`` on error. Unlike the ``tp_new`` handler, there is no guarantee that ``tp_init`` is called at all (for example, the :mod:`pickle` module by default -doesn't call :meth:`__init__` on unpickled instances). It can also be -called multiple times. Anyone can call the :meth:`__init__` method on +doesn't call :meth:`~object.__init__` on unpickled instances). It can also be +called multiple times. Anyone can call the :meth:`!__init__` method on our objects. For this reason, we have to be extra careful when assigning the new attribute values. We might be tempted, for example to assign the ``first`` member like this:: @@ -453,7 +451,7 @@ Further, the attributes can be deleted, setting the C pointers to ``NULL``. Eve though we can make sure the members are initialized to non-``NULL`` values, the members can be set to ``NULL`` if the attributes are deleted. -We define a single method, :meth:`Custom.name()`, that outputs the objects name as the +We define a single method, :meth:`!Custom.name()`, that outputs the objects name as the concatenation of the first and last names. :: static PyObject * @@ -470,8 +468,8 @@ concatenation of the first and last names. :: return PyUnicode_FromFormat("%S %S", self->first, self->last); } -The method is implemented as a C function that takes a :class:`Custom` (or -:class:`Custom` subclass) instance as the first argument. 
Methods always take an +The method is implemented as a C function that takes a :class:`!Custom` (or +:class:`!Custom` subclass) instance as the first argument. Methods always take an instance as the first argument. Methods often take positional and keyword arguments as well, but in this case we don't take any and don't need to accept a positional argument tuple or keyword argument dictionary. This method is @@ -482,8 +480,8 @@ equivalent to the Python method: def name(self): return "%s %s" % (self.first, self.last) -Note that we have to check for the possibility that our :attr:`first` and -:attr:`last` members are ``NULL``. This is because they can be deleted, in which +Note that we have to check for the possibility that our :attr:`!first` and +:attr:`!last` members are ``NULL``. This is because they can be deleted, in which case they are set to ``NULL``. It would be better to prevent deletion of these attributes and to restrict the attribute values to be strings. We'll see how to do that in the next section. @@ -498,7 +496,7 @@ definitions:: {NULL} /* Sentinel */ }; -(note that we used the :const:`METH_NOARGS` flag to indicate that the method +(note that we used the :c:macro:`METH_NOARGS` flag to indicate that the method is expecting no arguments other than *self*) and assign it to the :c:member:`~PyTypeObject.tp_methods` slot:: @@ -508,11 +506,11 @@ and assign it to the :c:member:`~PyTypeObject.tp_methods` slot:: Finally, we'll make our type usable as a base class for subclassing. We've written our methods carefully so far so that they don't make any assumptions about the type of the object being created or used, so all we need to do is -to add the :const:`Py_TPFLAGS_BASETYPE` to our class flag definition:: +to add the :c:macro:`Py_TPFLAGS_BASETYPE` to our class flag definition:: .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, -We rename :c:func:`PyInit_custom` to :c:func:`PyInit_custom2`, update the +We rename :c:func:`!PyInit_custom` to :c:func:`!PyInit_custom2`, update the module name in the :c:type:`PyModuleDef` struct, and update the full class name in the :c:type:`PyTypeObject` struct. @@ -531,18 +529,18 @@ Finally, we update our :file:`setup.py` file to build the new module: Providing finer control over data attributes ============================================ -In this section, we'll provide finer control over how the :attr:`first` and -:attr:`last` attributes are set in the :class:`Custom` example. In the previous -version of our module, the instance variables :attr:`first` and :attr:`last` +In this section, we'll provide finer control over how the :attr:`!first` and +:attr:`!last` attributes are set in the :class:`!Custom` example. In the previous +version of our module, the instance variables :attr:`!first` and :attr:`!last` could be set to non-string values or even deleted. We want to make sure that these attributes always contain strings. .. literalinclude:: ../includes/custom3.c -To provide greater control, over the :attr:`first` and :attr:`last` attributes, +To provide greater control, over the :attr:`!first` and :attr:`!last` attributes, we'll use custom getter and setter functions. 
Here are the functions for -getting and setting the :attr:`first` attribute:: +getting and setting the :attr:`!first` attribute:: static PyObject * Custom_getfirst(CustomObject *self, void *closure) @@ -571,13 +569,13 @@ getting and setting the :attr:`first` attribute:: return 0; } -The getter function is passed a :class:`Custom` object and a "closure", which is +The getter function is passed a :class:`!Custom` object and a "closure", which is a void pointer. In this case, the closure is ignored. (The closure supports an advanced usage in which definition data is passed to the getter and setter. This could, for example, be used to allow a single set of getter and setter functions that decide the attribute to get or set based on data in the closure.) -The setter function is passed the :class:`Custom` object, the new value, and the +The setter function is passed the :class:`!Custom` object, the new value, and the closure. The new value may be ``NULL``, in which case the attribute is being deleted. In our setter, we raise an error if the attribute is deleted or if its new value is not a string. @@ -666,11 +664,11 @@ still has a reference from itself. Its reference count doesn't drop to zero. Fortunately, Python's cyclic garbage collector will eventually figure out that the list is garbage and free it. -In the second version of the :class:`Custom` example, we allowed any kind of -object to be stored in the :attr:`first` or :attr:`last` attributes [#]_. +In the second version of the :class:`!Custom` example, we allowed any kind of +object to be stored in the :attr:`!first` or :attr:`!last` attributes [#]_. Besides, in the second and third versions, we allowed subclassing -:class:`Custom`, and subclasses may add arbitrary attributes. For any of -those two reasons, :class:`Custom` objects can participate in cycles: +:class:`!Custom`, and subclasses may add arbitrary attributes. For any of +those two reasons, :class:`!Custom` objects can participate in cycles: .. code-block:: pycon @@ -680,8 +678,8 @@ those two reasons, :class:`Custom` objects can participate in cycles: >>> n = Derived() >>> n.some_attribute = n -To allow a :class:`Custom` instance participating in a reference cycle to -be properly detected and collected by the cyclic GC, our :class:`Custom` type +To allow a :class:`!Custom` instance participating in a reference cycle to +be properly detected and collected by the cyclic GC, our :class:`!Custom` type needs to fill two additional slots and to enable a flag that enables these slots: .. literalinclude:: ../includes/custom4.c @@ -708,8 +706,8 @@ participate in cycles:: } For each subobject that can participate in cycles, we need to call the -:c:func:`visit` function, which is passed to the traversal method. The -:c:func:`visit` function takes as arguments the subobject and the extra argument +:c:func:`!visit` function, which is passed to the traversal method. The +:c:func:`!visit` function takes as arguments the subobject and the extra argument *arg* passed to the traversal method. It returns an integer value that must be returned if it is non-zero. @@ -774,7 +772,7 @@ and ``Custom_clear``:: Py_TYPE(self)->tp_free((PyObject *) self); } -Finally, we add the :const:`Py_TPFLAGS_HAVE_GC` flag to the class flags:: +Finally, we add the :c:macro:`Py_TPFLAGS_HAVE_GC` flag to the class flags:: .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC, @@ -791,9 +789,9 @@ types. 
It is easiest to inherit from the built in types, since an extension can easily use the :c:type:`PyTypeObject` it needs. It can be difficult to share these :c:type:`PyTypeObject` structures between extension modules. -In this example we will create a :class:`SubList` type that inherits from the +In this example we will create a :class:`!SubList` type that inherits from the built-in :class:`list` type. The new type will be completely compatible with -regular lists, but will have an additional :meth:`increment` method that +regular lists, but will have an additional :meth:`!increment` method that increases an internal counter: .. code-block:: pycon @@ -811,7 +809,7 @@ increases an internal counter: .. literalinclude:: ../includes/sublist.c -As you can see, the source code closely resembles the :class:`Custom` examples in +As you can see, the source code closely resembles the :class:`!Custom` examples in previous sections. We will break down the main differences between them. :: typedef struct { @@ -823,7 +821,7 @@ The primary difference for derived type objects is that the base type's object structure must be the first value. The base type will already include the :c:func:`PyObject_HEAD` at the beginning of its structure. -When a Python object is a :class:`SubList` instance, its ``PyObject *`` pointer +When a Python object is a :class:`!SubList` instance, its ``PyObject *`` pointer can be safely cast to both ``PyListObject *`` and ``SubListObject *``:: static int @@ -835,7 +833,7 @@ can be safely cast to both ``PyListObject *`` and ``SubListObject *``:: return 0; } -We see above how to call through to the :attr:`__init__` method of the base +We see above how to call through to the :meth:`~object.__init__` method of the base type. This pattern is important when writing a type with custom @@ -862,9 +860,7 @@ function:: if (m == NULL) return NULL; - Py_INCREF(&SubListType); - if (PyModule_AddObject(m, "SubList", (PyObject *) &SubListType) < 0) { - Py_DECREF(&SubListType); + if (PyModule_AddObjectRef(m, "SubList", (PyObject *) &SubListType) < 0) { Py_DECREF(m); return NULL; } @@ -879,7 +875,7 @@ slot with :c:func:`PyType_GenericNew` -- the allocation function from the base type will be inherited. After that, calling :c:func:`PyType_Ready` and adding the type object to the -module is the same as with the basic :class:`Custom` examples. +module is the same as with the basic :class:`!Custom` examples. .. rubric:: Footnotes diff --git a/Doc/faq/extending.rst b/Doc/faq/extending.rst index bc3080f60ee237..2a8b976925d042 100644 --- a/Doc/faq/extending.rst +++ b/Doc/faq/extending.rst @@ -81,13 +81,13 @@ How do I extract C values from a Python object? That depends on the object's type. If it's a tuple, :c:func:`PyTuple_Size` returns its length and :c:func:`PyTuple_GetItem` returns the item at a specified -index. Lists have similar functions, :c:func:`PyListSize` and +index. Lists have similar functions, :c:func:`PyList_Size` and :c:func:`PyList_GetItem`. For bytes, :c:func:`PyBytes_Size` returns its length and :c:func:`PyBytes_AsStringAndSize` provides a pointer to its value and its length. Note that Python bytes objects may contain null bytes so C's -:c:func:`strlen` should not be used. +:c:func:`!strlen` should not be used. To test the type of an object, first make sure it isn't ``NULL``, and then use :c:func:`PyBytes_Check`, :c:func:`PyTuple_Check`, :c:func:`PyList_Check`, etc. 
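Putting the above together, a short sketch that walks a list of bytes objects, assuming ``obj`` is a ``PyObject*`` received from the caller (illustrative only)::

   if (!PyList_Check(obj)) {
       PyErr_SetString(PyExc_TypeError, "expected a list");
       return NULL;
   }
   Py_ssize_t n = PyList_Size(obj);
   for (Py_ssize_t i = 0; i < n; i++) {
       PyObject *item = PyList_GetItem(obj, i);    /* borrowed reference */
       char *data;
       Py_ssize_t len;
       if (item == NULL || PyBytes_AsStringAndSize(item, &data, &len) < 0) {
           return NULL;             /* the error is already set */
       }
       /* data points to len bytes; they may contain embedded null bytes */
   }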
diff --git a/Doc/faq/library.rst b/Doc/faq/library.rst index 22f7f846d261d8..9e3727456bb96e 100644 --- a/Doc/faq/library.rst +++ b/Doc/faq/library.rst @@ -566,7 +566,7 @@ use ``p.read(n)``. Note on a bug in popen2: unless your program calls ``wait()`` or ``waitpid()``, finished child processes are never removed, and eventually calls to popen2 will fail because of a limit on the number of child - processes. Calling :func:`os.waitpid` with the :data:`os.WNOHANG` option can + processes. Calling :func:`os.waitpid` with the :const:`os.WNOHANG` option can prevent this; a good place to insert such a call would be before calling ``popen2`` again. diff --git a/Doc/glossary.rst b/Doc/glossary.rst index a4650a6c3efa22..a4cd05b0cf019d 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -159,8 +159,9 @@ Glossary :class:`str` objects. borrowed reference - In Python's C API, a borrowed reference is a reference to an object. - It does not modify the object reference count. It becomes a dangling + In Python's C API, a borrowed reference is a reference to an object, + where the code using the object does not own the reference. + It becomes a dangling pointer if the object is destroyed. For example, a garbage collection can remove the last :term:`strong reference` to the object and so destroy it. @@ -1054,7 +1055,9 @@ Glossary reference count The number of references to an object. When the reference count of an - object drops to zero, it is deallocated. Reference counting is + object drops to zero, it is deallocated. Some objects are + "immortal" and have reference counts that are never modified, and + therefore the objects are never deallocated. Reference counting is generally not visible to Python code, but it is a key element of the :term:`CPython` implementation. Programmers can call the :func:`sys.getrefcount` function to return the @@ -1137,8 +1140,10 @@ Glossary strong reference In Python's C API, a strong reference is a reference to an object - which increments the object's reference count when it is created and - decrements the object's reference count when it is deleted. + which is owned by the code holding the reference. The strong + reference is taken by calling :c:func:`Py_INCREF` when the + reference is created and released with :c:func:`Py_DECREF` + when the reference is deleted. The :c:func:`Py_NewRef` function can be used to create a strong reference to an object. Usually, the :c:func:`Py_DECREF` function must be called on diff --git a/Doc/howto/annotations.rst b/Doc/howto/annotations.rst index 472069032d6509..1134686c947d66 100644 --- a/Doc/howto/annotations.rst +++ b/Doc/howto/annotations.rst @@ -32,201 +32,201 @@ Annotations Best Practices Accessing The Annotations Dict Of An Object In Python 3.10 And Newer ==================================================================== - Python 3.10 adds a new function to the standard library: - :func:`inspect.get_annotations`. In Python versions 3.10 - and newer, calling this function is the best practice for - accessing the annotations dict of any object that supports - annotations. This function can also "un-stringize" - stringized annotations for you. - - If for some reason :func:`inspect.get_annotations` isn't - viable for your use case, you may access the - ``__annotations__`` data member manually. Best practice - for this changed in Python 3.10 as well: as of Python 3.10, - ``o.__annotations__`` is guaranteed to *always* work - on Python functions, classes, and modules. 
If you're - certain the object you're examining is one of these three - *specific* objects, you may simply use ``o.__annotations__`` - to get at the object's annotations dict. - - However, other types of callables--for example, - callables created by :func:`functools.partial`--may - not have an ``__annotations__`` attribute defined. When - accessing the ``__annotations__`` of a possibly unknown - object, best practice in Python versions 3.10 and - newer is to call :func:`getattr` with three arguments, - for example ``getattr(o, '__annotations__', None)``. - - Before Python 3.10, accessing ``__annotations__`` on a class that - defines no annotations but that has a parent class with - annotations would return the parent's ``__annotations__``. - In Python 3.10 and newer, the child class's annotations - will be an empty dict instead. +Python 3.10 adds a new function to the standard library: +:func:`inspect.get_annotations`. In Python versions 3.10 +and newer, calling this function is the best practice for +accessing the annotations dict of any object that supports +annotations. This function can also "un-stringize" +stringized annotations for you. + +If for some reason :func:`inspect.get_annotations` isn't +viable for your use case, you may access the +``__annotations__`` data member manually. Best practice +for this changed in Python 3.10 as well: as of Python 3.10, +``o.__annotations__`` is guaranteed to *always* work +on Python functions, classes, and modules. If you're +certain the object you're examining is one of these three +*specific* objects, you may simply use ``o.__annotations__`` +to get at the object's annotations dict. + +However, other types of callables--for example, +callables created by :func:`functools.partial`--may +not have an ``__annotations__`` attribute defined. When +accessing the ``__annotations__`` of a possibly unknown +object, best practice in Python versions 3.10 and +newer is to call :func:`getattr` with three arguments, +for example ``getattr(o, '__annotations__', None)``. + +Before Python 3.10, accessing ``__annotations__`` on a class that +defines no annotations but that has a parent class with +annotations would return the parent's ``__annotations__``. +In Python 3.10 and newer, the child class's annotations +will be an empty dict instead. Accessing The Annotations Dict Of An Object In Python 3.9 And Older =================================================================== - In Python 3.9 and older, accessing the annotations dict - of an object is much more complicated than in newer versions. - The problem is a design flaw in these older versions of Python, - specifically to do with class annotations. +In Python 3.9 and older, accessing the annotations dict +of an object is much more complicated than in newer versions. +The problem is a design flaw in these older versions of Python, +specifically to do with class annotations. - Best practice for accessing the annotations dict of other - objects--functions, other callables, and modules--is the same - as best practice for 3.10, assuming you aren't calling - :func:`inspect.get_annotations`: you should use three-argument - :func:`getattr` to access the object's ``__annotations__`` - attribute. +Best practice for accessing the annotations dict of other +objects--functions, other callables, and modules--is the same +as best practice for 3.10, assuming you aren't calling +:func:`inspect.get_annotations`: you should use three-argument +:func:`getattr` to access the object's ``__annotations__`` +attribute. 
- Unfortunately, this isn't best practice for classes. The problem - is that, since ``__annotations__`` is optional on classes, and - because classes can inherit attributes from their base classes, - accessing the ``__annotations__`` attribute of a class may - inadvertently return the annotations dict of a *base class.* - As an example:: +Unfortunately, this isn't best practice for classes. The problem +is that, since ``__annotations__`` is optional on classes, and +because classes can inherit attributes from their base classes, +accessing the ``__annotations__`` attribute of a class may +inadvertently return the annotations dict of a *base class.* +As an example:: - class Base: - a: int = 3 - b: str = 'abc' + class Base: + a: int = 3 + b: str = 'abc' - class Derived(Base): - pass + class Derived(Base): + pass - print(Derived.__annotations__) + print(Derived.__annotations__) - This will print the annotations dict from ``Base``, not - ``Derived``. +This will print the annotations dict from ``Base``, not +``Derived``. - Your code will have to have a separate code path if the object - you're examining is a class (``isinstance(o, type)``). - In that case, best practice relies on an implementation detail - of Python 3.9 and before: if a class has annotations defined, - they are stored in the class's ``__dict__`` dictionary. Since - the class may or may not have annotations defined, best practice - is to call the ``get`` method on the class dict. +Your code will have to have a separate code path if the object +you're examining is a class (``isinstance(o, type)``). +In that case, best practice relies on an implementation detail +of Python 3.9 and before: if a class has annotations defined, +they are stored in the class's ``__dict__`` dictionary. Since +the class may or may not have annotations defined, best practice +is to call the ``get`` method on the class dict. - To put it all together, here is some sample code that safely - accesses the ``__annotations__`` attribute on an arbitrary - object in Python 3.9 and before:: +To put it all together, here is some sample code that safely +accesses the ``__annotations__`` attribute on an arbitrary +object in Python 3.9 and before:: - if isinstance(o, type): - ann = o.__dict__.get('__annotations__', None) - else: - ann = getattr(o, '__annotations__', None) + if isinstance(o, type): + ann = o.__dict__.get('__annotations__', None) + else: + ann = getattr(o, '__annotations__', None) - After running this code, ``ann`` should be either a - dictionary or ``None``. You're encouraged to double-check - the type of ``ann`` using :func:`isinstance` before further - examination. +After running this code, ``ann`` should be either a +dictionary or ``None``. You're encouraged to double-check +the type of ``ann`` using :func:`isinstance` before further +examination. - Note that some exotic or malformed type objects may not have - a ``__dict__`` attribute, so for extra safety you may also wish - to use :func:`getattr` to access ``__dict__``. +Note that some exotic or malformed type objects may not have +a ``__dict__`` attribute, so for extra safety you may also wish +to use :func:`getattr` to access ``__dict__``. Manually Un-Stringizing Stringized Annotations ============================================== - In situations where some annotations may be "stringized", - and you wish to evaluate those strings to produce the - Python values they represent, it really is best to - call :func:`inspect.get_annotations` to do this work - for you. 
- - If you're using Python 3.9 or older, or if for some reason - you can't use :func:`inspect.get_annotations`, you'll need - to duplicate its logic. You're encouraged to examine the - implementation of :func:`inspect.get_annotations` in the - current Python version and follow a similar approach. - - In a nutshell, if you wish to evaluate a stringized annotation - on an arbitrary object ``o``: - - * If ``o`` is a module, use ``o.__dict__`` as the - ``globals`` when calling :func:`eval`. - * If ``o`` is a class, use ``sys.modules[o.__module__].__dict__`` - as the ``globals``, and ``dict(vars(o))`` as the ``locals``, - when calling :func:`eval`. - * If ``o`` is a wrapped callable using :func:`functools.update_wrapper`, - :func:`functools.wraps`, or :func:`functools.partial`, iteratively - unwrap it by accessing either ``o.__wrapped__`` or ``o.func`` as - appropriate, until you have found the root unwrapped function. - * If ``o`` is a callable (but not a class), use - ``o.__globals__`` as the globals when calling :func:`eval`. - - However, not all string values used as annotations can - be successfully turned into Python values by :func:`eval`. - String values could theoretically contain any valid string, - and in practice there are valid use cases for type hints that - require annotating with string values that specifically - *can't* be evaluated. For example: - - * :pep:`604` union types using ``|``, before support for this - was added to Python 3.10. - * Definitions that aren't needed at runtime, only imported - when :const:`typing.TYPE_CHECKING` is true. - - If :func:`eval` attempts to evaluate such values, it will - fail and raise an exception. So, when designing a library - API that works with annotations, it's recommended to only - attempt to evaluate string values when explicitly requested - to by the caller. +In situations where some annotations may be "stringized", +and you wish to evaluate those strings to produce the +Python values they represent, it really is best to +call :func:`inspect.get_annotations` to do this work +for you. + +If you're using Python 3.9 or older, or if for some reason +you can't use :func:`inspect.get_annotations`, you'll need +to duplicate its logic. You're encouraged to examine the +implementation of :func:`inspect.get_annotations` in the +current Python version and follow a similar approach. + +In a nutshell, if you wish to evaluate a stringized annotation +on an arbitrary object ``o``: + +* If ``o`` is a module, use ``o.__dict__`` as the + ``globals`` when calling :func:`eval`. +* If ``o`` is a class, use ``sys.modules[o.__module__].__dict__`` + as the ``globals``, and ``dict(vars(o))`` as the ``locals``, + when calling :func:`eval`. +* If ``o`` is a wrapped callable using :func:`functools.update_wrapper`, + :func:`functools.wraps`, or :func:`functools.partial`, iteratively + unwrap it by accessing either ``o.__wrapped__`` or ``o.func`` as + appropriate, until you have found the root unwrapped function. +* If ``o`` is a callable (but not a class), use + ``o.__globals__`` as the globals when calling :func:`eval`. + +However, not all string values used as annotations can +be successfully turned into Python values by :func:`eval`. +String values could theoretically contain any valid string, +and in practice there are valid use cases for type hints that +require annotating with string values that specifically +*can't* be evaluated. For example: + +* :pep:`604` union types using ``|``, before support for this + was added to Python 3.10. 
+* Definitions that aren't needed at runtime, only imported + when :const:`typing.TYPE_CHECKING` is true. + +If :func:`eval` attempts to evaluate such values, it will +fail and raise an exception. So, when designing a library +API that works with annotations, it's recommended to only +attempt to evaluate string values when explicitly requested +to by the caller. Best Practices For ``__annotations__`` In Any Python Version ============================================================ - * You should avoid assigning to the ``__annotations__`` member - of objects directly. Let Python manage setting ``__annotations__``. +* You should avoid assigning to the ``__annotations__`` member + of objects directly. Let Python manage setting ``__annotations__``. - * If you do assign directly to the ``__annotations__`` member - of an object, you should always set it to a ``dict`` object. +* If you do assign directly to the ``__annotations__`` member + of an object, you should always set it to a ``dict`` object. - * If you directly access the ``__annotations__`` member - of an object, you should ensure that it's a - dictionary before attempting to examine its contents. +* If you directly access the ``__annotations__`` member + of an object, you should ensure that it's a + dictionary before attempting to examine its contents. - * You should avoid modifying ``__annotations__`` dicts. +* You should avoid modifying ``__annotations__`` dicts. - * You should avoid deleting the ``__annotations__`` attribute - of an object. +* You should avoid deleting the ``__annotations__`` attribute + of an object. ``__annotations__`` Quirks ========================== - In all versions of Python 3, function - objects lazy-create an annotations dict if no annotations - are defined on that object. You can delete the ``__annotations__`` - attribute using ``del fn.__annotations__``, but if you then - access ``fn.__annotations__`` the object will create a new empty dict - that it will store and return as its annotations. Deleting the - annotations on a function before it has lazily created its annotations - dict will throw an ``AttributeError``; using ``del fn.__annotations__`` - twice in a row is guaranteed to always throw an ``AttributeError``. - - Everything in the above paragraph also applies to class and module - objects in Python 3.10 and newer. - - In all versions of Python 3, you can set ``__annotations__`` - on a function object to ``None``. However, subsequently - accessing the annotations on that object using ``fn.__annotations__`` - will lazy-create an empty dictionary as per the first paragraph of - this section. This is *not* true of modules and classes, in any Python - version; those objects permit setting ``__annotations__`` to any - Python value, and will retain whatever value is set. - - If Python stringizes your annotations for you - (using ``from __future__ import annotations``), and you - specify a string as an annotation, the string will - itself be quoted. In effect the annotation is quoted - *twice.* For example:: - - from __future__ import annotations - def foo(a: "str"): pass - - print(foo.__annotations__) - - This prints ``{'a': "'str'"}``. This shouldn't really be considered - a "quirk"; it's mentioned here simply because it might be surprising. +In all versions of Python 3, function +objects lazy-create an annotations dict if no annotations +are defined on that object. 
You can delete the ``__annotations__`` +attribute using ``del fn.__annotations__``, but if you then +access ``fn.__annotations__`` the object will create a new empty dict +that it will store and return as its annotations. Deleting the +annotations on a function before it has lazily created its annotations +dict will throw an ``AttributeError``; using ``del fn.__annotations__`` +twice in a row is guaranteed to always throw an ``AttributeError``. + +Everything in the above paragraph also applies to class and module +objects in Python 3.10 and newer. + +In all versions of Python 3, you can set ``__annotations__`` +on a function object to ``None``. However, subsequently +accessing the annotations on that object using ``fn.__annotations__`` +will lazy-create an empty dictionary as per the first paragraph of +this section. This is *not* true of modules and classes, in any Python +version; those objects permit setting ``__annotations__`` to any +Python value, and will retain whatever value is set. + +If Python stringizes your annotations for you +(using ``from __future__ import annotations``), and you +specify a string as an annotation, the string will +itself be quoted. In effect the annotation is quoted +*twice.* For example:: + + from __future__ import annotations + def foo(a: "str"): pass + + print(foo.__annotations__) + +This prints ``{'a': "'str'"}``. This shouldn't really be considered +a "quirk"; it's mentioned here simply because it might be surprising. diff --git a/Doc/howto/argparse.rst b/Doc/howto/argparse.rst index 52e98fa9620194..ae5bab90bf8131 100644 --- a/Doc/howto/argparse.rst +++ b/Doc/howto/argparse.rst @@ -788,6 +788,59 @@ but not both at the same time: -q, --quiet +How to translate the argparse output +==================================== + +The output of the :mod:`argparse` module such as its help text and error +messages are all made translatable using the :mod:`gettext` module. This +allows applications to easily localize messages produced by +:mod:`argparse`. See also :ref:`i18n-howto`. + +For instance, in this :mod:`argparse` output: + +.. code-block:: shell-session + + $ python prog.py --help + usage: prog.py [-h] [-v | -q] x y + + calculate X to the power of Y + + positional arguments: + x the base + y the exponent + + options: + -h, --help show this help message and exit + -v, --verbose + -q, --quiet + +The strings ``usage:``, ``positional arguments:``, ``options:`` and +``show this help message and exit`` are all translatable. + +In order to translate these strings, they must first be extracted +into a ``.po`` file. For example, using `Babel `__, +run this command: + +.. code-block:: shell-session + + $ pybabel extract -o messages.po /usr/lib/python3.12/argparse.py + +This command will extract all translatable strings from the :mod:`argparse` +module and output them into a file named ``messages.po``. This command assumes +that your Python installation is in ``/usr/lib``. + +You can find out the location of the :mod:`argparse` module on your system +using this script:: + + import argparse + print(argparse.__file__) + +Once the messages in the ``.po`` file are translated and the translations are +installed using :mod:`gettext`, :mod:`argparse` will be able to display the +translated messages. + +To translate your own strings in the :mod:`argparse` output, use :mod:`gettext`. 
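+As a minimal sketch, assuming the translated catalog has been compiled and
+installed as ``locale/<language>/LC_MESSAGES/messages.mo`` (the ``messages``
+domain and the ``locale`` directory are illustrative choices), an application
+could make the translations visible to :mod:`argparse` like this::
+
+    import gettext
+
+    # Bind the "messages" domain to the directory holding the compiled
+    # .mo files and make it the default domain; argparse resolves its
+    # messages through gettext.gettext(), so its output is translated
+    # according to the user's locale (LANGUAGE, LC_ALL, LC_MESSAGES, LANG).
+    gettext.bindtextdomain('messages', 'locale')
+    gettext.textdomain('messages')
+
+    import argparse
+
+    parser = argparse.ArgumentParser(description='calculate X to the power of Y')
+    parser.print_help()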
+ Conclusion ========== diff --git a/Doc/howto/clinic.rst b/Doc/howto/clinic.rst index efeb22c618b512..463a938fafa8dc 100644 --- a/Doc/howto/clinic.rst +++ b/Doc/howto/clinic.rst @@ -8,17 +8,30 @@ Argument Clinic How-To :author: Larry Hastings +**Source code:** :source:`Tools/clinic/clinic.py`. .. topic:: Abstract Argument Clinic is a preprocessor for CPython C files. - Its purpose is to automate all the boilerplate involved - with writing argument parsing code for "builtins". - This document shows you how to convert your first C - function to work with Argument Clinic, and then introduces - some advanced topics on Argument Clinic usage. + It was introduced in Python 3.4 with :pep:`436`, + in order to provide introspection signatures, + and to generate performant and tailor-made boilerplate code + for argument parsing in CPython builtins, + module level functions, and class methods. + This document is divided in four major sections: - Currently Argument Clinic is considered internal-only + * :ref:`clinic-background` talks about the basic concepts and goals of + Argument Clinic. + * :ref:`clinic-reference` describes the command-line interface and Argument + Clinic terminology. + * :ref:`clinic-tutorial` guides you through all the steps required to + adapt an existing C function to Argument Clinic. + * :ref:`clinic-howtos` details how to handle specific tasks. + + +.. note:: + + Argument Clinic is considered internal-only for CPython. Its use is not supported for files outside CPython, and no guarantees are made regarding backwards compatibility for future versions. In other words: if you @@ -28,89 +41,34 @@ Argument Clinic How-To of CPython *could* be totally incompatible and break all your code. -The goals of Argument Clinic -============================ - -Argument Clinic's primary goal -is to take over responsibility for all argument parsing code -inside CPython. This means that, when you convert a function -to work with Argument Clinic, that function should no longer -do any of its own argument parsing—the code generated by -Argument Clinic should be a "black box" to you, where CPython -calls in at the top, and your code gets called at the bottom, -with ``PyObject *args`` (and maybe ``PyObject *kwargs``) -magically converted into the C variables and types you need. - -In order for Argument Clinic to accomplish its primary goal, -it must be easy to use. Currently, working with CPython's -argument parsing library is a chore, requiring maintaining -redundant information in a surprising number of places. -When you use Argument Clinic, you don't have to repeat yourself. - -Obviously, no one would want to use Argument Clinic unless -it's solving their problem—and without creating new problems of -its own. -So it's paramount that Argument Clinic generate correct code. -It'd be nice if the code was faster, too, but at the very least -it should not introduce a major speed regression. (Eventually Argument -Clinic *should* make a major speedup possible—we could -rewrite its code generator to produce tailor-made argument -parsing code, rather than calling the general-purpose CPython -argument parsing library. That would make for the fastest -argument parsing possible!) - -Additionally, Argument Clinic must be flexible enough to -work with any approach to argument parsing. Python has -some functions with some very strange parsing behaviors; -Argument Clinic's goal is to support all of them. 
- -Finally, the original motivation for Argument Clinic was -to provide introspection "signatures" for CPython builtins. -It used to be, the introspection query functions would throw -an exception if you passed in a builtin. With Argument -Clinic, that's a thing of the past! - -One idea you should keep in mind, as you work with -Argument Clinic: the more information you give it, the -better job it'll be able to do. -Argument Clinic is admittedly relatively simple right -now. But as it evolves it will get more sophisticated, -and it should be able to do many interesting and smart -things with all the information you give it. - - -Basic concepts and usage -======================== - -Argument Clinic ships with CPython; you'll find it in ``Tools/clinic/clinic.py``. -If you run that script, specifying a C file as an argument: +.. _clinic-background: -.. code-block:: shell-session +Background +========== - $ python Tools/clinic/clinic.py foo.c +Basic concepts +-------------- -Argument Clinic will scan over the file looking for lines that -look exactly like this: +When Argument Clinic is run on a file, either via the :ref:`clinic-cli` +or via ``make clinic``, it will scan over the input files looking for +:term:`start lines `: .. code-block:: none /*[clinic input] -When it finds one, it reads everything up to a line that looks -exactly like this: +When it finds one, it reads everything up to the :term:`end line`: .. code-block:: none [clinic start generated code]*/ -Everything in between these two lines is input for Argument Clinic. -All of these lines, including the beginning and ending comment -lines, are collectively called an Argument Clinic "block". - -When Argument Clinic parses one of these blocks, it -generates output. This output is rewritten into the C file -immediately after the block, followed by a comment containing a checksum. -The Argument Clinic block now looks like this: +Everything in between these two lines is Argument Clinic :term:`input`. +When Argument Clinic parses input, it generates :term:`output`. +The output is rewritten into the C file immediately after the input, +followed by a :term:`checksum line`. +All of these lines, including the :term:`start line` and :term:`checksum line`, +are collectively called an Argument Clinic :term:`block`: .. code-block:: none @@ -118,54 +76,220 @@ The Argument Clinic block now looks like this: ... clinic input goes here ... [clinic start generated code]*/ ... clinic output goes here ... - /*[clinic end generated code: checksum=...]*/ + /*[clinic end generated code: ...]*/ If you run Argument Clinic on the same file a second time, Argument Clinic -will discard the old output and write out the new output with a fresh checksum -line. However, if the input hasn't changed, the output won't change either. +will discard the old :term:`output` and write out the new output with a fresh +:term:`checksum line`. +If the :term:`input` hasn't changed, the output won't change either. + +.. note:: + + You should never modify the output of an Argument Clinic block, + as any change will be lost in future Argument Clinic runs; + Argument Clinic will detect an output checksum mismatch and regenerate the + correct output. + If you are not happy with the generated output, + you should instead change the input until it produces the output you want. + + +.. _clinic-reference: + +Reference +========= + + +.. _clinic-terminology: + +Terminology +----------- + +.. glossary:: + + start line + The line ``/*[clinic input]``. 
+      This line marks the beginning of Argument Clinic input.
+      Note that the *start line* opens a C block comment.
+
+   end line
+      The line ``[clinic start generated code]*/``.
+      The *end line* marks the *end* of Argument Clinic :term:`input`,
+      but at the same time marks the *start* of Argument Clinic :term:`output`,
+      thus the text *"clinic start generated code"*.
+      Note that the *end line* closes the C block comment opened
+      by the *start line*.
+
+   checksum
+      A hash to distinguish unique :term:`inputs <input>`
+      and :term:`outputs <output>`.
+
+   checksum line
+      A line that looks like ``/*[clinic end generated code: ...]*/``.
+      The three dots will be replaced by a :term:`checksum` generated from the
+      :term:`input`, and a :term:`checksum` generated from the :term:`output`.
+      The checksum line marks the end of Argument Clinic generated code,
+      and is used by Argument Clinic to determine if it needs to regenerate
+      output.
+
+   input
+      The text between the :term:`start line` and the :term:`end line`.
+      Note that the start and end lines open and close a C block comment;
+      the *input* is thus a part of that same C block comment.
+
+   output
+      The text between the :term:`end line` and the :term:`checksum line`.
+
+   block
+      All text from the :term:`start line` to the :term:`checksum line` inclusively.
+
+
+.. _clinic-cli:
+
+Command-line interface
+----------------------
+
+The Argument Clinic :abbr:`CLI (Command-Line Interface)` is typically used to
+process a single source file, like this:
+
+.. code-block:: shell-session
+
+    $ python3 ./Tools/clinic/clinic.py foo.c
+
+The CLI supports the following options:
+
+.. program:: ./Tools/clinic/clinic.py [-h] [-f] [-o OUTPUT] [-v] \
+             [--converters] [--make] [--srcdir SRCDIR] [FILE ...]
+
+.. option:: -h, --help
+
+   Print CLI usage.
+
+.. option:: -f, --force
+
+   Force output regeneration.
+
+.. option:: -o, --output OUTPUT
+
+   Redirect file output to OUTPUT.
+
+.. option:: -v, --verbose
+
+   Enable verbose mode.
+
+.. option:: --converters
+
+   Print a list of all supported converters and return converters.
+
+.. option:: --make

-You should never modify the output portion of an Argument Clinic block. Instead,
-change the input until it produces the output you want. (That's the purpose of the
-checksum—to detect if someone changed the output, as these edits would be lost
-the next time Argument Clinic writes out fresh output.)
+   Walk :option:`--srcdir` to run over all relevant files.

-For the sake of clarity, here's the terminology we'll use with Argument Clinic:
+.. option:: --srcdir SRCDIR

-* The first line of the comment (``/*[clinic input]``) is the *start line*.
-* The last line of the initial comment (``[clinic start generated code]*/``) is the *end line*.
-* The last line (``/*[clinic end generated code: checksum=...]*/``) is the *checksum line*.
-* In between the start line and the end line is the *input*.
-* In between the end line and the checksum line is the *output*.
-* All the text collectively, from the start line to the checksum line inclusively,
-  is the *block*. (A block that hasn't been successfully processed by Argument
-  Clinic yet doesn't have output or a checksum line, but it's still considered
-  a block.)
+   The directory tree to walk in :option:`--make` mode.

+.. option:: --exclude EXCLUDE

-Converting your first function
-==============================
+   A file to exclude in :option:`--make` mode.
+   This option can be given multiple times.
+
+.. option:: FILE ...
+
+   The list of files to process.
+
+
+..
_clinic-classes: + +Classes for extending Argument Clinic +------------------------------------- + +.. module:: clinic + +.. class:: CConverter + + The base class for all converters. + See :ref:`clinic-howto-custom-converter` for how to subclass this class. + + .. attribute:: type + + The C type to use for this variable. + :attr:`!type` should be a Python string specifying the type, + e.g. ``'int'``. + If this is a pointer type, the type string should end with ``' *'``. + + .. attribute:: default + + The Python default value for this parameter, as a Python value. + Or the magic value ``unspecified`` if there is no default. + + .. attribute:: py_default + + :attr:`!default` as it should appear in Python code, + as a string. + Or ``None`` if there is no default. + + .. attribute:: c_default + + :attr:`!default` as it should appear in C code, + as a string. + Or ``None`` if there is no default. + + .. attribute:: c_ignored_default + + The default value used to initialize the C variable when + there is no default, but not specifying a default may + result in an "uninitialized variable" warning. This can + easily happen when using option groups—although + properly written code will never actually use this value, + the variable does get passed in to the impl, and the + C compiler will complain about the "use" of the + uninitialized value. This value should always be a + non-empty string. + + .. attribute:: converter + + The name of the C converter function, as a string. + + .. attribute:: impl_by_reference + + A boolean value. If true, + Argument Clinic will add a ``&`` in front of the name of + the variable when passing it into the impl function. + + .. attribute:: parse_by_reference + + A boolean value. If true, + Argument Clinic will add a ``&`` in front of the name of + the variable when passing it into :c:func:`PyArg_ParseTuple`. + + +.. _clinic-tutorial: + +Tutorial +======== The best way to get a sense of how Argument Clinic works is to convert a function to work with it. Here, then, are the bare minimum steps you'd need to follow to convert a function to work with Argument Clinic. Note that for code you plan to check in to CPython, you really should take the conversion farther, -using some of the advanced concepts you'll see later on in -the document (like "return converters" and "self converters"). +using some of the :ref:`advanced concepts ` +you'll see later on in the document, +like :ref:`clinic-howto-return-converters` +and :ref:`clinic-howto-self-converter`. But we'll keep it simple for this walkthrough so you can learn. -Let's dive in! - -0. Make sure you're working with a freshly updated checkout - of the CPython trunk. +First, make sure you're working with a freshly updated checkout +of the CPython trunk. -1. Find a Python builtin that calls either :c:func:`PyArg_ParseTuple` - or :c:func:`PyArg_ParseTupleAndKeywords`, and hasn't been converted - to work with Argument Clinic yet. - For my example I'm using ``_pickle.Pickler.dump()``. +Next, find a Python builtin that calls either :c:func:`PyArg_ParseTuple` +or :c:func:`PyArg_ParseTupleAndKeywords`, and hasn't been converted +to work with Argument Clinic yet. +For this tutorial, we'll be using +:py:meth:`_pickle.Pickler.dump `. -2. If the call to the ``PyArg_Parse`` function uses any of the - following format units: +If the call to the :c:func:`!PyArg_Parse*` function uses any of the +following format units...: .. code-block:: none @@ -176,396 +300,387 @@ Let's dive in! 
et et# - or if it has multiple calls to :c:func:`PyArg_ParseTuple`, - you should choose a different function. Argument Clinic *does* - support all of these scenarios. But these are advanced - topics—let's do something simpler for your first function. - - Also, if the function has multiple calls to :c:func:`PyArg_ParseTuple` - or :c:func:`PyArg_ParseTupleAndKeywords` where it supports different - types for the same argument, or if the function uses something besides - PyArg_Parse functions to parse its arguments, it probably - isn't suitable for conversion to Argument Clinic. Argument Clinic - doesn't support generic functions or polymorphic parameters. - -3. Add the following boilerplate above the function, creating our block:: - - /*[clinic input] - [clinic start generated code]*/ - -4. Cut the docstring and paste it in between the ``[clinic]`` lines, - removing all the junk that makes it a properly quoted C string. - When you're done you should have just the text, based at the left - margin, with no line wider than 80 characters. - (Argument Clinic will preserve indents inside the docstring.) - - If the old docstring had a first line that looked like a function - signature, throw that line away. (The docstring doesn't need it - anymore—when you use ``help()`` on your builtin in the future, - the first line will be built automatically based on the function's - signature.) - - Sample:: - - /*[clinic input] - Write a pickled representation of obj to the open file. - [clinic start generated code]*/ - -5. If your docstring doesn't have a "summary" line, Argument Clinic will - complain. So let's make sure it has one. The "summary" line should - be a paragraph consisting of a single 80-column line - at the beginning of the docstring. +... or if it has multiple calls to :c:func:`PyArg_ParseTuple`, +you should choose a different function. +(See :ref:`clinic-howto-advanced-converters` for those scenarios.) - (Our example docstring consists solely of a summary line, so the sample - code doesn't have to change for this step.) +Also, if the function has multiple calls to :c:func:`!PyArg_ParseTuple` +or :c:func:`PyArg_ParseTupleAndKeywords` where it supports different +types for the same argument, or if the function uses something besides +:c:func:`!PyArg_Parse*` functions to parse its arguments, it probably +isn't suitable for conversion to Argument Clinic. Argument Clinic +doesn't support generic functions or polymorphic parameters. -6. Above the docstring, enter the name of the function, followed - by a blank line. This should be the Python name of the function, - and should be the full dotted path - to the function—it should start with the name of the module, - include any sub-modules, and if the function is a method on - a class it should include the class name too. - - Sample:: +Next, add the following boilerplate above the function, +creating our input block:: /*[clinic input] - _pickle.Pickler.dump - - Write a pickled representation of obj to the open file. [clinic start generated code]*/ -7. If this is the first time that module or class has been used with Argument - Clinic in this C file, - you must declare the module and/or class. Proper Argument Clinic hygiene - prefers declaring these in a separate block somewhere near the - top of the C file, in the same way that include files and statics go at - the top. (In our sample code we'll just show the two blocks next to - each other.) 
+Cut the docstring and paste it in between the ``[clinic]`` lines, +removing all the junk that makes it a properly quoted C string. +When you're done you should have just the text, based at the left +margin, with no line wider than 80 characters. +Argument Clinic will preserve indents inside the docstring. - The name of the class and module should be the same as the one - seen by Python. Check the name defined in the :c:type:`PyModuleDef` - or :c:type:`PyTypeObject` as appropriate. +If the old docstring had a first line that looked like a function +signature, throw that line away; The docstring doesn't need it anymore --- +when you use :py:func:`help` on your builtin in the future, +the first line will be built automatically based on the function's signature. - When you declare a class, you must also specify two aspects of its type - in C: the type declaration you'd use for a pointer to an instance of - this class, and a pointer to the :c:type:`PyTypeObject` for this class. +Example docstring summary line:: - Sample:: + /*[clinic input] + Write a pickled representation of obj to the open file. + [clinic start generated code]*/ - /*[clinic input] - module _pickle - class _pickle.Pickler "PicklerObject *" "&Pickler_Type" - [clinic start generated code]*/ +If your docstring doesn't have a "summary" line, Argument Clinic will +complain, so let's make sure it has one. The "summary" line should +be a paragraph consisting of a single 80-column line +at the beginning of the docstring. +(See :pep:`257` regarding docstring conventions.) - /*[clinic input] - _pickle.Pickler.dump +Our example docstring consists solely of a summary line, so the sample +code doesn't have to change for this step. - Write a pickled representation of obj to the open file. - [clinic start generated code]*/ +Now, above the docstring, enter the name of the function, followed +by a blank line. This should be the Python name of the function, +and should be the full dotted path to the function --- +it should start with the name of the module, +include any sub-modules, and if the function is a method on +a class it should include the class name too. +In our example, :mod:`!_pickle` is the module, :py:class:`!Pickler` is the class, +and :py:meth:`!dump` is the method, so the name becomes +:py:meth:`!_pickle.Pickler.dump`:: + /*[clinic input] + _pickle.Pickler.dump + Write a pickled representation of obj to the open file. + [clinic start generated code]*/ -8. Declare each of the parameters to the function. Each parameter - should get its own line. All the parameter lines should be - indented from the function name and the docstring. +If this is the first time that module or class has been used with Argument +Clinic in this C file, +you must declare the module and/or class. Proper Argument Clinic hygiene +prefers declaring these in a separate block somewhere near the +top of the C file, in the same way that include files and statics go at +the top. +In our sample code we'll just show the two blocks next to each other. - The general form of these parameter lines is as follows: +The name of the class and module should be the same as the one +seen by Python. Check the name defined in the :c:type:`PyModuleDef` +or :c:type:`PyTypeObject` as appropriate. - .. 
code-block:: none +When you declare a class, you must also specify two aspects of its type +in C: the type declaration you'd use for a pointer to an instance of +this class, and a pointer to the :c:type:`!PyTypeObject` for this class:: - name_of_parameter: converter + /*[clinic input] + module _pickle + class _pickle.Pickler "PicklerObject *" "&Pickler_Type" + [clinic start generated code]*/ - If the parameter has a default value, add that after the - converter: + /*[clinic input] + _pickle.Pickler.dump - .. code-block:: none + Write a pickled representation of obj to the open file. + [clinic start generated code]*/ - name_of_parameter: converter = default_value +Declare each of the parameters to the function. Each parameter +should get its own line. All the parameter lines should be +indented from the function name and the docstring. +The general form of these parameter lines is as follows: - Argument Clinic's support for "default values" is quite sophisticated; - please see :ref:`the section below on default values ` - for more information. +.. code-block:: none - Add a blank line below the parameters. + name_of_parameter: converter - What's a "converter"? It establishes both the type - of the variable used in C, and the method to convert the Python - value into a C value at runtime. - For now you're going to use what's called a "legacy converter"—a - convenience syntax intended to make porting old code into Argument - Clinic easier. +If the parameter has a default value, add that after the +converter: - For each parameter, copy the "format unit" for that - parameter from the ``PyArg_Parse()`` format argument and - specify *that* as its converter, as a quoted - string. ("format unit" is the formal name for the one-to-three - character substring of the ``format`` parameter that tells - the argument parsing function what the type of the variable - is and how to convert it. For more on format units please - see :ref:`arg-parsing`.) +.. code-block:: none - For multicharacter format units like ``z#``, use the - entire two-or-three character string. + name_of_parameter: converter = default_value - Sample:: +Argument Clinic's support for "default values" is quite sophisticated; +see :ref:`clinic-howto-default-values` for more information. - /*[clinic input] - module _pickle - class _pickle.Pickler "PicklerObject *" "&Pickler_Type" - [clinic start generated code]*/ +Next, add a blank line below the parameters. - /*[clinic input] - _pickle.Pickler.dump +What's a "converter"? +It establishes both the type of the variable used in C, +and the method to convert the Python value into a C value at runtime. +For now you're going to use what's called a "legacy converter" --- +a convenience syntax intended to make porting old code into Argument +Clinic easier. - obj: 'O' +For each parameter, copy the "format unit" for that +parameter from the :c:func:`PyArg_Parse` format argument and +specify *that* as its converter, as a quoted string. +The "format unit" is the formal name for the one-to-three +character substring of the *format* parameter that tells +the argument parsing function what the type of the variable +is and how to convert it. +For more on format units please see :ref:`arg-parsing`. - Write a pickled representation of obj to the open file. - [clinic start generated code]*/ +For multicharacter format units like ``z#``, +use the entire two-or-three character string. -9. If your function has ``|`` in the format string, meaning some - parameters have default values, you can ignore it. 
Argument - Clinic infers which parameters are optional based on whether - or not they have default values. +Sample:: - If your function has ``$`` in the format string, meaning it - takes keyword-only arguments, specify ``*`` on a line by - itself before the first keyword-only argument, indented the - same as the parameter lines. + /*[clinic input] + module _pickle + class _pickle.Pickler "PicklerObject *" "&Pickler_Type" + [clinic start generated code]*/ - (``_pickle.Pickler.dump`` has neither, so our sample is unchanged.) + /*[clinic input] + _pickle.Pickler.dump + obj: 'O' -10. If the existing C function calls :c:func:`PyArg_ParseTuple` - (as opposed to :c:func:`PyArg_ParseTupleAndKeywords`), then all its - arguments are positional-only. + Write a pickled representation of obj to the open file. + [clinic start generated code]*/ - To mark all parameters as positional-only in Argument Clinic, - add a ``/`` on a line by itself after the last parameter, - indented the same as the parameter lines. +If your function has ``|`` in the format string, +meaning some parameters have default values, you can ignore it. +Argument Clinic infers which parameters are optional +based on whether or not they have default values. - Currently this is all-or-nothing; either all parameters are - positional-only, or none of them are. (In the future Argument - Clinic may relax this restriction.) +If your function has ``$`` in the format string, +meaning it takes keyword-only arguments, +specify ``*`` on a line by itself before the first keyword-only argument, +indented the same as the parameter lines. - Sample:: +:py:meth:`!_pickle.Pickler.dump` has neither, so our sample is unchanged. - /*[clinic input] - module _pickle - class _pickle.Pickler "PicklerObject *" "&Pickler_Type" - [clinic start generated code]*/ +Next, if the existing C function calls :c:func:`PyArg_ParseTuple` +(as opposed to :c:func:`PyArg_ParseTupleAndKeywords`), then all its +arguments are positional-only. - /*[clinic input] - _pickle.Pickler.dump +To mark parameters as positional-only in Argument Clinic, +add a ``/`` on a line by itself after the last positional-only parameter, +indented the same as the parameter lines. - obj: 'O' - / +Sample:: - Write a pickled representation of obj to the open file. - [clinic start generated code]*/ + /*[clinic input] + module _pickle + class _pickle.Pickler "PicklerObject *" "&Pickler_Type" + [clinic start generated code]*/ -11. It's helpful to write a per-parameter docstring for each parameter. - But per-parameter docstrings are optional; you can skip this step - if you prefer. + /*[clinic input] + _pickle.Pickler.dump - Here's how to add a per-parameter docstring. The first line - of the per-parameter docstring must be indented further than the - parameter definition. The left margin of this first line establishes - the left margin for the whole per-parameter docstring; all the text - you write will be outdented by this amount. You can write as much - text as you like, across multiple lines if you wish. + obj: 'O' + / - Sample:: + Write a pickled representation of obj to the open file. + [clinic start generated code]*/ - /*[clinic input] - module _pickle - class _pickle.Pickler "PicklerObject *" "&Pickler_Type" - [clinic start generated code]*/ +It can be helpful to write a per-parameter docstring for each parameter. +Since per-parameter docstrings are optional, +you can skip this step if you prefer. - /*[clinic input] - _pickle.Pickler.dump +Nevertheless, here's how to add a per-parameter docstring. 
+The first line of the per-parameter docstring +must be indented further than the parameter definition. +The left margin of this first line establishes +the left margin for the whole per-parameter docstring; +all the text you write will be outdented by this amount. +You can write as much text as you like, across multiple lines if you wish. - obj: 'O' - The object to be pickled. - / +Sample:: - Write a pickled representation of obj to the open file. - [clinic start generated code]*/ + /*[clinic input] + module _pickle + class _pickle.Pickler "PicklerObject *" "&Pickler_Type" + [clinic start generated code]*/ -12. Save and close the file, then run ``Tools/clinic/clinic.py`` on - it. With luck everything worked---your block now has output, and - a ``.c.h`` file has been generated! Reopen the file in your - text editor to see:: + /*[clinic input] + _pickle.Pickler.dump - /*[clinic input] - _pickle.Pickler.dump + obj: 'O' + The object to be pickled. + / - obj: 'O' - The object to be pickled. - / + Write a pickled representation of obj to the open file. + [clinic start generated code]*/ - Write a pickled representation of obj to the open file. - [clinic start generated code]*/ +Save and close the file, then run ``Tools/clinic/clinic.py`` on it. +With luck everything worked---your block now has output, +and a :file:`.c.h` file has been generated! +Reload the file in your text editor to see the generated code:: - static PyObject * - _pickle_Pickler_dump(PicklerObject *self, PyObject *obj) - /*[clinic end generated code: output=87ecad1261e02ac7 input=552eb1c0f52260d9]*/ + /*[clinic input] + _pickle.Pickler.dump - Obviously, if Argument Clinic didn't produce any output, it's because - it found an error in your input. Keep fixing your errors and retrying - until Argument Clinic processes your file without complaint. + obj: 'O' + The object to be pickled. + / - For readability, most of the glue code has been generated to a ``.c.h`` - file. You'll need to include that in your original ``.c`` file, - typically right after the clinic module block:: + Write a pickled representation of obj to the open file. + [clinic start generated code]*/ - #include "clinic/_pickle.c.h" + static PyObject * + _pickle_Pickler_dump(PicklerObject *self, PyObject *obj) + /*[clinic end generated code: output=87ecad1261e02ac7 input=552eb1c0f52260d9]*/ -13. Double-check that the argument-parsing code Argument Clinic generated - looks basically the same as the existing code. +Obviously, if Argument Clinic didn't produce any output, +it's because it found an error in your input. +Keep fixing your errors and retrying until Argument Clinic processes your file +without complaint. - First, ensure both places use the same argument-parsing function. - The existing code must call either - :c:func:`PyArg_ParseTuple` or :c:func:`PyArg_ParseTupleAndKeywords`; - ensure that the code generated by Argument Clinic calls the - *exact* same function. +For readability, most of the glue code has been generated to a :file:`.c.h` +file. You'll need to include that in your original :file:`.c` file, +typically right after the clinic module block:: - Second, the format string passed in to :c:func:`PyArg_ParseTuple` or - :c:func:`PyArg_ParseTupleAndKeywords` should be *exactly* the same - as the hand-written one in the existing function, up to the colon - or semi-colon. + #include "clinic/_pickle.c.h" - (Argument Clinic always generates its format strings - with a ``:`` followed by the name of the function. 
If the - existing code's format string ends with ``;``, to provide - usage help, this change is harmless—don't worry about it.) +Double-check that the argument-parsing code Argument Clinic generated +looks basically the same as the existing code. - Third, for parameters whose format units require two arguments - (like a length variable, or an encoding string, or a pointer - to a conversion function), ensure that the second argument is - *exactly* the same between the two invocations. +First, ensure both places use the same argument-parsing function. +The existing code must call either +:c:func:`PyArg_ParseTuple` or :c:func:`PyArg_ParseTupleAndKeywords`; +ensure that the code generated by Argument Clinic calls the +*exact* same function. - Fourth, inside the output portion of the block you'll find a preprocessor - macro defining the appropriate static :c:type:`PyMethodDef` structure for - this builtin:: +Second, the format string passed in to :c:func:`!PyArg_ParseTuple` or +:c:func:`!PyArg_ParseTupleAndKeywords` should be *exactly* the same +as the hand-written one in the existing function, +up to the colon or semi-colon. - #define __PICKLE_PICKLER_DUMP_METHODDEF \ - {"dump", (PyCFunction)__pickle_Pickler_dump, METH_O, __pickle_Pickler_dump__doc__}, +Argument Clinic always generates its format strings +with a ``:`` followed by the name of the function. +If the existing code's format string ends with ``;``, +to provide usage help, this change is harmless --- don't worry about it. - This static structure should be *exactly* the same as the existing static - :c:type:`PyMethodDef` structure for this builtin. +Third, for parameters whose format units require two arguments, +like a length variable, an encoding string, or a pointer +to a conversion function, ensure that the second argument is +*exactly* the same between the two invocations. - If any of these items differ in *any way*, - adjust your Argument Clinic function specification and rerun - ``Tools/clinic/clinic.py`` until they *are* the same. +Fourth, inside the output portion of the block, +you'll find a preprocessor macro defining the appropriate static +:c:type:`PyMethodDef` structure for this builtin:: + #define __PICKLE_PICKLER_DUMP_METHODDEF \ + {"dump", (PyCFunction)__pickle_Pickler_dump, METH_O, __pickle_Pickler_dump__doc__}, -14. Notice that the last line of its output is the declaration - of your "impl" function. This is where the builtin's implementation goes. - Delete the existing prototype of the function you're modifying, but leave - the opening curly brace. Now delete its argument parsing code and the - declarations of all the variables it dumps the arguments into. - Notice how the Python arguments are now arguments to this impl function; - if the implementation used different names for these variables, fix it. +This static structure should be *exactly* the same as the existing static +:c:type:`!PyMethodDef` structure for this builtin. - Let's reiterate, just because it's kind of weird. Your code should now - look like this:: +If any of these items differ in *any way*, +adjust your Argument Clinic function specification and rerun +``Tools/clinic/clinic.py`` until they *are* the same. - static return_type - your_function_impl(...) - /*[clinic end generated code: checksum=...]*/ - { - ... +Notice that the last line of its output is the declaration +of your "impl" function. This is where the builtin's implementation goes. +Delete the existing prototype of the function you're modifying, but leave +the opening curly brace. 
Now delete its argument parsing code and the +declarations of all the variables it dumps the arguments into. +Notice how the Python arguments are now arguments to this impl function; +if the implementation used different names for these variables, fix it. - Argument Clinic generated the checksum line and the function prototype just - above it. You should write the opening (and closing) curly braces for the - function, and the implementation inside. +Let's reiterate, just because it's kind of weird. +Your code should now look like this:: - Sample:: + static return_type + your_function_impl(...) + /*[clinic end generated code: input=..., output=...]*/ + { + ... - /*[clinic input] - module _pickle - class _pickle.Pickler "PicklerObject *" "&Pickler_Type" - [clinic start generated code]*/ - /*[clinic end generated code: checksum=da39a3ee5e6b4b0d3255bfef95601890afd80709]*/ +Argument Clinic generated the checksum line and the function prototype just +above it. You should write the opening and closing curly braces for the +function, and the implementation inside. - /*[clinic input] - _pickle.Pickler.dump +Sample:: - obj: 'O' - The object to be pickled. - / + /*[clinic input] + module _pickle + class _pickle.Pickler "PicklerObject *" "&Pickler_Type" + [clinic start generated code]*/ + /*[clinic end generated code: checksum=da39a3ee5e6b4b0d3255bfef95601890afd80709]*/ - Write a pickled representation of obj to the open file. - [clinic start generated code]*/ + /*[clinic input] + _pickle.Pickler.dump - PyDoc_STRVAR(__pickle_Pickler_dump__doc__, - "Write a pickled representation of obj to the open file.\n" - "\n" - ... - static PyObject * - _pickle_Pickler_dump_impl(PicklerObject *self, PyObject *obj) - /*[clinic end generated code: checksum=3bd30745bf206a48f8b576a1da3d90f55a0a4187]*/ - { - /* Check whether the Pickler was initialized correctly (issue3664). - Developers often forget to call __init__() in their subclasses, which - would trigger a segfault without this check. */ - if (self->write == NULL) { - PyErr_Format(PicklingError, - "Pickler.__init__() was not called by %s.__init__()", - Py_TYPE(self)->tp_name); - return NULL; - } + obj: 'O' + The object to be pickled. + / - if (_Pickler_ClearBuffer(self) < 0) - return NULL; + Write a pickled representation of obj to the open file. + [clinic start generated code]*/ - ... + PyDoc_STRVAR(__pickle_Pickler_dump__doc__, + "Write a pickled representation of obj to the open file.\n" + "\n" + ... + static PyObject * + _pickle_Pickler_dump_impl(PicklerObject *self, PyObject *obj) + /*[clinic end generated code: checksum=3bd30745bf206a48f8b576a1da3d90f55a0a4187]*/ + { + /* Check whether the Pickler was initialized correctly (issue3664). + Developers often forget to call __init__() in their subclasses, which + would trigger a segfault without this check. */ + if (self->write == NULL) { + PyErr_Format(PicklingError, + "Pickler.__init__() was not called by %s.__init__()", + Py_TYPE(self)->tp_name); + return NULL; + } -15. Remember the macro with the :c:type:`PyMethodDef` structure for this - function? Find the existing :c:type:`PyMethodDef` structure for this - function and replace it with a reference to the macro. (If the builtin - is at module scope, this will probably be very near the end of the file; - if the builtin is a class method, this will probably be below but relatively - near to the implementation.) + if (_Pickler_ClearBuffer(self) < 0) { + return NULL; + } - Note that the body of the macro contains a trailing comma. 
So when you - replace the existing static :c:type:`PyMethodDef` structure with the macro, - *don't* add a comma to the end. + ... - Sample:: +Remember the macro with the :c:type:`PyMethodDef` structure for this function? +Find the existing :c:type:`!PyMethodDef` structure for this +function and replace it with a reference to the macro. If the builtin +is at module scope, this will probably be very near the end of the file; +if the builtin is a class method, this will probably be below but relatively +near to the implementation. - static struct PyMethodDef Pickler_methods[] = { - __PICKLE_PICKLER_DUMP_METHODDEF - __PICKLE_PICKLER_CLEAR_MEMO_METHODDEF - {NULL, NULL} /* sentinel */ - }; +Note that the body of the macro contains a trailing comma; when you +replace the existing static :c:type:`!PyMethodDef` structure with the macro, +*don't* add a comma to the end. +Sample:: -16. Argument Clinic may generate new instances of ``_Py_ID``. For example:: + static struct PyMethodDef Pickler_methods[] = { + __PICKLE_PICKLER_DUMP_METHODDEF + __PICKLE_PICKLER_CLEAR_MEMO_METHODDEF + {NULL, NULL} /* sentinel */ + }; - &_Py_ID(new_unique_py_id) +Argument Clinic may generate new instances of ``_Py_ID``. For example:: - If it does, you'll have to run ``Tools/scripts/generate_global_objects.py`` - to regenerate the list of precompiled identifiers at this point. + &_Py_ID(new_unique_py_id) +If it does, you'll have to run ``make regen-global-objects`` +to regenerate the list of precompiled identifiers at this point. -17. Compile, then run the relevant portions of the regression-test suite. - This change should not introduce any new compile-time warnings or errors, - and there should be no externally visible change to Python's behavior. +Finally, compile, then run the relevant portions of the regression-test suite. +This change should not introduce any new compile-time warnings or errors, +and there should be no externally visible change to Python's behavior, +except for one difference: :py:func:`inspect.signature` run on your function +should now provide a valid signature! - Well, except for one difference: ``inspect.signature()`` run on your function - should now provide a valid signature! +Congratulations, you've ported your first function to work with Argument Clinic! - Congratulations, you've ported your first function to work with Argument Clinic! +.. _clinic-howtos: How-to guides ============= -How to to rename C functions and variables generated by Argument Clinic ------------------------------------------------------------------------ +How to rename C functions and variables generated by Argument Clinic +-------------------------------------------------------------------- Argument Clinic automatically names the functions it generates for you. Occasionally this may cause a problem, if the generated name collides with @@ -576,15 +691,15 @@ Argument Clinic will use that function name for the base (generated) function, then add ``"_impl"`` to the end and use that for the name of the impl function. For example, if we wanted to rename the C function names generated for -``pickle.Pickler.dump``, it'd look like this:: +:py:meth:`pickle.Pickler.dump`, it'd look like this:: /*[clinic input] pickle.Pickler.dump as pickler_dumper ... -The base function would now be named ``pickler_dumper()``, -and the impl function would now be named ``pickler_dumper_impl()``. +The base function would now be named :c:func:`!pickler_dumper`, +and the impl function would now be named :c:func:`!pickler_dumper_impl`. 
Similarly, you may have a problem where you want to give a parameter @@ -602,9 +717,9 @@ using the same ``"as"`` syntax:: fix_imports: bool = True Here, the name used in Python (in the signature and the ``keywords`` -array) would be ``file``, but the C variable would be named ``file_obj``. +array) would be *file*, but the C variable would be named ``file_obj``. -You can use this to rename the ``self`` parameter too! +You can use this to rename the *self* parameter too! How to convert functions using ``PyArg_UnpackTuple`` @@ -612,7 +727,7 @@ How to convert functions using ``PyArg_UnpackTuple`` To convert a function parsing its arguments with :c:func:`PyArg_UnpackTuple`, simply write out all the arguments, specifying each as an ``object``. You -may specify the ``type`` argument to cast the type as appropriate. All +may specify the *type* argument to cast the type as appropriate. All arguments should be marked positional-only (add a ``/`` on a line by itself after the last argument). @@ -631,16 +746,16 @@ keyword-only arguments.) This approach was used to simulate optional arguments back before :c:func:`PyArg_ParseTupleAndKeywords` was created. While functions using this approach can often be converted to -use :c:func:`PyArg_ParseTupleAndKeywords`, optional arguments, and default values, +use :c:func:`!PyArg_ParseTupleAndKeywords`, optional arguments, and default values, it's not always possible. Some of these legacy functions have -behaviors :c:func:`PyArg_ParseTupleAndKeywords` doesn't directly support. -The most obvious example is the builtin function ``range()``, which has +behaviors :c:func:`!PyArg_ParseTupleAndKeywords` doesn't directly support. +The most obvious example is the builtin function :py:func:`range`, which has an optional argument on the *left* side of its required argument! -Another example is ``curses.window.addch()``, which has a group of two +Another example is :py:meth:`curses.window.addch`, which has a group of two arguments that must always be specified together. (The arguments are -called ``x`` and ``y``; if you call the function passing in ``x``, -you must also pass in ``y``—and if you don't pass in ``x`` you may not -pass in ``y`` either.) +called *x* and *y*; if you call the function passing in *x*, +you must also pass in *y* — and if you don't pass in *x* you may not +pass in *y* either.) In any case, the goal of Argument Clinic is to support argument parsing for all existing CPython builtins without changing their semantics. @@ -661,7 +776,7 @@ can *only* be used with positional-only parameters. To specify an optional group, add a ``[`` on a line by itself before the parameters you wish to group together, and a ``]`` on a line by itself -after these parameters. As an example, here's how ``curses.window.addch`` +after these parameters. As an example, here's how :py:meth:`curses.window.addch` uses optional groups to make the first two parameters and the last parameter optional:: @@ -747,25 +862,25 @@ the same converters. All arguments to Argument Clinic converters are keyword-only. All Argument Clinic converters accept the following arguments: - ``c_default`` + *c_default* The default value for this parameter when defined in C. Specifically, this will be the initializer for the variable declared in the "parse function". See :ref:`the section on default values ` for how to use this. Specified as a string. - ``annotation`` + *annotation* The annotation value for this parameter. 
Not currently supported, because :pep:`8` mandates that the Python library may not use annotations. - ``unused`` + *unused* Wrap the argument with :c:macro:`Py_UNUSED` in the impl function signature. In addition, some converters accept additional arguments. Here is a list of these arguments, along with their meanings: - ``accept`` + *accept* A set of Python types (and possibly pseudo-types); this restricts the allowable Python argument to values of these types. (This is not a general-purpose facility; as a rule it only supports @@ -773,38 +888,38 @@ of these arguments, along with their meanings: To accept ``None``, add ``NoneType`` to this set. - ``bitwise`` + *bitwise* Only supported for unsigned integers. The native integer value of this Python argument will be written to the parameter without any range checking, even for negative values. - ``converter`` + *converter* Only supported by the ``object`` converter. Specifies the name of a :ref:`C "converter function" ` to use to convert this object to a native type. - ``encoding`` + *encoding* Only supported for strings. Specifies the encoding to use when converting this string from a Python str (Unicode) value into a C ``char *`` value. - ``subclass_of`` + *subclass_of* Only supported for the ``object`` converter. Requires that the Python value be a subclass of a Python type, as expressed in C. - ``type`` + *type* Only supported for the ``object`` and ``self`` converters. Specifies the C type that will be used to declare the variable. Default value is ``"PyObject *"``. - ``zeroes`` + *zeroes* Only supported for strings. If true, embedded NUL bytes (``'\\0'``) are permitted inside the value. The length of the string will be passed in to the impl function, just after the string parameter, as a parameter named ``_length``. Please note, not every possible combination of arguments will work. -Usually these arguments are implemented by specific ``PyArg_ParseTuple`` +Usually these arguments are implemented by specific :c:func:`PyArg_ParseTuple` *format units*, with specific behavior. For example, currently you cannot call ``unsigned_short`` without also specifying ``bitwise=True``. Although it's perfectly reasonable to think this would work, these semantics don't @@ -893,6 +1008,8 @@ you *must* not call :c:func:`PyBuffer_Release` on the provided buffer. Argument Clinic generates code that does it for you (in the parsing function). +.. _clinic-howto-advanced-converters: + How to use advanced converters ------------------------------ @@ -904,25 +1021,26 @@ conversion functions, or types, or strings specifying an encoding. (But "legacy converters" don't support arguments. That's why we skipped them for your first function.) The argument you specified to the format unit is now an argument to the converter; this -argument is either ``converter`` (for ``O&``), ``subclass_of`` (for ``O!``), -or ``encoding`` (for all the format units that start with ``e``). +argument is either *converter* (for ``O&``), *subclass_of* (for ``O!``), +or *encoding* (for all the format units that start with ``e``). -When using ``subclass_of``, you may also want to use the other -custom argument for ``object()``: ``type``, which lets you set the type +When using *subclass_of*, you may also want to use the other +custom argument for ``object()``: *type*, which lets you set the type actually used for the parameter. 
For example, if you want to ensure -that the object is a subclass of ``PyUnicode_Type``, you probably want +that the object is a subclass of :c:var:`PyUnicode_Type`, you probably want to use the converter ``object(type='PyUnicodeObject *', subclass_of='&PyUnicode_Type')``. One possible problem with using Argument Clinic: it takes away some possible flexibility for the format units starting with ``e``. When writing a -``PyArg_Parse`` call by hand, you could theoretically decide at runtime what -encoding string to pass in to :c:func:`PyArg_ParseTuple`. But now this string must +:c:func:`!PyArg_Parse*` call by hand, you could theoretically decide at runtime what +encoding string to pass to that call. But now this string must be hard-coded at Argument-Clinic-preprocessing-time. This limitation is deliberate; it made supporting this format unit much easier, and may allow for future optimizations. This restriction doesn't seem unreasonable; CPython itself always passes in static hard-coded encoding strings for parameters whose format units start with ``e``. +.. _clinic-howto-default-values: .. _default_values: How to assign default values to parameter @@ -969,7 +1087,7 @@ expression. Currently the following are explicitly supported: * Numeric constants (integer and float) * String constants * ``True``, ``False``, and ``None`` -* Simple symbolic constants like ``sys.maxsize``, which must +* Simple symbolic constants like :py:data:`sys.maxsize`, which must start with the name of the module (In the future, this may need to get even more elaborate, @@ -990,28 +1108,28 @@ Consider the following example: foo: Py_ssize_t = sys.maxsize - 1 -``sys.maxsize`` can have different values on different platforms. Therefore +:py:data:`sys.maxsize` can have different values on different platforms. Therefore Argument Clinic can't simply evaluate that expression locally and hard-code it in C. So it stores the default in such a way that it will get evaluated at runtime, when the user asks for the function's signature. What namespace is available when the expression is evaluated? It's evaluated in the context of the module the builtin came from. So, if your module has an -attribute called "``max_widgets``", you may simply use it: +attribute called :py:attr:`!max_widgets`, you may simply use it: .. code-block:: none foo: Py_ssize_t = max_widgets If the symbol isn't found in the current module, it fails over to looking in -``sys.modules``. That's how it can find ``sys.maxsize`` for example. (Since you -don't know in advance what modules the user will load into their interpreter, +:py:data:`sys.modules`. That's how it can find :py:data:`sys.maxsize` for example. +(Since you don't know in advance what modules the user will load into their interpreter, it's best to restrict yourself to modules that are preloaded by Python itself.) Evaluating default values only at runtime means Argument Clinic can't compute the correct equivalent C default value. So you need to tell it explicitly. When you use an expression, you must also specify the equivalent expression -in C, using the ``c_default`` parameter to the converter: +in C, using the *c_default* parameter to the converter: .. code-block:: none @@ -1033,6 +1151,8 @@ you're not permitted to use: * Tuple/list/set/dict literals. +.. _clinic-howto-return-converters: + How to use return converters ---------------------------- @@ -1077,7 +1197,7 @@ indicate an error has occurred? Normally, a function returns a valid (non-``NUL pointer for success, and ``NULL`` for failure. 
But if you use an integer return converter, all integers are valid. How can Argument Clinic detect an error? Its solution: each return converter implicitly looks for a special value that indicates an error. If you return -that value, and an error has been set (``PyErr_Occurred()`` returns a true +that value, and an error has been set (:c:func:`PyErr_Occurred` returns a true value), then the generated code will propagate the error. Otherwise it will encode the value you return like normal. @@ -1175,6 +1295,8 @@ variable to the C code:: /*[python checksum:...]*/ +.. _clinic-howto-self-converter: + How to use the "self converter" ------------------------------- @@ -1183,9 +1305,9 @@ using a default converter. It automatically sets the ``type`` of this parameter to the "pointer to an instance" you specified when you declared the type. However, you can override Argument Clinic's converter and specify one yourself. -Just add your own ``self`` parameter as the first parameter in a +Just add your own *self* parameter as the first parameter in a block, and ensure that its converter is an instance of -``self_converter`` or a subclass thereof. +:class:`!self_converter` or a subclass thereof. What's the point? This lets you override the type of ``self``, or give it a different default name. @@ -1193,7 +1315,7 @@ or give it a different default name. How do you specify the custom type you want to cast ``self`` to? If you only have one or two functions with the same type for ``self``, you can directly use Argument Clinic's existing ``self`` converter, -passing in the type you want to use as the ``type`` parameter:: +passing in the type you want to use as the *type* parameter:: /*[clinic input] @@ -1208,7 +1330,7 @@ passing in the type you want to use as the ``type`` parameter:: On the other hand, if you have a lot of functions that will use the same type for ``self``, it's best to create your own converter, subclassing -``self_converter`` but overwriting the ``type`` member:: +:class:`!self_converter` but overwriting the :py:attr:`!type` member:: /*[python input] class PicklerObject_converter(self_converter): @@ -1236,8 +1358,8 @@ module level state. Use :c:func:`PyType_FromModuleAndSpec` to associate a new heap type with a module. You can now use :c:func:`PyType_GetModuleState` on the defining class to fetch the module state, for example from a module method. -Example from ``Modules/zlibmodule.c``. First, ``defining_class`` is added to -the clinic input:: +Example from :source:`Modules/zlibmodule.c`. +First, ``defining_class`` is added to the clinic input:: /*[clinic input] zlib.Compress.compress @@ -1267,16 +1389,17 @@ module state:: Each method may only have one argument using this converter, and it must appear after ``self``, or, if ``self`` is not used, as the first argument. The argument will be of type ``PyTypeObject *``. The argument will not appear in the -``__text_signature__``. +:py:attr:`!__text_signature__`. -The ``defining_class`` converter is not compatible with ``__init__`` and ``__new__`` -methods, which cannot use the ``METH_METHOD`` convention. +The ``defining_class`` converter is not compatible with :py:meth:`!__init__` +and :py:meth:`!__new__` methods, which cannot use the :c:macro:`METH_METHOD` +convention. It is not possible to use ``defining_class`` with slot methods. In order to fetch the module state from such methods, use :c:func:`PyType_GetModuleByDef` to look up the module and then :c:func:`PyModule_GetState` to fetch the module state.
Example from the ``setattro`` slot method in -``Modules/_threadmodule.c``:: +:source:`Modules/_threadmodule.c`:: static int local_setattro(localobject *self, PyObject *name, PyObject *v) @@ -1290,76 +1413,31 @@ state. Example from the ``setattro`` slot method in See also :pep:`573`. +.. _clinic-howto-custom-converter: + How to write a custom converter ------------------------------- -As we hinted at in the previous section... you can write your own converters! -A converter is simply a Python class that inherits from ``CConverter``. -The main purpose of a custom converter is if you have a parameter using -the ``O&`` format unit—parsing this parameter means calling +A converter is a Python class that inherits from :py:class:`CConverter`. +The main purpose of a custom converter, is for parameters parsed with +the ``O&`` format unit --- parsing such a parameter means calling a :c:func:`PyArg_ParseTuple` "converter function". -Your converter class should be named ``*something*_converter``. -If the name follows this convention, then your converter class -will be automatically registered with Argument Clinic; its name -will be the name of your class with the ``_converter`` suffix -stripped off. (This is accomplished with a metaclass.) - -You shouldn't subclass ``CConverter.__init__``. Instead, you should -write a ``converter_init()`` function. ``converter_init()`` -always accepts a ``self`` parameter; after that, all additional -parameters *must* be keyword-only. Any arguments passed in to -the converter in Argument Clinic will be passed along to your -``converter_init()``. - -There are some additional members of ``CConverter`` you may wish -to specify in your subclass. Here's the current list: - -``type`` - The C type to use for this variable. - ``type`` should be a Python string specifying the type, e.g. ``int``. - If this is a pointer type, the type string should end with ``' *'``. - -``default`` - The Python default value for this parameter, as a Python value. - Or the magic value ``unspecified`` if there is no default. - -``py_default`` - ``default`` as it should appear in Python code, - as a string. - Or ``None`` if there is no default. - -``c_default`` - ``default`` as it should appear in C code, - as a string. - Or ``None`` if there is no default. - -``c_ignored_default`` - The default value used to initialize the C variable when - there is no default, but not specifying a default may - result in an "uninitialized variable" warning. This can - easily happen when using option groups—although - properly written code will never actually use this value, - the variable does get passed in to the impl, and the - C compiler will complain about the "use" of the - uninitialized value. This value should always be a - non-empty string. - -``converter`` - The name of the C converter function, as a string. - -``impl_by_reference`` - A boolean value. If true, - Argument Clinic will add a ``&`` in front of the name of - the variable when passing it into the impl function. - -``parse_by_reference`` - A boolean value. If true, - Argument Clinic will add a ``&`` in front of the name of - the variable when passing it into :c:func:`PyArg_ParseTuple`. - - -Here's the simplest example of a custom converter, from ``Modules/zlibmodule.c``:: +Your converter class should be named :samp:`{ConverterName}_converter`. 
+By following this convention, your converter class will be automatically +registered with Argument Clinic, with its *converter name* being the name of +your converter class with the ``_converter`` suffix stripped off. + +Instead of subclassing :py:meth:`!CConverter.__init__`, +write a :py:meth:`!converter_init` method. +:py:meth:`!converter_init` always accepts a *self* parameter. +After *self*, all additional parameters **must** be keyword-only. +Any arguments passed to the converter in Argument Clinic +will be passed along to your :py:meth:`!converter_init` method. +See :py:class:`CConverter` for a list of members you may wish to specify in +your subclass. + +Here's the simplest example of a custom converter, from :source:`Modules/zlibmodule.c`:: /*[python input] @@ -1370,11 +1448,11 @@ Here's the simplest example of a custom converter, from ``Modules/zlibmodule.c`` [python start generated code]*/ /*[python end generated code: output=da39a3ee5e6b4b0d input=35521e4e733823c7]*/ -This block adds a converter to Argument Clinic named ``ssize_t``. Parameters -declared as ``ssize_t`` will be declared as type :c:type:`Py_ssize_t`, and will -be parsed by the ``'O&'`` format unit, which will call the -``ssize_t_converter`` converter function. ``ssize_t`` variables -automatically support default values. +This block adds a converter named ``ssize_t`` to Argument Clinic. +Parameters declared as ``ssize_t`` will be declared with type :c:type:`Py_ssize_t`, +and will be parsed by the ``'O&'`` format unit, +which will call the :c:func:`!ssize_t_converter` converter C function. +``ssize_t`` variables automatically support default values. More sophisticated custom converters can insert custom C code to handle initialization and cleanup. @@ -1389,18 +1467,18 @@ Writing a custom return converter is much like writing a custom converter. Except it's somewhat simpler, because return converters are themselves much simpler. -Return converters must subclass ``CReturnConverter``. +Return converters must subclass :py:class:`!CReturnConverter`. There are no examples yet of custom return converters, because they are not widely used yet. If you wish to -write your own return converter, please read ``Tools/clinic/clinic.py``, -specifically the implementation of ``CReturnConverter`` and +write your own return converter, please read :source:`Tools/clinic/clinic.py`, +specifically the implementation of :py:class:`!CReturnConverter` and all its subclasses. How to convert ``METH_O`` and ``METH_NOARGS`` functions ------------------------------------------------------- -To convert a function using ``METH_O``, make sure the function's +To convert a function using :c:macro:`METH_O`, make sure the function's single argument is using the ``object`` converter, and mark the arguments as positional-only:: @@ -1412,24 +1490,25 @@ arguments as positional-only:: [clinic start generated code]*/ -To convert a function using ``METH_NOARGS``, just don't specify +To convert a function using :c:macro:`METH_NOARGS`, just don't specify any arguments. You can still use a self converter, a return converter, and specify -a ``type`` argument to the object converter for ``METH_O``. +a *type* argument to the object converter for :c:macro:`METH_O`. How to convert ``tp_new`` and ``tp_init`` functions --------------------------------------------------- -You can convert ``tp_new`` and ``tp_init`` functions. Just name -them ``__new__`` or ``__init__`` as appropriate. 
Notes: +You can convert :c:member:`~PyTypeObject.tp_new` and +:c:member:`~PyTypeObject.tp_init` functions. +Just name them ``__new__`` or ``__init__`` as appropriate. Notes: * The function name generated for ``__new__`` doesn't end in ``__new__`` like it would by default. It's just the name of the class, converted into a valid C identifier. -* No ``PyMethodDef`` ``#define`` is generated for these functions. +* No :c:type:`PyMethodDef` ``#define`` is generated for these functions. * ``__init__`` functions return ``int``, not ``PyObject *``. @@ -1464,7 +1543,7 @@ Let's start with defining some terminology: *field* A field, in this context, is a subsection of Clinic's output. - For example, the ``#define`` for the ``PyMethodDef`` structure + For example, the ``#define`` for the :c:type:`PyMethodDef` structure is a field, called ``methoddef_define``. Clinic has seven different fields it can output per function definition: @@ -1508,8 +1587,8 @@ Let's start with defining some terminology: The filename chosen for the file is ``{basename}.clinic{extension}``, where ``basename`` and ``extension`` were assigned the output from ``os.path.splitext()`` run on the current file. (Example: - the ``file`` destination for ``_pickle.c`` would be written to - ``_pickle.clinic.c``.) + the ``file`` destination for :file:`_pickle.c` would be written to + :file:`_pickle.clinic.c`.) **Important: When using a** ``file`` **destination, you** *must check in* **the generated file!** @@ -1762,7 +1841,7 @@ like so:: } #endif /* HAVE_FUNCTIONNAME */ -Then, remove those three lines from the ``PyMethodDef`` structure, +Then, remove those three lines from the :c:type:`PyMethodDef` structure, replacing them with the macro Argument Clinic generated: .. code-block:: none @@ -1803,7 +1882,7 @@ This may mean that you get a complaint from Argument Clinic: When this happens, just open your file, find the ``dump buffer`` block that Argument Clinic added to your file (it'll be at the very bottom), then -move it above the ``PyMethodDef`` structure where that macro is used. +move it above the :c:type:`PyMethodDef` structure where that macro is used. How to use Argument Clinic in Python files @@ -1824,3 +1903,126 @@ blocks embedded in Python files look slightly different. They look like this: #[python start generated code]*/ def foo(): pass #/*[python checksum:...]*/ + + +.. _clinic-howto-override-signature: + +How to override the generated signature +--------------------------------------- + +You can use the ``@text_signature`` directive to override the default generated +signature in the docstring. +This can be useful for complex signatures that Argument Clinic cannot handle. +The ``@text_signature`` directive takes one argument: +the custom signature as a string. +The provided signature is copied verbatim to the generated docstring. + +Example from :source:`Objects/codeobject.c`:: + + /*[clinic input] + @text_signature "($self, /, **changes)" + code.replace + * + co_argcount: int(c_default="self->co_argcount") = unchanged + co_posonlyargcount: int(c_default="self->co_posonlyargcount") = unchanged + # etc ... + + Return a copy of the code object with new values for the specified fields. + [clinic start generated output]*/ + +The generated docstring ends up looking like this: + +.. code-block:: none + + replace($self, /, **changes) + -- + + Return a copy of the code object with new values for the specified fields. + + +.. 
_clinic-howto-deprecate-positional: + +How to deprecate passing parameters positionally +------------------------------------------------ + +Argument Clinic provides syntax that makes it possible to generate code that +deprecates passing :term:`arguments ` positionally. +For example, say we've got a module-level function :py:func:`!foo.myfunc` +that has three :term:`parameters `: +positional-or-keyword parameters *a* and *b*, and a keyword-only parameter *c*:: + + /*[clinic input] + module foo + myfunc + a: int + b: int + * + c: int + [clinic start generated output]*/ + +We now want to make the *b* parameter keyword-only; +however, we'll have to wait two releases before making this change, +as mandated by Python's backwards-compatibility policy (see :pep:`387`). +For this example, imagine we're in the development phase for Python 3.12: +that means we'll be allowed to introduce deprecation warnings in Python 3.12 +whenever the *b* parameter is passed positionally, +and we'll be allowed to make it keyword-only in Python 3.14 at the earliest. + +We can use Argument Clinic to emit the desired deprecation warnings +using the ``* [from ...]`` syntax, +by adding the line ``* [from 3.14]`` right above the *b* parameter:: + + /*[clinic input] + module foo + myfunc + a: int + * [from 3.14] + b: int + * + c: int + [clinic start generated output]*/ + +Next, regenerate Argument Clinic code (``make clinic``), +and add unit tests for the new behaviour. + +The generated code will now emit a :exc:`DeprecationWarning` +when an :term:`argument` for the :term:`parameter` *b* is passed positionally. +C preprocessor directives are also generated for emitting +compiler warnings if the ``* [from ...]`` line has not been removed +from the Argument Clinic input when the deprecation period is over, +which means when the alpha phase of the specified Python version kicks in. + +Let's return to our example and skip ahead two years: +Python 3.14 development has now entered the alpha phase, +but we forgot all about updating the Argument Clinic code +for :py:func:`!myfunc`! +Luckily for us, compiler warnings are now generated: + +.. code-block:: none + + In file included from Modules/foomodule.c:139: + Modules/clinic/foomodule.c.h:139:8: warning: In 'foomodule.c', update parameter(s) 'a' and 'b' in the clinic input of 'mymod.myfunc' to be keyword-only. [-W#warnings] + # warning "In 'foomodule.c', update parameter(s) 'a' and 'b' in the clinic input of 'mymod.myfunc' to be keyword-only. [-W#warnings]" + ^ + +We now close the deprecation phase by making *b* keyword-only; +replace the ``* [from ...]`` line above *b* +with the ``*`` from the line above *c*:: + + /*[clinic input] + module foo + myfunc + a: int + * + b: int + c: int + [clinic start generated output]*/ + +Finally, run ``make clinic`` to regenerate the Argument Clinic code, +and update your unit tests to reflect the new behaviour. + +.. note:: + + If you forget to update your input block during the alpha and beta phases, + the compiler warning will turn into a compiler error when the + release candidate phase begins. diff --git a/Doc/howto/curses.rst b/Doc/howto/curses.rst index a3068d86d85bc4..4828e2fa29bd24 100644 --- a/Doc/howto/curses.rst +++ b/Doc/howto/curses.rst @@ -527,7 +527,7 @@ If you're in doubt about the detailed behavior of the curses functions, consult the manual pages for your curses implementation, whether it's ncurses or a proprietary Unix vendor's. 
The manual pages will document any quirks, and provide complete lists of all the -functions, attributes, and :const:`ACS_\*` characters available to +functions, attributes, and :ref:`ACS_\* ` characters available to you. Because the curses API is so large, some functions aren't supported in diff --git a/Doc/howto/descriptor.rst b/Doc/howto/descriptor.rst index 3688c47f0d6ec9..1d9424cb735a46 100644 --- a/Doc/howto/descriptor.rst +++ b/Doc/howto/descriptor.rst @@ -779,8 +779,8 @@ by a search through the class's :term:`method resolution order`. If a descriptor is found, it is invoked with ``desc.__get__(None, A)``. -The full C implementation can be found in :c:func:`type_getattro()` and -:c:func:`_PyType_Lookup()` in :source:`Objects/typeobject.c`. +The full C implementation can be found in :c:func:`!type_getattro` and +:c:func:`!_PyType_Lookup` in :source:`Objects/typeobject.c`. Invocation from super @@ -794,7 +794,7 @@ for the base class ``B`` immediately following ``A`` and then returns ``B.__dict__['m'].__get__(obj, A)``. If not a descriptor, ``m`` is returned unchanged. -The full C implementation can be found in :c:func:`super_getattro()` in +The full C implementation can be found in :c:func:`!super_getattro` in :source:`Objects/typeobject.c`. A pure Python equivalent can be found in `Guido's Tutorial `_. @@ -836,8 +836,8 @@ and if they define :meth:`__set_name__`, that method is called with two arguments. The *owner* is the class where the descriptor is used, and the *name* is the class variable the descriptor was assigned to. -The implementation details are in :c:func:`type_new()` and -:c:func:`set_names()` in :source:`Objects/typeobject.c`. +The implementation details are in :c:func:`!type_new` and +:c:func:`!set_names` in :source:`Objects/typeobject.c`. Since the update logic is in :meth:`type.__new__`, notifications only take place at the time of class creation. If descriptors are added to the class diff --git a/Doc/howto/functional.rst b/Doc/howto/functional.rst index 5cf12cc52bde4e..b0f9d22d74f0e3 100644 --- a/Doc/howto/functional.rst +++ b/Doc/howto/functional.rst @@ -1072,8 +1072,8 @@ write the obvious :keyword:`for` loop:: A related function is :func:`itertools.accumulate(iterable, func=operator.add) `. It performs the same calculation, but instead of -returning only the final result, :func:`accumulate` returns an iterator that -also yields each partial result:: +returning only the final result, :func:`~itertools.accumulate` returns an iterator +that also yields each partial result:: itertools.accumulate([1, 2, 3, 4, 5]) => 1, 3, 6, 10, 15 diff --git a/Doc/howto/instrumentation.rst b/Doc/howto/instrumentation.rst index 4ce15c69dac90b..875f846aad0051 100644 --- a/Doc/howto/instrumentation.rst +++ b/Doc/howto/instrumentation.rst @@ -292,11 +292,11 @@ Available static markers .. object:: function__return(str filename, str funcname, int lineno) - This marker is the converse of :c:func:`function__entry`, and indicates that + This marker is the converse of :c:func:`!function__entry`, and indicates that execution of a Python function has ended (either via ``return``, or via an exception). It is only triggered for pure-Python (bytecode) functions. - The arguments are the same as for :c:func:`function__entry` + The arguments are the same as for :c:func:`!function__entry` .. object:: line(str filename, str funcname, int lineno) @@ -304,7 +304,7 @@ Available static markers the equivalent of line-by-line tracing with a Python profiler. It is not triggered within C functions. 
- The arguments are the same as for :c:func:`function__entry`. + The arguments are the same as for :c:func:`!function__entry`. .. object:: gc__start(int generation) diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst index 8adb85f3a87401..2551fbe87b5c2a 100644 --- a/Doc/howto/isolating-extensions.rst +++ b/Doc/howto/isolating-extensions.rst @@ -1,5 +1,7 @@ .. highlight:: c +.. _isolating-extensions-howto: + *************************** Isolating Extension Modules *************************** @@ -298,10 +300,10 @@ Watch out for the following two points in particular (but note that this is not a comprehensive list): * Unlike static types, heap type objects are mutable by default. - Use the :c:data:`Py_TPFLAGS_IMMUTABLETYPE` flag to prevent mutability. + Use the :c:macro:`Py_TPFLAGS_IMMUTABLETYPE` flag to prevent mutability. * Heap types inherit :c:member:`~PyTypeObject.tp_new` by default, so it may become possible to instantiate them from Python code. - You can prevent this with the :c:data:`Py_TPFLAGS_DISALLOW_INSTANTIATION` flag. + You can prevent this with the :c:macro:`Py_TPFLAGS_DISALLOW_INSTANTIATION` flag. Defining Heap Types @@ -333,12 +335,12 @@ To avoid memory leaks, instances of heap types must implement the garbage collection protocol. That is, heap types should: -- Have the :c:data:`Py_TPFLAGS_HAVE_GC` flag. +- Have the :c:macro:`Py_TPFLAGS_HAVE_GC` flag. - Define a traverse function using ``Py_tp_traverse``, which visits the type (e.g. using :c:expr:`Py_VISIT(Py_TYPE(self))`). Please refer to the :ref:`the documentation ` of -:c:data:`Py_TPFLAGS_HAVE_GC` and :c:member:`~PyTypeObject.tp_traverse` +:c:macro:`Py_TPFLAGS_HAVE_GC` and :c:member:`~PyTypeObject.tp_traverse` for additional considerations. If your traverse function delegates to the ``tp_traverse`` of its base class @@ -411,7 +413,7 @@ that subclass, which may be defined in different module than yours. pass For a method to get its "defining class", it must use the -:data:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS` +:ref:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS ` :c:type:`calling convention ` and the corresponding :c:type:`PyCMethod` signature:: @@ -467,7 +469,7 @@ Module State Access from Slot Methods, Getters and Setters Slot methods—the fast C equivalents for special methods, such as :c:member:`~PyNumberMethods.nb_add` for :py:attr:`~object.__add__` or -:c:member:`~PyType.tp_new` for initialization—have a very simple API that +:c:member:`~PyTypeObject.tp_new` for initialization—have a very simple API that doesn't allow passing in the defining class, unlike with :c:type:`PyCMethod`. The same goes for getters and setters defined with :c:type:`PyGetSetDef`. @@ -483,14 +485,14 @@ to get the state:: return NULL; } -``PyType_GetModuleByDef`` works by searching the +:c:func:`!PyType_GetModuleByDef` works by searching the :term:`method resolution order` (i.e. all superclasses) for the first superclass that has a corresponding module. .. note:: In very exotic cases (inheritance chains spanning multiple modules - created from the same definition), ``PyType_GetModuleByDef`` might not + created from the same definition), :c:func:`!PyType_GetModuleByDef` might not return the module of the true defining class. However, it will always return a module with the same definition, ensuring a compatible C memory layout. diff --git a/Doc/howto/regex.rst b/Doc/howto/regex.rst index 655df59e27b641..c19c48301f5848 100644 --- a/Doc/howto/regex.rst +++ b/Doc/howto/regex.rst @@ -518,6 +518,8 @@ cache. 
Compilation Flags ----------------- +.. currentmodule:: re + Compilation flags let you modify some aspects of how regular expressions work. Flags are available in the :mod:`re` module under two names, a long name such as :const:`IGNORECASE` and a short, one-letter form such as :const:`I`. (If you're diff --git a/Doc/howto/sorting.rst b/Doc/howto/sorting.rst index decce12bf3faf6..38dd09f0a721d2 100644 --- a/Doc/howto/sorting.rst +++ b/Doc/howto/sorting.rst @@ -273,7 +273,7 @@ Odds and Ends * The sort routines use ``<`` when making comparisons between two objects. So, it is easy to add a standard sort order to a class by - defining an :meth:`__lt__` method: + defining an :meth:`~object.__lt__` method: .. doctest:: @@ -281,8 +281,8 @@ Odds and Ends >>> sorted(student_objects) [('dave', 'B', 10), ('jane', 'B', 12), ('john', 'A', 15)] - However, note that ``<`` can fall back to using :meth:`__gt__` if - :meth:`__lt__` is not implemented (see :func:`object.__lt__`). + However, note that ``<`` can fall back to using :meth:`~object.__gt__` if + :meth:`~object.__lt__` is not implemented (see :func:`object.__lt__`). * Key functions need not depend directly on the objects being sorted. A key function can also access external resources. For instance, if the student grades diff --git a/Doc/howto/unicode.rst b/Doc/howto/unicode.rst index b0faa68d240896..254fe729355353 100644 --- a/Doc/howto/unicode.rst +++ b/Doc/howto/unicode.rst @@ -424,8 +424,8 @@ lowercase letters 'ss'. A second tool is the :mod:`unicodedata` module's :func:`~unicodedata.normalize` function that converts strings to one -of several normal forms, where letters followed by a combining -character are replaced with single characters. :func:`normalize` can +of several normal forms, where letters followed by a combining character are +replaced with single characters. :func:`~unicodedata.normalize` can be used to perform string comparisons that won't falsely report inequality if two strings use combining characters differently: @@ -474,8 +474,8 @@ The Unicode Standard also specifies how to do caseless comparisons:: print(compare_caseless(single_char, multiple_chars)) -This will print ``True``. (Why is :func:`NFD` invoked twice? Because -there are a few characters that make :meth:`casefold` return a +This will print ``True``. (Why is :func:`!NFD` invoked twice? Because +there are a few characters that make :meth:`~str.casefold` return a non-normalized string, so the result needs to be normalized again. See section 3.13 of the Unicode Standard for a discussion and an example.) 
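As a self-contained illustration of the unicode howto hunk above (an aside, not part of the patch): the normalize/casefold comparison it quotes in fragments can be run as a small script. The ``compare_caseless`` name and the double NFD application mirror the howto's own example; the sample strings are chosen only for illustration::

    import unicodedata

    def compare_caseless(s1, s2):
        # NFD is applied a second time because casefold() can itself
        # return a non-normalized string for a few characters.
        def NFD(s):
            return unicodedata.normalize('NFD', s)
        return NFD(NFD(s1).casefold()) == NFD(NFD(s2).casefold())

    # U+00C5 (A with ring above) versus 'A' + U+030A (combining ring above)
    single_char = '\u00c5'
    multiple_chars = 'A\u030a'
    print(compare_caseless(single_char, multiple_chars))

This prints ``True`` even though the two strings spell the character with different combining sequences.
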
diff --git a/Doc/includes/custom.c b/Doc/includes/custom.c index 9cfba50ace25db..5253f879360210 100644 --- a/Doc/includes/custom.c +++ b/Doc/includes/custom.c @@ -34,9 +34,7 @@ PyInit_custom(void) if (m == NULL) return NULL; - Py_INCREF(&CustomType); - if (PyModule_AddObject(m, "Custom", (PyObject *) &CustomType) < 0) { - Py_DECREF(&CustomType); + if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) { Py_DECREF(m); return NULL; } diff --git a/Doc/includes/sublist.c b/Doc/includes/sublist.c index b36dadf07eae87..d8aba463f30ba2 100644 --- a/Doc/includes/sublist.c +++ b/Doc/includes/sublist.c @@ -58,9 +58,7 @@ PyInit_sublist(void) if (m == NULL) return NULL; - Py_INCREF(&SubListType); - if (PyModule_AddObject(m, "SubList", (PyObject *) &SubListType) < 0) { - Py_DECREF(&SubListType); + if (PyModule_AddObjectRef(m, "SubList", (PyObject *) &SubListType) < 0) { Py_DECREF(m); return NULL; } diff --git a/Doc/includes/turtle-star.py b/Doc/includes/turtle-star.py deleted file mode 100644 index 1a5db761b32385..00000000000000 --- a/Doc/includes/turtle-star.py +++ /dev/null @@ -1,10 +0,0 @@ -from turtle import * -color('red', 'yellow') -begin_fill() -while True: - forward(200) - left(170) - if abs(pos()) < 1: - break -end_fill() -done() diff --git a/Doc/includes/typestruct.h b/Doc/includes/typestruct.h index f0ad1e47cb0d86..ec939c28831c33 100644 --- a/Doc/includes/typestruct.h +++ b/Doc/includes/typestruct.h @@ -82,5 +82,5 @@ typedef struct _typeobject { vectorcallfunc tp_vectorcall; /* bitset of which type-watchers care about this type */ - char tp_watched; + unsigned char tp_watched; } PyTypeObject; diff --git a/Doc/install/index.rst b/Doc/install/index.rst index beb34f0cf21b22..ffb4a202fe89f2 100644 --- a/Doc/install/index.rst +++ b/Doc/install/index.rst @@ -313,9 +313,9 @@ install into it. It is enabled with a simple option:: python setup.py install --user -Files will be installed into subdirectories of :data:`site.USER_BASE` (written +Files will be installed into subdirectories of :const:`site.USER_BASE` (written as :file:`{userbase}` hereafter). This scheme installs pure Python modules and -extension modules in the same location (also known as :data:`site.USER_SITE`). +extension modules in the same location (also known as :const:`site.USER_SITE`). Here are the values for UNIX, including macOS: =============== =========================================================== @@ -374,7 +374,7 @@ will expand this to your home directory:: To make Python find the distributions installed with this scheme, you may have to :ref:`modify Python's search path ` or edit -:mod:`sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit +:mod:`!sitecustomize` (see :mod:`site`) to call :func:`site.addsitedir` or edit :data:`sys.path`. The :option:`!--home` option defines the installation base directory. Files are @@ -778,7 +778,7 @@ Notes: (2) On Unix, if the :envvar:`HOME` environment variable is not defined, the user's - home directory will be determined with the :func:`getpwuid` function from the + home directory will be determined with the :func:`~pwd.getpwuid` function from the standard :mod:`pwd` module. This is done by the :func:`os.path.expanduser` function used by Distutils. 
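To make the user-scheme discussion above concrete (again an aside, not part of the patch), the locations it mentions and the :func:`site.addsitedir` hook can be inspected from a normal interpreter session; the ``~/my-python-libs`` directory below is a hypothetical path used only for illustration::

    import os
    import site
    import sys

    # Per-user installation locations for this interpreter.
    print(site.USER_BASE)    # e.g. ~/.local on Unix
    print(site.USER_SITE)    # e.g. ~/.local/lib/pythonX.Y/site-packages

    # os.path.expanduser() falls back to the pwd database when HOME is unset.
    print(os.path.expanduser('~'))

    # One way to make an extra directory importable, as the text suggests;
    # '~/my-python-libs' is a made-up path for this example.
    extra = os.path.expanduser('~/my-python-libs')
    site.addsitedir(extra)   # appends to sys.path and processes any .pth files
    print(extra in sys.path)

The same :func:`site.addsitedir` call also works from a ``sitecustomize`` module if the search path should be extended automatically at startup.
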
diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst index d29cbdff7830c8..fd60d92d4eb0f9 100644 --- a/Doc/library/__main__.rst +++ b/Doc/library/__main__.rst @@ -336,12 +336,12 @@ Note that importing ``__main__`` doesn't cause any issues with unintentionally running top-level code meant for script use which is put in the ``if __name__ == "__main__"`` block of the ``start`` module. Why does this work? -Python inserts an empty ``__main__`` module in :attr:`sys.modules` at +Python inserts an empty ``__main__`` module in :data:`sys.modules` at interpreter startup, and populates it by running top-level code. In our example this is the ``start`` module which runs line by line and imports ``namely``. In turn, ``namely`` imports ``__main__`` (which is really ``start``). That's an import cycle! Fortunately, since the partially populated ``__main__`` -module is present in :attr:`sys.modules`, Python passes that to ``namely``. +module is present in :data:`sys.modules`, Python passes that to ``namely``. See :ref:`Special considerations for __main__ ` in the import system's reference for details on how this works. diff --git a/Doc/library/_thread.rst b/Doc/library/_thread.rst index ba9314e46ab6ea..0442c298c137ba 100644 --- a/Doc/library/_thread.rst +++ b/Doc/library/_thread.rst @@ -70,10 +70,10 @@ This module defines the following constants and functions: there is no guarantee that the interruption will happen immediately. If given, *signum* is the number of the signal to simulate. - If *signum* is not given, :data:`signal.SIGINT` is simulated. + If *signum* is not given, :const:`signal.SIGINT` is simulated. If the given signal isn't handled by Python (it was set to - :data:`signal.SIG_DFL` or :data:`signal.SIG_IGN`), this function does + :const:`signal.SIG_DFL` or :const:`signal.SIG_IGN`), this function does nothing. .. versionchanged:: 3.10 @@ -150,8 +150,8 @@ This module defines the following constants and functions: .. data:: TIMEOUT_MAX The maximum value allowed for the *timeout* parameter of - :meth:`Lock.acquire`. Specifying a timeout greater than this value will - raise an :exc:`OverflowError`. + :meth:`Lock.acquire `. Specifying a timeout greater + than this value will raise an :exc:`OverflowError`. .. versionadded:: 3.2 @@ -217,8 +217,9 @@ In addition to these methods, lock objects can also be used via the * Calling :func:`sys.exit` or raising the :exc:`SystemExit` exception is equivalent to calling :func:`_thread.exit`. -* It is not possible to interrupt the :meth:`acquire` method on a lock --- the - :exc:`KeyboardInterrupt` exception will happen after the lock has been acquired. +* It is not possible to interrupt the :meth:`~threading.Lock.acquire` method on + a lock --- the :exc:`KeyboardInterrupt` exception will happen after the lock + has been acquired. * When the main thread exits, it is system defined whether the other threads survive. On most systems, they are killed without executing diff --git a/Doc/library/array.rst b/Doc/library/array.rst index 0afc217642a756..ad622627724217 100644 --- a/Doc/library/array.rst +++ b/Doc/library/array.rst @@ -53,9 +53,9 @@ Notes: It can be 16 bits or 32 bits depending on the platform. .. versionchanged:: 3.9 - ``array('u')`` now uses ``wchar_t`` as C type instead of deprecated + ``array('u')`` now uses :c:type:`wchar_t` as C type instead of deprecated ``Py_UNICODE``. This change doesn't affect its behavior because - ``Py_UNICODE`` is alias of ``wchar_t`` since Python 3.3. + ``Py_UNICODE`` is alias of :c:type:`wchar_t` since Python 3.3. 
.. deprecated-removed:: 3.3 3.16 Please migrate to ``'w'`` typecode. diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index 530cf30643687f..cd657aedf6d23d 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -2146,7 +2146,7 @@ and classes for traversing abstract syntax trees: Currently ``major`` must equal to ``3``. For example, setting ``feature_version=(3, 4)`` will allow the use of ``async`` and ``await`` as variable names. The lowest supported version is - ``(3, 4)``; the highest is ``sys.version_info[0:2]``. + ``(3, 7)``; the highest is ``sys.version_info[0:2]``. If source contains a null character ('\0'), :exc:`ValueError` is raised. @@ -2169,6 +2169,9 @@ and classes for traversing abstract syntax trees: .. versionchanged:: 3.8 Added ``type_comments``, ``mode='func_type'`` and ``feature_version``. + .. versionchanged:: 3.13 + The minimum supported version for feature_version is now (3,7) + .. function:: unparse(ast_obj) diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst index 921a394a59fec7..c7d97008fb490e 100644 --- a/Doc/library/asyncio-dev.rst +++ b/Doc/library/asyncio-dev.rst @@ -34,7 +34,7 @@ There are several ways to enable asyncio debug mode: In addition to enabling the debug mode, consider also: * setting the log level of the :ref:`asyncio logger ` to - :py:data:`logging.DEBUG`, for example the following snippet of code + :py:const:`logging.DEBUG`, for example the following snippet of code can be run at startup of the application:: logging.basicConfig(level=logging.DEBUG) @@ -142,7 +142,7 @@ Logging asyncio uses the :mod:`logging` module and all logging is performed via the ``"asyncio"`` logger. -The default log level is :py:data:`logging.INFO`, which can be easily +The default log level is :py:const:`logging.INFO`, which can be easily adjusted:: logging.getLogger("asyncio").setLevel(logging.WARNING) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 38f2e2f510c176..8f2d8f336c82bb 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -403,11 +403,11 @@ Opening network connections Open a streaming transport connection to a given address specified by *host* and *port*. - The socket family can be either :py:data:`~socket.AF_INET` or - :py:data:`~socket.AF_INET6` depending on *host* (or the *family* + The socket family can be either :py:const:`~socket.AF_INET` or + :py:const:`~socket.AF_INET6` depending on *host* (or the *family* argument, if provided). - The socket type will be :py:data:`~socket.SOCK_STREAM`. + The socket type will be :py:const:`~socket.SOCK_STREAM`. *protocol_factory* must be a callable returning an :ref:`asyncio protocol ` implementation. @@ -509,7 +509,7 @@ Opening network connections .. versionchanged:: 3.6 - The socket option :py:data:`~socket.TCP_NODELAY` is set by default + The socket option :py:const:`~socket.TCP_NODELAY` is set by default for all TCP connections. .. versionchanged:: 3.7 @@ -552,11 +552,11 @@ Opening network connections Create a datagram connection. - The socket family can be either :py:data:`~socket.AF_INET`, - :py:data:`~socket.AF_INET6`, or :py:data:`~socket.AF_UNIX`, + The socket family can be either :py:const:`~socket.AF_INET`, + :py:const:`~socket.AF_INET6`, or :py:const:`~socket.AF_UNIX`, depending on *host* (or the *family* argument, if provided). - The socket type will be :py:data:`~socket.SOCK_DGRAM`. + The socket type will be :py:const:`~socket.SOCK_DGRAM`. 
*protocol_factory* must be a callable returning a :ref:`protocol ` implementation. @@ -581,7 +581,7 @@ Opening network connections * *reuse_port* tells the kernel to allow this endpoint to be bound to the same port as other existing endpoints are bound to, so long as they all set this flag when being created. This option is not supported on Windows - and some Unixes. If the :py:data:`~socket.SO_REUSEPORT` constant is not + and some Unixes. If the :py:const:`~socket.SO_REUSEPORT` constant is not defined then this capability is unsupported. * *allow_broadcast* tells the kernel to allow this endpoint to send @@ -607,7 +607,7 @@ Opening network connections .. versionchanged:: 3.8.1 The *reuse_address* parameter is no longer supported, as using - :py:data:`~sockets.SO_REUSEADDR` poses a significant security concern for + :py:const:`~sockets.SO_REUSEADDR` poses a significant security concern for UDP. Explicitly passing ``reuse_address=True`` will raise an exception. When multiple processes with differing UIDs assign sockets to an @@ -616,7 +616,7 @@ Opening network connections For supported platforms, *reuse_port* can be used as a replacement for similar functionality. With *reuse_port*, - :py:data:`~sockets.SO_REUSEPORT` is used instead, which specifically + :py:const:`~sockets.SO_REUSEPORT` is used instead, which specifically prevents processes with differing UIDs from assigning sockets to the same socket address. @@ -634,8 +634,8 @@ Opening network connections Create a Unix connection. - The socket family will be :py:data:`~socket.AF_UNIX`; socket - type will be :py:data:`~socket.SOCK_STREAM`. + The socket family will be :py:const:`~socket.AF_UNIX`; socket + type will be :py:const:`~socket.SOCK_STREAM`. A tuple of ``(transport, protocol)`` is returned on success. @@ -671,7 +671,7 @@ Creating network servers ssl_shutdown_timeout=None, \ start_serving=True) - Create a TCP server (socket type :data:`~socket.SOCK_STREAM`) listening + Create a TCP server (socket type :const:`~socket.SOCK_STREAM`) listening on *port* of the *host* address. Returns a :class:`Server` object. @@ -699,10 +699,10 @@ Creating network servers be selected (note that if *host* resolves to multiple network interfaces, a different random port will be selected for each interface). - * *family* can be set to either :data:`socket.AF_INET` or - :data:`~socket.AF_INET6` to force the socket to use IPv4 or IPv6. + * *family* can be set to either :const:`socket.AF_INET` or + :const:`~socket.AF_INET6` to force the socket to use IPv4 or IPv6. If not set, the *family* will be determined from host name - (defaults to :data:`~socket.AF_UNSPEC`). + (defaults to :const:`~socket.AF_UNSPEC`). * *flags* is a bitmask for :meth:`getaddrinfo`. @@ -756,7 +756,7 @@ Creating network servers .. versionchanged:: 3.6 Added *ssl_handshake_timeout* and *start_serving* parameters. - The socket option :py:data:`~socket.TCP_NODELAY` is set by default + The socket option :py:const:`~socket.TCP_NODELAY` is set by default for all TCP connections. .. versionchanged:: 3.11 @@ -777,7 +777,7 @@ Creating network servers start_serving=True) Similar to :meth:`loop.create_server` but works with the - :py:data:`~socket.AF_UNIX` socket family. + :py:const:`~socket.AF_UNIX` socket family. *path* is the name of a Unix domain socket, and is required, unless a *sock* argument is provided. Abstract Unix sockets, @@ -1593,6 +1593,9 @@ Do not instantiate the :class:`Server` class directly. .. 
versionchanged:: 3.7 Server object is an asynchronous context manager since Python 3.7. + .. versionchanged:: 3.11 + This class was exposed publicly as ``asyncio.Server`` in Python 3.9.11, 3.10.3 and 3.11. + .. method:: close() Stop serving: close listening sockets and set the :attr:`sockets` diff --git a/Doc/library/asyncio-extending.rst b/Doc/library/asyncio-extending.rst index 8ffd356f2d1cc3..e7b293f484f8de 100644 --- a/Doc/library/asyncio-extending.rst +++ b/Doc/library/asyncio-extending.rst @@ -69,7 +69,7 @@ Task lifetime support ===================== A third party task implementation should call the following functions to keep a task -visible by :func:`asyncio.get_tasks` and :func:`asyncio.current_task`: +visible by :func:`asyncio.all_tasks` and :func:`asyncio.current_task`: .. function:: _register_task(task) diff --git a/Doc/library/asyncio-future.rst b/Doc/library/asyncio-future.rst index 70cec9b2f90248..893ae5518f757d 100644 --- a/Doc/library/asyncio-future.rst +++ b/Doc/library/asyncio-future.rst @@ -276,4 +276,4 @@ the Future has a result:: :func:`concurrent.futures.as_completed` functions. - :meth:`asyncio.Future.cancel` accepts an optional ``msg`` argument, - but :func:`concurrent.futures.cancel` does not. + but :meth:`concurrent.futures.Future.cancel` does not. diff --git a/Doc/library/asyncio-platforms.rst b/Doc/library/asyncio-platforms.rst index 50ad8a2ab70324..19ec726c1be060 100644 --- a/Doc/library/asyncio-platforms.rst +++ b/Doc/library/asyncio-platforms.rst @@ -37,7 +37,7 @@ All event loops on Windows do not support the following methods: * :meth:`loop.create_unix_connection` and :meth:`loop.create_unix_server` are not supported. - The :data:`socket.AF_UNIX` socket family is specific to Unix. + The :const:`socket.AF_UNIX` socket family is specific to Unix. * :meth:`loop.add_signal_handler` and :meth:`loop.remove_signal_handler` are not supported. diff --git a/Doc/library/asyncio-subprocess.rst b/Doc/library/asyncio-subprocess.rst index b7c83aa04c09f1..bf35b1cb798aee 100644 --- a/Doc/library/asyncio-subprocess.rst +++ b/Doc/library/asyncio-subprocess.rst @@ -68,7 +68,7 @@ Creating Subprocesses The *limit* argument sets the buffer limit for :class:`StreamReader` wrappers for :attr:`Process.stdout` and :attr:`Process.stderr` - (if :attr:`subprocess.PIPE` is passed to *stdout* and *stderr* arguments). + (if :const:`subprocess.PIPE` is passed to *stdout* and *stderr* arguments). Return a :class:`~asyncio.subprocess.Process` instance. @@ -86,7 +86,7 @@ Creating Subprocesses The *limit* argument sets the buffer limit for :class:`StreamReader` wrappers for :attr:`Process.stdout` and :attr:`Process.stderr` - (if :attr:`subprocess.PIPE` is passed to *stdout* and *stderr* arguments). + (if :const:`subprocess.PIPE` is passed to *stdout* and *stderr* arguments). Return a :class:`~asyncio.subprocess.Process` instance. @@ -249,7 +249,7 @@ their completion. Stop the child process. - On POSIX systems this method sends :py:data:`signal.SIGTERM` to the + On POSIX systems this method sends :py:const:`signal.SIGTERM` to the child process. On Windows the Win32 API function :c:func:`TerminateProcess` is diff --git a/Doc/library/bz2.rst b/Doc/library/bz2.rst index 32df99869eb530..ec4aeaa04395ac 100644 --- a/Doc/library/bz2.rst +++ b/Doc/library/bz2.rst @@ -87,7 +87,8 @@ The :mod:`bz2` module contains: compressed streams. :class:`BZ2File` provides all of the members specified by the - :class:`io.BufferedIOBase`, except for :meth:`detach` and :meth:`truncate`. 
+ :class:`io.BufferedIOBase`, except for :meth:`~io.BufferedIOBase.detach` + and :meth:`~io.IOBase.truncate`. Iteration and the :keyword:`with` statement are supported. :class:`BZ2File` also provides the following method: diff --git a/Doc/library/code.rst b/Doc/library/code.rst index 538e5afc7822aa..3d7f43c86a0557 100644 --- a/Doc/library/code.rst +++ b/Doc/library/code.rst @@ -163,12 +163,12 @@ interpreter objects as well as the following additions. Push a line of source text to the interpreter. The line should not have a trailing newline; it may have internal newlines. The line is appended to a - buffer and the interpreter's :meth:`runsource` method is called with the + buffer and the interpreter's :meth:`~InteractiveInterpreter.runsource` method is called with the concatenated contents of the buffer as source. If this indicates that the command was executed or invalid, the buffer is reset; otherwise, the command is incomplete, and the buffer is left as it was after the line was appended. The return value is ``True`` if more input is required, ``False`` if the line was - dealt with in some way (this is the same as :meth:`runsource`). + dealt with in some way (this is the same as :meth:`!runsource`). .. method:: InteractiveConsole.resetbuffer() diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst index 90df499f8207b7..55606e1c5f09ac 100644 --- a/Doc/library/codeop.rst +++ b/Doc/library/codeop.rst @@ -58,7 +58,7 @@ To do just the former: .. class:: Compile() - Instances of this class have :meth:`__call__` methods identical in signature to + Instances of this class have :meth:`~object.__call__` methods identical in signature to the built-in function :func:`compile`, but with the difference that if the instance compiles program text containing a :mod:`__future__` statement, the instance 'remembers' and compiles all subsequent program texts with the @@ -67,7 +67,7 @@ To do just the former: .. class:: CommandCompiler() - Instances of this class have :meth:`__call__` methods identical in signature to + Instances of this class have :meth:`~object.__call__` methods identical in signature to :func:`compile_command`; the difference is that if the instance compiles program text containing a :mod:`__future__` statement, the instance 'remembers' and compiles all subsequent program texts with the statement in force. diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index bb46782c06e1c8..b8b231bb15b1b0 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -1224,7 +1224,7 @@ variants of :func:`functools.lru_cache`: result = self.func(*args) self.cache[args] = time(), result if len(self.cache) > self.maxsize: - self.cache.popitem(0) + self.cache.popitem(last=False) return result @@ -1256,12 +1256,12 @@ variants of :func:`functools.lru_cache`: if self.requests[args] <= self.cache_after: self.requests.move_to_end(args) if len(self.requests) > self.maxrequests: - self.requests.popitem(0) + self.requests.popitem(last=False) else: self.requests.pop(args, None) self.cache[args] = result if len(self.cache) > self.maxsize: - self.cache.popitem(0) + self.cache.popitem(last=False) return result .. 
doctest:: diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst index 180f5b81c2b615..4226348a17240a 100644 --- a/Doc/library/compileall.rst +++ b/Doc/library/compileall.rst @@ -141,9 +141,9 @@ There is no command-line option to control the optimization level used by the :func:`compile` function, because the Python interpreter itself already provides the option: :program:`python -O -m compileall`. -Similarly, the :func:`compile` function respects the :attr:`sys.pycache_prefix` +Similarly, the :func:`compile` function respects the :data:`sys.pycache_prefix` setting. The generated bytecode cache will only be useful if :func:`compile` is -run with the same :attr:`sys.pycache_prefix` (if any) that will be used at +run with the same :data:`sys.pycache_prefix` (if any) that will be used at runtime. Public functions diff --git a/Doc/library/concurrent.rst b/Doc/library/concurrent.rst index 2eba5365125805..8caea78bbb57e8 100644 --- a/Doc/library/concurrent.rst +++ b/Doc/library/concurrent.rst @@ -1,5 +1,5 @@ -The :mod:`concurrent` package -============================= +The :mod:`!concurrent` package +============================== Currently, there is only one module in this package: diff --git a/Doc/library/constants.rst b/Doc/library/constants.rst index 38dd552a0363ac..401dc9a320c5e0 100644 --- a/Doc/library/constants.rst +++ b/Doc/library/constants.rst @@ -22,16 +22,16 @@ A small number of constants live in the built-in namespace. They are: An object frequently used to represent the absence of a value, as when default arguments are not passed to a function. Assignments to ``None`` are illegal and raise a :exc:`SyntaxError`. - ``None`` is the sole instance of the :data:`NoneType` type. + ``None`` is the sole instance of the :data:`~types.NoneType` type. .. data:: NotImplemented A special value which should be returned by the binary special methods - (e.g. :meth:`__eq__`, :meth:`__lt__`, :meth:`__add__`, :meth:`__rsub__`, + (e.g. :meth:`~object.__eq__`, :meth:`~object.__lt__`, :meth:`~object.__add__`, :meth:`~object.__rsub__`, etc.) to indicate that the operation is not implemented with respect to the other type; may be returned by the in-place binary special methods - (e.g. :meth:`__imul__`, :meth:`__iand__`, etc.) for the same purpose. + (e.g. :meth:`~object.__imul__`, :meth:`~object.__iand__`, etc.) for the same purpose. It should not be evaluated in a boolean context. ``NotImplemented`` is the sole instance of the :data:`types.NotImplementedType` type. diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 81509c0920bb6e..474359a5cd98bd 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -41,7 +41,7 @@ You load libraries by accessing them as attributes of these objects. *cdll* loads libraries which export functions using the standard ``cdecl`` calling convention, while *windll* libraries call functions using the ``stdcall`` calling convention. *oledll* also uses the ``stdcall`` calling convention, and -assumes the functions return a Windows :c:type:`HRESULT` error code. The error +assumes the functions return a Windows :c:type:`!HRESULT` error code. The error code is used to automatically raise an :class:`OSError` exception when the function call fails. @@ -72,8 +72,9 @@ Windows appends the usual ``.dll`` file suffix automatically. On Linux, it is required to specify the filename *including* the extension to load a library, so attribute access can not be used to load libraries. 
Either the -:meth:`LoadLibrary` method of the dll loaders should be used, or you should load -the library by creating an instance of CDLL by calling the constructor:: +:meth:`~LibraryLoader.LoadLibrary` method of the dll loaders should be used, +or you should load the library by creating an instance of CDLL by calling +the constructor:: >>> cdll.LoadLibrary("libc.so.6") # doctest: +LINUX @@ -220,7 +221,7 @@ Fundamental data types +----------------------+------------------------------------------+----------------------------+ | :class:`c_char` | :c:expr:`char` | 1-character bytes object | +----------------------+------------------------------------------+----------------------------+ -| :class:`c_wchar` | :c:expr:`wchar_t` | 1-character string | +| :class:`c_wchar` | :c:type:`wchar_t` | 1-character string | +----------------------+------------------------------------------+----------------------------+ | :class:`c_byte` | :c:expr:`char` | int | +----------------------+------------------------------------------+----------------------------+ @@ -243,9 +244,9 @@ Fundamental data types | :class:`c_ulonglong` | :c:expr:`unsigned __int64` or | int | | | :c:expr:`unsigned long long` | | +----------------------+------------------------------------------+----------------------------+ -| :class:`c_size_t` | :c:expr:`size_t` | int | +| :class:`c_size_t` | :c:type:`size_t` | int | +----------------------+------------------------------------------+----------------------------+ -| :class:`c_ssize_t` | :c:expr:`ssize_t` or | int | +| :class:`c_ssize_t` | :c:type:`ssize_t` or | int | | | :c:expr:`Py_ssize_t` | | +----------------------+------------------------------------------+----------------------------+ | :class:`c_time_t` | :c:type:`time_t` | int | @@ -333,9 +334,9 @@ property:: 10 b'Hi\x00lo\x00\x00\x00\x00\x00' >>> -The :func:`create_string_buffer` function replaces the old :func:`c_buffer` +The :func:`create_string_buffer` function replaces the old :func:`!c_buffer` function (which is still available as an alias). To create a mutable memory -block containing unicode characters of the C type :c:expr:`wchar_t`, use the +block containing unicode characters of the C type :c:type:`wchar_t`, use the :func:`create_unicode_buffer` function. @@ -361,7 +362,7 @@ from within *IDLE* or *PythonWin*:: >>> printf(b"%f bottles of beer\n", 42.5) Traceback (most recent call last): File "", line 1, in - ArgumentError: argument 2: TypeError: Don't know how to convert parameter 2 + ctypes.ArgumentError: argument 2: TypeError: Don't know how to convert parameter 2 >>> As has been mentioned before, all Python types except integers, strings, and @@ -383,15 +384,15 @@ as calling functions with a fixed number of parameters. On some platforms, and i particular ARM64 for Apple Platforms, the calling convention for variadic functions is different than that for regular functions. -On those platforms it is required to specify the *argtypes* attribute for the -regular, non-variadic, function arguments: +On those platforms it is required to specify the :attr:`~_FuncPtr.argtypes` +attribute for the regular, non-variadic, function arguments: .. code-block:: python3 libc.printf.argtypes = [ctypes.c_char_p] Because specifying the attribute does not inhibit portability it is advised to always -specify ``argtypes`` for all variadic functions. +specify :attr:`~_FuncPtr.argtypes` for all variadic functions. .. 
_ctypes-calling-functions-with-own-custom-data-types: @@ -400,9 +401,10 @@ Calling functions with your own custom data types ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ You can also customize :mod:`ctypes` argument conversion to allow instances of -your own classes be used as function arguments. :mod:`ctypes` looks for an -:attr:`_as_parameter_` attribute and uses this as the function argument. Of -course, it must be one of integer, string, or bytes:: +your own classes be used as function arguments. :mod:`ctypes` looks for an +:attr:`!_as_parameter_` attribute and uses this as the function argument. The +attribute must be an integer, string, bytes, a :mod:`ctypes` instance, or an +object with an :attr:`!_as_parameter_` attribute:: >>> class Bottles: ... def __init__(self, number): @@ -414,7 +416,7 @@ course, it must be one of integer, string, or bytes:: 19 >>> -If you don't want to store the instance's data in the :attr:`_as_parameter_` +If you don't want to store the instance's data in the :attr:`!_as_parameter_` instance variable, you could define a :class:`property` which makes the attribute available on request. @@ -425,9 +427,9 @@ Specifying the required argument types (function prototypes) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ It is possible to specify the required argument types of functions exported from -DLLs by setting the :attr:`argtypes` attribute. +DLLs by setting the :attr:`~_FuncPtr.argtypes` attribute. -:attr:`argtypes` must be a sequence of C data types (the ``printf`` function is +:attr:`~_FuncPtr.argtypes` must be a sequence of C data types (the :func:`!printf` function is probably not a good example here, because it takes a variable number and different types of parameters depending on the format string, on the other hand this is quite handy to experiment with this feature):: @@ -444,21 +446,21 @@ prototype for a C function), and tries to convert the arguments to valid types:: >>> printf(b"%d %d %d", 1, 2, 3) Traceback (most recent call last): File "", line 1, in - ArgumentError: argument 2: TypeError: wrong type + ctypes.ArgumentError: argument 2: TypeError: 'int' object cannot be interpreted as ctypes.c_char_p >>> printf(b"%s %d %f\n", b"X", 2, 3) X 2 3.000000 13 >>> If you have defined your own classes which you pass to function calls, you have -to implement a :meth:`from_param` class method for them to be able to use them -in the :attr:`argtypes` sequence. The :meth:`from_param` class method receives +to implement a :meth:`~_CData.from_param` class method for them to be able to use them +in the :attr:`~_FuncPtr.argtypes` sequence. The :meth:`~_CData.from_param` class method receives the Python object passed to the function call, it should do a typecheck or whatever is needed to make sure this object is acceptable, and then return the -object itself, its :attr:`_as_parameter_` attribute, or whatever you want to +object itself, its :attr:`!_as_parameter_` attribute, or whatever you want to pass as the C function argument in this case. Again, the result should be an integer, string, bytes, a :mod:`ctypes` instance, or an object with an -:attr:`_as_parameter_` attribute. +:attr:`!_as_parameter_` attribute. .. _ctypes-return-types: @@ -475,16 +477,16 @@ Return types By default functions are assumed to return the C :c:expr:`int` type. Other -return types can be specified by setting the :attr:`restype` attribute of the +return types can be specified by setting the :attr:`~_FuncPtr.restype` attribute of the function object. 
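As a minimal sketch of the pattern (this example assumes a Linux system where the C library can be loaded under the name ``libc.so.6``; the library name is an assumption, not part of the documented API), declaring both attributes up front makes later calls type-checked and gives the result the right Python type::

   from ctypes import CDLL, c_char_p, c_size_t

   libc = CDLL("libc.so.6")             # assumed, platform-specific library name
   libc.strlen.argtypes = (c_char_p,)   # parameter types, checked on every call
   libc.strlen.restype = c_size_t       # return type; the default would be c_int

   assert libc.strlen(b"hello") == 5

Without the ``restype`` assignment the call would still work for small strings, but the result would be interpreted as a C ``int`` rather than ``size_t``.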
-The C prototype of ``time()`` is ``time_t time(time_t *)``. Because ``time_t`` -might be of a different type than the default return type ``int``, you should -specify the ``restype``:: +The C prototype of :c:func:`time` is ``time_t time(time_t *)``. Because :c:type:`time_t` +might be of a different type than the default return type :c:expr:`int`, you should +specify the :attr:`!restype` attribute:: >>> libc.time.restype = c_time_t -The argument types can be specified using ``argtypes``:: +The argument types can be specified using :attr:`~_FuncPtr.argtypes`:: >>> libc.time.argtypes = (POINTER(c_time_t),) @@ -493,7 +495,7 @@ To call the function with a ``NULL`` pointer as first argument, use ``None``:: >>> print(libc.time(None)) # doctest: +SKIP 1150640792 -Here is a more advanced example, it uses the ``strchr`` function, which expects +Here is a more advanced example, it uses the :func:`!strchr` function, which expects a string pointer and a char, and returns a pointer to a string:: >>> strchr = libc.strchr @@ -506,8 +508,8 @@ a string pointer and a char, and returns a pointer to a string:: None >>> -If you want to avoid the ``ord("x")`` calls above, you can set the -:attr:`argtypes` attribute, and the second argument will be converted from a +If you want to avoid the :func:`ord("x") ` calls above, you can set the +:attr:`~_FuncPtr.argtypes` attribute, and the second argument will be converted from a single character Python bytes object into a C char: .. doctest:: @@ -526,7 +528,7 @@ single character Python bytes object into a C char: >>> You can also use a callable Python object (a function or a class for example) as -the :attr:`restype` attribute, if the foreign function returns an integer. The +the :attr:`~_FuncPtr.restype` attribute, if the foreign function returns an integer. The callable will be called with the *integer* the C function returns, and the result of this call will be used as the result of your function call. This is useful to check for error return values and automatically raise an exception:: @@ -554,7 +556,8 @@ get the string representation of an error code, and *returns* an exception. :func:`GetLastError` to retrieve it. Please note that a much more powerful error checking mechanism is available -through the :attr:`errcheck` attribute; see the reference manual for details. +through the :attr:`~_FuncPtr.errcheck` attribute; +see the reference manual for details. .. _ctypes-passing-pointers: @@ -592,7 +595,7 @@ Structures and unions Structures and unions must derive from the :class:`Structure` and :class:`Union` base classes which are defined in the :mod:`ctypes` module. Each subclass must -define a :attr:`_fields_` attribute. :attr:`_fields_` must be a list of +define a :attr:`~Structure._fields_` attribute. :attr:`!_fields_` must be a list of *2-tuples*, containing a *field name* and a *field type*. The field type must be a :mod:`ctypes` type like :class:`c_int`, or any other @@ -664,9 +667,9 @@ Structure/union alignment and byte order By default, Structure and Union fields are aligned in the same way the C compiler does it. It is possible to override this behavior by specifying a -:attr:`_pack_` class attribute in the subclass definition. This must be set to a -positive integer and specifies the maximum alignment for the fields. This is -what ``#pragma pack(n)`` also does in MSVC. +:attr:`~Structure._pack_` class attribute in the subclass definition. +This must be set to a positive integer and specifies the maximum alignment for the fields. 
+This is what ``#pragma pack(n)`` also does in MSVC. :mod:`ctypes` uses the native byte order for Structures and Unions. To build structures with non-native byte order, you can use one of the @@ -682,7 +685,7 @@ Bit fields in structures and unions It is possible to create structures and unions containing bit fields. Bit fields are only possible for integer fields, the bit width is specified as the third -item in the :attr:`_fields_` tuples:: +item in the :attr:`~Structure._fields_` tuples:: >>> class Int(Structure): ... _fields_ = [("first_16", c_int, 16), @@ -853,7 +856,7 @@ Type conversions ^^^^^^^^^^^^^^^^ Usually, ctypes does strict type checking. This means, if you have -``POINTER(c_int)`` in the :attr:`argtypes` list of a function or as the type of +``POINTER(c_int)`` in the :attr:`~_FuncPtr.argtypes` list of a function or as the type of a member field in a structure definition, only instances of exactly the same type are accepted. There are some exceptions to this rule, where ctypes accepts other objects. For example, you can pass compatible array instances instead of @@ -874,7 +877,7 @@ pointer types. So, for ``POINTER(c_int)``, ctypes accepts an array of c_int:: >>> In addition, if a function argument is explicitly declared to be a pointer type -(such as ``POINTER(c_int)``) in :attr:`argtypes`, an object of the pointed +(such as ``POINTER(c_int)``) in :attr:`~_FuncPtr.argtypes`, an object of the pointed type (``c_int`` in this case) can be passed to the function. ctypes will apply the required :func:`byref` conversion in this case automatically. @@ -950,8 +953,8 @@ work:: >>> because the new ``class cell`` is not available in the class statement itself. -In :mod:`ctypes`, we can define the ``cell`` class and set the :attr:`_fields_` -attribute later, after the class statement:: +In :mod:`ctypes`, we can define the ``cell`` class and set the +:attr:`~Structure._fields_` attribute later, after the class statement:: >>> from ctypes import * >>> class cell(Structure): @@ -1001,8 +1004,8 @@ argument, and the callback functions expected argument types as the remaining arguments. I will present an example here which uses the standard C library's -:c:func:`qsort` function, that is used to sort items with the help of a callback -function. :c:func:`qsort` will be used to sort an array of integers:: +:c:func:`!qsort` function, that is used to sort items with the help of a callback +function. :c:func:`!qsort` will be used to sort an array of integers:: >>> IntArray5 = c_int * 5 >>> ia = IntArray5(5, 1, 7, 33, 99) @@ -1010,7 +1013,7 @@ function. :c:func:`qsort` will be used to sort an array of integers:: >>> qsort.restype = None >>> -:func:`qsort` must be called with a pointer to the data to sort, the number of +:func:`!qsort` must be called with a pointer to the data to sort, the number of items in the data array, the size of one item, and a pointer to the comparison function, the callback. The callback will then be called with two pointers to items, and it must return a negative integer if the first item is smaller than @@ -1102,7 +1105,7 @@ Some shared libraries not only export functions, they also export variables. An example in the Python library itself is the :c:data:`Py_Version`, Python runtime version number encoded in a single constant integer. -:mod:`ctypes` can access values like this with the :meth:`in_dll` class methods of +:mod:`ctypes` can access values like this with the :meth:`~_CData.in_dll` class methods of the type. 
*pythonapi* is a predefined symbol giving access to the Python C api:: @@ -1292,13 +1295,13 @@ Finding shared libraries When programming in a compiled language, shared libraries are accessed when compiling/linking a program, and when the program is run. -The purpose of the :func:`find_library` function is to locate a library in a way +The purpose of the :func:`~ctypes.util.find_library` function is to locate a library in a way similar to what the compiler or runtime loader does (on platforms with several versions of a shared library the most recent should be loaded), while the ctypes library loaders act like when a program is run, and call the runtime loader directly. -The :mod:`ctypes.util` module provides a function which can help to determine +The :mod:`!ctypes.util` module provides a function which can help to determine the library to load. @@ -1313,7 +1316,7 @@ the library to load. The exact functionality is system dependent. -On Linux, :func:`find_library` tries to run external programs +On Linux, :func:`~ctypes.util.find_library` tries to run external programs (``/sbin/ldconfig``, ``gcc``, ``objdump`` and ``ld``) to find the library file. It returns the filename of the library file. @@ -1332,7 +1335,7 @@ Here are some examples:: 'libbz2.so.1.0' >>> -On macOS, :func:`find_library` tries several predefined naming schemes and paths +On macOS, :func:`~ctypes.util.find_library` tries several predefined naming schemes and paths to locate the library, and returns a full pathname if successful:: >>> from ctypes.util import find_library @@ -1346,13 +1349,13 @@ to locate the library, and returns a full pathname if successful:: '/System/Library/Frameworks/AGL.framework/AGL' >>> -On Windows, :func:`find_library` searches along the system search path, and +On Windows, :func:`~ctypes.util.find_library` searches along the system search path, and returns the full pathname, but since there is no predefined naming scheme a call like ``find_library("c")`` will fail and return ``None``. If wrapping a shared library with :mod:`ctypes`, it *may* be better to determine the shared library name at development time, and hardcode that into the wrapper -module instead of using :func:`find_library` to locate the library at runtime. +module instead of using :func:`~ctypes.util.find_library` to locate the library at runtime. .. _ctypes-loading-shared-libraries: @@ -1437,9 +1440,9 @@ function exported by these libraries, and reacquired afterwards. All these classes can be instantiated by calling them with at least one argument, the pathname of the shared library. If you have an existing handle to an already loaded shared library, it can be passed as the ``handle`` named -parameter, otherwise the underlying platforms ``dlopen`` or ``LoadLibrary`` -function is used to load the library into the process, and to get a handle to -it. +parameter, otherwise the underlying platforms :c:func:`!dlopen` or +:c:func:`!LoadLibrary` function is used to load the library into +the process, and to get a handle to it. The *mode* parameter can be used to specify how the library is loaded. For details, consult the :manpage:`dlopen(3)` manpage. On Windows, *mode* is @@ -1459,7 +1462,7 @@ to a new value and returns the former value. 
The *use_last_error* parameter, when set to true, enables the same mechanism for the Windows error code which is managed by the :func:`GetLastError` and -:func:`SetLastError` Windows API functions; :func:`ctypes.get_last_error` and +:func:`!SetLastError` Windows API functions; :func:`ctypes.get_last_error` and :func:`ctypes.set_last_error` are used to request and change the ctypes private copy of the windows error code. @@ -1522,8 +1525,8 @@ underscore to not clash with exported function names: Shared libraries can also be loaded by using one of the prefabricated objects, which are instances of the :class:`LibraryLoader` class, either by calling the -:meth:`LoadLibrary` method, or by retrieving the library as attribute of the -loader instance. +:meth:`~LibraryLoader.LoadLibrary` method, or by retrieving the library as +attribute of the loader instance. .. class:: LibraryLoader(dlltype) @@ -1531,7 +1534,7 @@ loader instance. Class which loads shared libraries. *dlltype* should be one of the :class:`CDLL`, :class:`PyDLL`, :class:`WinDLL`, or :class:`OleDLL` types. - :meth:`__getattr__` has special behavior: It allows loading a shared library by + :meth:`!__getattr__` has special behavior: It allows loading a shared library by accessing it as attribute of a library loader instance. The result is cached, so repeated attribute accesses return the same library each time. @@ -1576,7 +1579,7 @@ object is available: An instance of :class:`PyDLL` that exposes Python C API functions as attributes. Note that all these functions are assumed to return C :c:expr:`int`, which is of course not always the truth, so you have to assign - the correct :attr:`restype` attribute to use these functions. + the correct :attr:`!restype` attribute to use these functions. .. audit-event:: ctypes.dlopen name ctypes.LibraryLoader @@ -1628,7 +1631,7 @@ They are instances of a private class: the callable will be called with this integer, allowing further processing or error checking. Using this is deprecated, for more flexible post processing or error checking use a ctypes data type as - :attr:`restype` and assign a callable to the :attr:`errcheck` attribute. + :attr:`!restype` and assign a callable to the :attr:`errcheck` attribute. .. attribute:: argtypes @@ -1639,14 +1642,14 @@ They are instances of a private class: unspecified arguments as well. When a foreign function is called, each actual argument is passed to the - :meth:`from_param` class method of the items in the :attr:`argtypes` + :meth:`~_CData.from_param` class method of the items in the :attr:`argtypes` tuple, this method allows adapting the actual argument to an object that the foreign function accepts. For example, a :class:`c_char_p` item in the :attr:`argtypes` tuple will convert a string passed as argument into a bytes object using ctypes conversion rules. New: It is now possible to put items in argtypes which are not ctypes - types, but each item must have a :meth:`from_param` method which returns a + types, but each item must have a :meth:`~_CData.from_param` method which returns a value usable as argument (integer, string, ctypes instance). This allows defining adapters that can adapt custom objects as function parameters. @@ -1660,7 +1663,7 @@ They are instances of a private class: :module: *result* is what the foreign function returns, as specified by the - :attr:`restype` attribute. + :attr:`!restype` attribute. 
*func* is the foreign function object itself, this allows reusing the same callable object to check or post process the results of several @@ -1770,12 +1773,12 @@ different ways, depending on the type and number of the parameters in the call: COM methods use a special calling convention: They require a pointer to the COM interface as first argument, in addition to those parameters that - are specified in the :attr:`argtypes` tuple. + are specified in the :attr:`!argtypes` tuple. The optional *paramflags* parameter creates foreign function wrappers with much more functionality than the features described above. - *paramflags* must be a tuple of the same length as :attr:`argtypes`. + *paramflags* must be a tuple of the same length as :attr:`~_FuncPtr.argtypes`. Each item in this tuple contains further information about a parameter, it must be a tuple containing one, two, or three items. @@ -1845,7 +1848,7 @@ value if there is a single one, or a tuple containing the output parameter values when there are more than one, so the GetWindowRect function now returns a RECT instance, when called. -Output parameters can be combined with the :attr:`errcheck` protocol to do +Output parameters can be combined with the :attr:`~_FuncPtr.errcheck` protocol to do further output processing and error checking. The win32 ``GetWindowRect`` api function returns a ``BOOL`` to signal success or failure, so this function could do the error checking, and raises an exception when the api call failed:: @@ -1858,7 +1861,7 @@ do the error checking, and raises an exception when the api call failed:: >>> GetWindowRect.errcheck = errcheck >>> -If the :attr:`errcheck` function returns the argument tuple it receives +If the :attr:`~_FuncPtr.errcheck` function returns the argument tuple it receives unchanged, :mod:`ctypes` continues the normal processing it does on the output parameters. If you want to return a tuple of window coordinates instead of a ``RECT`` instance, you can retrieve the fields in the function and return them @@ -2008,7 +2011,7 @@ Utility functions .. function:: get_last_error() Windows only: returns the current value of the ctypes-private copy of the system - :data:`LastError` variable in the calling thread. + :data:`!LastError` variable in the calling thread. .. audit-event:: ctypes.get_last_error "" ctypes.get_last_error @@ -2061,7 +2064,7 @@ Utility functions .. function:: set_last_error(value) Windows only: set the current value of the ctypes-private copy of the system - :data:`LastError` variable in the calling thread to *value* and return the + :data:`!LastError` variable in the calling thread to *value* and return the previous value. .. audit-event:: ctypes.set_last_error error ctypes.set_last_error @@ -2157,8 +2160,8 @@ Data types This method adapts *obj* to a ctypes type. It is called with the actual object used in a foreign function call when the type is present in the - foreign function's :attr:`argtypes` tuple; it must return an object that - can be used as a function call parameter. + foreign function's :attr:`~_FuncPtr.argtypes` tuple; + it must return an object that can be used as a function call parameter. All ctypes data types have a default implementation of this classmethod that normally returns *obj* if that is an instance of the type. Some @@ -2223,13 +2226,13 @@ Fundamental data types Fundamental data types, when returned as foreign function call results, or, for example, by retrieving structure field members or array items, are transparently converted to native Python types. 
In other words, if a foreign function has a -:attr:`restype` of :class:`c_char_p`, you will always receive a Python bytes +:attr:`~_FuncPtr.restype` of :class:`c_char_p`, you will always receive a Python bytes object, *not* a :class:`c_char_p` instance. .. XXX above is false, it actually returns a Unicode string Subclasses of fundamental data types do *not* inherit this behavior. So, if a -foreign functions :attr:`restype` is a subclass of :class:`c_void_p`, you will +foreign functions :attr:`!restype` is a subclass of :class:`c_void_p`, you will receive an instance of this subclass from the function call. Of course, you can get the value of the pointer by accessing the ``value`` attribute. @@ -2407,7 +2410,7 @@ These are the fundamental ctypes data types: .. class:: c_wchar - Represents the C :c:expr:`wchar_t` datatype, and interprets the value as a + Represents the C :c:type:`wchar_t` datatype, and interprets the value as a single character unicode string. The constructor accepts an optional string initializer, the length of the string must be exactly one character. @@ -2428,7 +2431,7 @@ These are the fundamental ctypes data types: .. class:: HRESULT - Windows only: Represents a :c:type:`HRESULT` value, which contains success or + Windows only: Represents a :c:type:`!HRESULT` value, which contains success or error information for a function or method call. @@ -2437,9 +2440,9 @@ These are the fundamental ctypes data types: Represents the C :c:expr:`PyObject *` datatype. Calling this without an argument creates a ``NULL`` :c:expr:`PyObject *` pointer. -The :mod:`ctypes.wintypes` module provides quite some other Windows specific -data types, for example :c:type:`HWND`, :c:type:`WPARAM`, or :c:type:`DWORD`. Some -useful structures like :c:type:`MSG` or :c:type:`RECT` are also defined. +The :mod:`!ctypes.wintypes` module provides quite some other Windows specific +data types, for example :c:type:`!HWND`, :c:type:`!WPARAM`, or :c:type:`!DWORD`. +Some useful structures like :c:type:`!MSG` or :c:type:`!RECT` are also defined. .. _ctypes-structured-data-types: diff --git a/Doc/library/curses.rst b/Doc/library/curses.rst index cf208f3ba0db36..9ab67c21975394 100644 --- a/Doc/library/curses.rst +++ b/Doc/library/curses.rst @@ -641,7 +641,8 @@ The module :mod:`curses` defines the following functions: .. function:: update_lines_cols() - Update :envvar:`LINES` and :envvar:`COLS`. Useful for detecting manual screen resize. + Update the :const:`LINES` and :const:`COLS` module variables. + Useful for detecting manual screen resize. .. versionadded:: 3.5 @@ -1342,10 +1343,27 @@ The :mod:`curses` module defines the following data members: .. data:: COLORS The maximum number of colors the terminal can support. + It is defined only after the call to :func:`start_color`. .. data:: COLOR_PAIRS The maximum number of color pairs the terminal can support. + It is defined only after the call to :func:`start_color`. + +.. data:: COLS + + The width of the screen, i.e., the number of columns. + It is defined only after the call to :func:`initscr`. + Updated by :func:`update_lines_cols`, :func:`resizeterm` and + :func:`resize_term`. + +.. data:: LINES + + The height of the screen, i.e., the number of lines. + It is defined only after the call to :func:`initscr`. + Updated by :func:`update_lines_cols`, :func:`resizeterm` and + :func:`resize_term`. + Some constants are available to specify character cell attributes. The exact constants available are system dependent. 
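A small sketch of how these module-level values are typically consumed (it needs a real terminal to run; :func:`curses.wrapper` is used here because it calls :func:`initscr` for you and restores the terminal afterwards)::

   import curses

   def main(stdscr):
       # LINES and COLS exist only after initscr() has been called,
       # which curses.wrapper() has already done at this point.
       stdscr.addstr(0, 0, f"screen: {curses.LINES} lines x {curses.COLS} columns")
       stdscr.getkey()

   curses.wrapper(main)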
@@ -1630,6 +1648,8 @@ keys); also, the following keypad mappings are standard: | :kbd:`Page Down` | KEY_NPAGE | +------------------+-----------+ +.. _curses-acs-codes: + The following table lists characters from the alternate character set. These are inherited from the VT100 terminal, and will generally be available on software emulations such as X terminals. When there is no graphic available, curses diff --git a/Doc/library/dbm.rst b/Doc/library/dbm.rst index 2be499337a2a15..766847b971b645 100644 --- a/Doc/library/dbm.rst +++ b/Doc/library/dbm.rst @@ -245,6 +245,13 @@ supported. Close the ``gdbm`` database. + .. method:: gdbm.clear() + + Remove all items from the ``gdbm`` database. + + .. versionadded:: 3.13 + + :mod:`dbm.ndbm` --- Interface based on ndbm ------------------------------------------- @@ -313,6 +320,12 @@ to locate the appropriate header file to simplify building this module. Close the ``ndbm`` database. + .. method:: ndbm.clear() + + Remove all items from the ``ndbm`` database. + + .. versionadded:: 3.13 + :mod:`dbm.dumb` --- Portable DBM implementation ----------------------------------------------- diff --git a/Doc/library/devmode.rst b/Doc/library/devmode.rst index 80ac13b116c1d2..914aa45cf9cbc3 100644 --- a/Doc/library/devmode.rst +++ b/Doc/library/devmode.rst @@ -81,7 +81,7 @@ Effects of the Python Development Mode: ignored for empty strings. * The :class:`io.IOBase` destructor logs ``close()`` exceptions. -* Set the :attr:`~sys.flags.dev_mode` attribute of :attr:`sys.flags` to +* Set the :attr:`~sys.flags.dev_mode` attribute of :data:`sys.flags` to ``True``. The Python Development Mode does not enable the :mod:`tracemalloc` module by diff --git a/Doc/library/dis.rst b/Doc/library/dis.rst index 6beaad3825aba8..0b44d160de58a7 100644 --- a/Doc/library/dis.rst +++ b/Doc/library/dis.rst @@ -56,7 +56,7 @@ the following command can be used to display the disassembly of >>> dis.dis(myfunc) 2 0 RESUME 0 - 3 2 LOAD_GLOBAL 1 (NULL + len) + 3 2 LOAD_GLOBAL 1 (len + NULL) 12 LOAD_FAST 0 (alist) 14 CALL 1 22 RETURN_VALUE @@ -876,7 +876,7 @@ iterations of the loop. .. opcode:: MATCH_MAPPING If ``STACK[-1]`` is an instance of :class:`collections.abc.Mapping` (or, more - technically: if it has the :const:`Py_TPFLAGS_MAPPING` flag set in its + technically: if it has the :c:macro:`Py_TPFLAGS_MAPPING` flag set in its :c:member:`~PyTypeObject.tp_flags`), push ``True`` onto the stack. Otherwise, push ``False``. @@ -887,7 +887,7 @@ iterations of the loop. If ``STACK[-1]`` is an instance of :class:`collections.abc.Sequence` and is *not* an instance of :class:`str`/:class:`bytes`/:class:`bytearray` (or, more technically: if it has - the :const:`Py_TPFLAGS_SEQUENCE` flag set in its :c:member:`~PyTypeObject.tp_flags`), + the :c:macro:`Py_TPFLAGS_SEQUENCE` flag set in its :c:member:`~PyTypeObject.tp_flags`), push ``True`` onto the stack. Otherwise, push ``False``. .. versionadded:: 3.10 @@ -922,9 +922,10 @@ iterations of the loop. .. opcode:: UNPACK_SEQUENCE (count) Unpacks ``STACK[-1]`` into *count* individual values, which are put onto the stack - right-to-left:: + right-to-left. Require there to be exactly *count* values.:: - STACK.extend(STACK.pop()[:count:-1]) + assert(len(STACK[-1]) == count) + STACK.extend(STACK.pop()[:-count-1:-1]) .. 
opcode:: UNPACK_EX (counts) diff --git a/Doc/library/email.charset.rst b/Doc/library/email.charset.rst index adbe6c1c7d29b8..aa0134412f3a60 100644 --- a/Doc/library/email.charset.rst +++ b/Doc/library/email.charset.rst @@ -150,7 +150,7 @@ Import this class from the :mod:`email.charset` module. .. method:: __str__() Returns *input_charset* as a string coerced to lower - case. :meth:`__repr__` is an alias for :meth:`__str__`. + case. :meth:`!__repr__` is an alias for :meth:`!__str__`. .. method:: __eq__(other) diff --git a/Doc/library/email.encoders.rst b/Doc/library/email.encoders.rst index 5d68b104f3a45c..3bd377e33f6c15 100644 --- a/Doc/library/email.encoders.rst +++ b/Doc/library/email.encoders.rst @@ -25,7 +25,7 @@ is especially true for :mimetype:`image/\*` and :mimetype:`text/\*` type message containing binary data. The :mod:`email` package provides some convenient encoders in its -:mod:`encoders` module. These encoders are actually used by the +:mod:`~email.encoders` module. These encoders are actually used by the :class:`~email.mime.audio.MIMEAudio` and :class:`~email.mime.image.MIMEImage` class constructors to provide default encodings. All encoder functions take exactly one argument, the message object to encode. They usually extract the diff --git a/Doc/library/email.generator.rst b/Doc/library/email.generator.rst index eb775b68362c76..afa0038ea2d6c4 100644 --- a/Doc/library/email.generator.rst +++ b/Doc/library/email.generator.rst @@ -274,9 +274,9 @@ in with information about the part. .. rubric:: Footnotes .. [#] This statement assumes that you use the appropriate setting for - ``unixfrom``, and that there are no :mod:`policy` settings calling for + ``unixfrom``, and that there are no :mod:`email.policy` settings calling for automatic adjustments (for example, - :attr:`~email.policy.Policy.refold_source` must be ``none``, which is + :attr:`~email.policy.EmailPolicy.refold_source` must be ``none``, which is *not* the default). It is also not 100% true, since if the message does not conform to the RFC standards occasionally information about the exact original text is lost during parsing error recovery. It is a goal diff --git a/Doc/library/email.message.rst b/Doc/library/email.message.rst index 5e0509f4181199..225f498781fa86 100644 --- a/Doc/library/email.message.rst +++ b/Doc/library/email.message.rst @@ -67,7 +67,7 @@ message objects. with the base :class:`~email.message.Message` class *maxheaderlen* is accepted, but defaults to ``None``, which means that by default the line length is controlled by the - :attr:`~email.policy.EmailPolicy.max_line_length` of the policy. The + :attr:`~email.policy.Policy.max_line_length` of the policy. The *policy* argument may be used to override the default policy obtained from the message instance. This can be used to control some of the formatting produced by the method, since the specified *policy* will be @@ -213,7 +213,7 @@ message objects. del msg['subject'] msg['subject'] = 'Python roolz!' - If the :mod:`policy` defines certain headers to be unique (as the standard + If the :mod:`policy ` defines certain headers to be unique (as the standard policies do), this method may raise a :exc:`ValueError` when an attempt is made to assign a value to such a header when one already exists. This behavior is intentional for consistency's sake, but do not depend on it @@ -378,7 +378,7 @@ message objects. deprecated. 
Note that existing parameter values of headers may be accessed through - the :attr:`~email.headerregistry.BaseHeader.params` attribute of the + the :attr:`~email.headerregistry.ParameterizedMIMEHeader.params` attribute of the header value (for example, ``msg['Content-Type'].params['charset']``). .. versionchanged:: 3.4 ``replace`` keyword was added. @@ -691,7 +691,7 @@ message objects. .. method:: clear_content() - Remove the payload and all of the :exc:`Content-` headers, leaving + Remove the payload and all of the :mailheader:`!Content-` headers, leaving all other headers intact and in their original order. diff --git a/Doc/library/email.parser.rst b/Doc/library/email.parser.rst index d9a61616bbbdfb..dda0466a6afa7d 100644 --- a/Doc/library/email.parser.rst +++ b/Doc/library/email.parser.rst @@ -39,9 +39,9 @@ returns the root object when you close the parser. Note that the parser can be extended in limited ways, and of course you can implement your own parser completely from scratch. All of the logic that connects the :mod:`email` package's bundled parser and the -:class:`~email.message.EmailMessage` class is embodied in the :mod:`policy` +:class:`~email.message.EmailMessage` class is embodied in the :class:`~email.policy.Policy` class, so a custom parser can create message object trees any way it finds -necessary by implementing custom versions of the appropriate :mod:`policy` +necessary by implementing custom versions of the appropriate :class:`!Policy` methods. diff --git a/Doc/library/email.utils.rst b/Doc/library/email.utils.rst index a87a0bd2e7de6b..345b64001c1ace 100644 --- a/Doc/library/email.utils.rst +++ b/Doc/library/email.utils.rst @@ -65,11 +65,6 @@ of the new API. *email address* parts. Returns a tuple of that information, unless the parse fails, in which case a 2-tuple of ``('', '')`` is returned. - .. versionchanged:: 3.12 - For security reasons, addresses that were ambiguous and could parse into - multiple different addresses now cause ``('', '')`` to be returned - instead of only one of the *potential* addresses. - .. function:: formataddr(pair, charset='utf-8') @@ -92,7 +87,7 @@ of the new API. This method returns a list of 2-tuples of the form returned by ``parseaddr()``. *fieldvalues* is a sequence of header field values as might be returned by :meth:`Message.get_all `. Here's a simple - example that gets all the recipients of a message: + example that gets all the recipients of a message:: from email.utils import getaddresses @@ -102,25 +97,6 @@ of the new API. resent_ccs = msg.get_all('resent-cc', []) all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs) - When parsing fails for a single fieldvalue, a 2-tuple of ``('', '')`` - is returned in its place. Other errors in parsing the list of - addresses such as a fieldvalue seemingly parsing into multiple - addresses may result in a list containing a single empty 2-tuple - ``[('', '')]`` being returned rather than returning potentially - invalid output. - - Example malformed input parsing: - - .. doctest:: - - >>> from email.utils import getaddresses - >>> getaddresses(['alice@example.com ', 'me@example.com']) - [('', '')] - - .. versionchanged:: 3.12 - The 2-tuple of ``('', '')`` in the returned values when parsing - fails were added as to address a security issue. - .. 
function:: parsedate(date) diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst index 4651eddf843700..fae0cf621323c8 100644 --- a/Doc/library/exceptions.rst +++ b/Doc/library/exceptions.rst @@ -659,8 +659,8 @@ depending on the system error code. Raised when an operation would block on an object (e.g. socket) set for non-blocking operation. - Corresponds to :c:data:`errno` :py:data:`~errno.EAGAIN`, :py:data:`~errno.EALREADY`, - :py:data:`~errno.EWOULDBLOCK` and :py:data:`~errno.EINPROGRESS`. + Corresponds to :c:data:`errno` :py:const:`~errno.EAGAIN`, :py:const:`~errno.EALREADY`, + :py:const:`~errno.EWOULDBLOCK` and :py:const:`~errno.EINPROGRESS`. In addition to those of :exc:`OSError`, :exc:`BlockingIOError` can have one more attribute: @@ -674,7 +674,7 @@ depending on the system error code. .. exception:: ChildProcessError Raised when an operation on a child process failed. - Corresponds to :c:data:`errno` :py:data:`~errno.ECHILD`. + Corresponds to :c:data:`errno` :py:const:`~errno.ECHILD`. .. exception:: ConnectionError @@ -688,40 +688,40 @@ depending on the system error code. A subclass of :exc:`ConnectionError`, raised when trying to write on a pipe while the other end has been closed, or trying to write on a socket which has been shutdown for writing. - Corresponds to :c:data:`errno` :py:data:`~errno.EPIPE` and :py:data:`~errno.ESHUTDOWN`. + Corresponds to :c:data:`errno` :py:const:`~errno.EPIPE` and :py:const:`~errno.ESHUTDOWN`. .. exception:: ConnectionAbortedError A subclass of :exc:`ConnectionError`, raised when a connection attempt is aborted by the peer. - Corresponds to :c:data:`errno` :py:data:`~errno.ECONNABORTED`. + Corresponds to :c:data:`errno` :py:const:`~errno.ECONNABORTED`. .. exception:: ConnectionRefusedError A subclass of :exc:`ConnectionError`, raised when a connection attempt is refused by the peer. - Corresponds to :c:data:`errno` :py:data:`~errno.ECONNREFUSED`. + Corresponds to :c:data:`errno` :py:const:`~errno.ECONNREFUSED`. .. exception:: ConnectionResetError A subclass of :exc:`ConnectionError`, raised when a connection is reset by the peer. - Corresponds to :c:data:`errno` :py:data:`~errno.ECONNRESET`. + Corresponds to :c:data:`errno` :py:const:`~errno.ECONNRESET`. .. exception:: FileExistsError Raised when trying to create a file or directory which already exists. - Corresponds to :c:data:`errno` :py:data:`~errno.EEXIST`. + Corresponds to :c:data:`errno` :py:const:`~errno.EEXIST`. .. exception:: FileNotFoundError Raised when a file or directory is requested but doesn't exist. - Corresponds to :c:data:`errno` :py:data:`~errno.ENOENT`. + Corresponds to :c:data:`errno` :py:const:`~errno.ENOENT`. .. exception:: InterruptedError Raised when a system call is interrupted by an incoming signal. - Corresponds to :c:data:`errno` :py:data:`~errno.EINTR`. + Corresponds to :c:data:`errno` :py:const:`~errno.EINTR`. .. versionchanged:: 3.5 Python now retries system calls when a syscall is interrupted by a @@ -732,7 +732,7 @@ depending on the system error code. Raised when a file operation (such as :func:`os.remove`) is requested on a directory. - Corresponds to :c:data:`errno` :py:data:`~errno.EISDIR`. + Corresponds to :c:data:`errno` :py:const:`~errno.EISDIR`. .. exception:: NotADirectoryError @@ -740,28 +740,28 @@ depending on the system error code. something which is not a directory. On most POSIX platforms, it may also be raised if an operation attempts to open or traverse a non-directory file as if it were a directory. 
- Corresponds to :c:data:`errno` :py:data:`~errno.ENOTDIR`. + Corresponds to :c:data:`errno` :py:const:`~errno.ENOTDIR`. .. exception:: PermissionError Raised when trying to run an operation without the adequate access rights - for example filesystem permissions. - Corresponds to :c:data:`errno` :py:data:`~errno.EACCES`, - :py:data:`~errno.EPERM`, and :py:data:`~errno.ENOTCAPABLE`. + Corresponds to :c:data:`errno` :py:const:`~errno.EACCES`, + :py:const:`~errno.EPERM`, and :py:const:`~errno.ENOTCAPABLE`. .. versionchanged:: 3.11.1 - WASI's :py:data:`~errno.ENOTCAPABLE` is now mapped to + WASI's :py:const:`~errno.ENOTCAPABLE` is now mapped to :exc:`PermissionError`. .. exception:: ProcessLookupError Raised when a given process doesn't exist. - Corresponds to :c:data:`errno` :py:data:`~errno.ESRCH`. + Corresponds to :c:data:`errno` :py:const:`~errno.ESRCH`. .. exception:: TimeoutError Raised when a system function timed out at the system level. - Corresponds to :c:data:`errno` :py:data:`~errno.ETIMEDOUT`. + Corresponds to :c:data:`errno` :py:const:`~errno.ETIMEDOUT`. .. versionadded:: 3.3 All the above :exc:`OSError` subclasses were added. diff --git a/Doc/library/fcntl.rst b/Doc/library/fcntl.rst index 997c7ea571fc03..969a79fa873395 100644 --- a/Doc/library/fcntl.rst +++ b/Doc/library/fcntl.rst @@ -172,9 +172,9 @@ The module defines the following functions: which the lock starts, relative to *whence*, and *whence* is as with :func:`io.IOBase.seek`, specifically: - * :const:`0` -- relative to the start of the file (:data:`os.SEEK_SET`) - * :const:`1` -- relative to the current buffer position (:data:`os.SEEK_CUR`) - * :const:`2` -- relative to the end of the file (:data:`os.SEEK_END`) + * ``0`` -- relative to the start of the file (:const:`os.SEEK_SET`) + * ``1`` -- relative to the current buffer position (:const:`os.SEEK_CUR`) + * ``2`` -- relative to the end of the file (:const:`os.SEEK_END`) The default for *start* is 0, which means to start at the beginning of the file. The default for *len* is 0 which means to lock to the end of the file. The @@ -201,7 +201,7 @@ using the :func:`flock` call may be better. .. seealso:: Module :mod:`os` - If the locking flags :data:`~os.O_SHLOCK` and :data:`~os.O_EXLOCK` are + If the locking flags :const:`~os.O_SHLOCK` and :const:`~os.O_EXLOCK` are present in the :mod:`os` module (on BSD only), the :func:`os.open` function provides an alternative to the :func:`lockf` and :func:`flock` functions. diff --git a/Doc/library/filecmp.rst b/Doc/library/filecmp.rst index 83e9e14ddcacd8..dfe4b7c59fd578 100644 --- a/Doc/library/filecmp.rst +++ b/Doc/library/filecmp.rst @@ -74,7 +74,7 @@ The :class:`dircmp` class Construct a new directory comparison object, to compare the directories *a* and *b*. *ignore* is a list of names to ignore, and defaults to - :attr:`filecmp.DEFAULT_IGNORES`. *hide* is a list of names to hide, and + :const:`filecmp.DEFAULT_IGNORES`. *hide* is a list of names to hide, and defaults to ``[os.curdir, os.pardir]``. The :class:`dircmp` class compares files by doing *shallow* comparisons @@ -100,7 +100,7 @@ The :class:`dircmp` class used to get various bits of information about the directory trees being compared. - Note that via :meth:`__getattr__` hooks, all attributes are computed lazily, + Note that via :meth:`~object.__getattr__` hooks, all attributes are computed lazily, so there is no speed penalty if only those attributes which are lightweight to compute are used. 
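A short sketch of the lazy behaviour described above (the directory names ``old`` and ``new`` are placeholders for this example, not part of the API)::

   import filecmp

   cmp = filecmp.dircmp("old", "new")

   # Each attribute is computed on first access via the __getattr__ hook,
   # so only the comparisons requested below are actually performed.
   print("only in old:", cmp.left_only)
   print("only in new:", cmp.right_only)
   print("files that differ:", cmp.diff_files)

   cmp.report()   # print a summary of the comparison to stdout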
diff --git a/Doc/library/fileinput.rst b/Doc/library/fileinput.rst index 4bc868759f2025..f93e9a58791eeb 100644 --- a/Doc/library/fileinput.rst +++ b/Doc/library/fileinput.rst @@ -177,7 +177,7 @@ available for subclassing as well: The keyword-only parameter *encoding* and *errors* are added. .. versionchanged:: 3.11 - The ``'rU'`` and ``'U'`` modes and the :meth:`__getitem__` method have + The ``'rU'`` and ``'U'`` modes and the :meth:`!__getitem__` method have been removed. diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst index fe2e8ab655edf8..509c63686f5a7f 100644 --- a/Doc/library/fractions.rst +++ b/Doc/library/fractions.rst @@ -25,7 +25,7 @@ another rational number, or from a string. The first version requires that *numerator* and *denominator* are instances of :class:`numbers.Rational` and returns a new :class:`Fraction` instance - with value ``numerator/denominator``. If *denominator* is :const:`0`, it + with value ``numerator/denominator``. If *denominator* is ``0``, it raises a :exc:`ZeroDivisionError`. The second version requires that *other_fraction* is an instance of :class:`numbers.Rational` and returns a :class:`Fraction` instance with the same value. The next two versions accept diff --git a/Doc/library/ftplib.rst b/Doc/library/ftplib.rst index e7fb5b1ae26960..d1fe6414ea020c 100644 --- a/Doc/library/ftplib.rst +++ b/Doc/library/ftplib.rst @@ -105,7 +105,7 @@ The module defines the following items: .. versionchanged:: 3.4 The class now supports hostname check with :attr:`ssl.SSLContext.check_hostname` and *Server Name Indication* (see - :data:`ssl.HAS_SNI`). + :const:`ssl.HAS_SNI`). .. versionchanged:: 3.9 If the *timeout* parameter is set to be zero, it will raise a @@ -431,7 +431,7 @@ FTP_TLS Objects .. attribute:: FTP_TLS.ssl_version - The SSL version to use (defaults to :attr:`ssl.PROTOCOL_SSLv23`). + The SSL version to use (defaults to :data:`ssl.PROTOCOL_SSLv23`). .. method:: FTP_TLS.auth() @@ -441,7 +441,7 @@ FTP_TLS Objects .. versionchanged:: 3.4 The method now supports hostname check with :attr:`ssl.SSLContext.check_hostname` and *Server Name Indication* (see - :data:`ssl.HAS_SNI`). + :const:`ssl.HAS_SNI`). .. method:: FTP_TLS.ccc() diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index d8091f0b093aab..88a7fdfe6f0d50 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -14,8 +14,8 @@ are always available. They are listed here in alphabetical order. | | :func:`abs` | | :func:`enumerate` | | :func:`len` | | |func-range|_ | | | :func:`aiter` | | :func:`eval` | | |func-list|_ | | :func:`repr` | | | :func:`all` | | :func:`exec` | | :func:`locals` | | :func:`reversed` | -| | :func:`any` | | | | | | :func:`round` | -| | :func:`anext` | | **F** | | **M** | | | +| | :func:`anext` | | | | | | :func:`round` | +| | :func:`any` | | **F** | | **M** | | | | | :func:`ascii` | | :func:`filter` | | :func:`map` | | **S** | | | | | :func:`float` | | :func:`max` | | |func-set|_ | | | **B** | | :func:`format` | | |func-memoryview|_ | | :func:`setattr` | @@ -1231,7 +1231,7 @@ are always available. They are listed here in alphabetical order. * Binary files are buffered in fixed-size chunks; the size of the buffer is chosen using a heuristic trying to determine the underlying device's "block - size" and falling back on :attr:`io.DEFAULT_BUFFER_SIZE`. On many systems, + size" and falling back on :const:`io.DEFAULT_BUFFER_SIZE`. On many systems, the buffer will typically be 4096 or 8192 bytes long. 
* "Interactive" text files (files for which :meth:`~io.IOBase.isatty` @@ -1752,7 +1752,7 @@ are always available. They are listed here in alphabetical order. The *start* parameter can be specified as a keyword argument. .. versionchanged:: 3.12 Summation of floats switched to an algorithm - that gives higher accuracy on most builds. + that gives higher accuracy and better commutativity on most builds. .. class:: super() diff --git a/Doc/library/gc.rst b/Doc/library/gc.rst index 0961ca4aaa9422..331c071cda7692 100644 --- a/Doc/library/gc.rst +++ b/Doc/library/gc.rst @@ -260,7 +260,7 @@ values but should not rebind them): .. versionchanged:: 3.4 Following :pep:`442`, objects with a :meth:`~object.__del__` method don't end - up in :attr:`gc.garbage` anymore. + up in :data:`gc.garbage` anymore. .. data:: callbacks diff --git a/Doc/library/gettext.rst b/Doc/library/gettext.rst index 747f8703b750ec..88a65b980d310f 100644 --- a/Doc/library/gettext.rst +++ b/Doc/library/gettext.rst @@ -411,6 +411,7 @@ One difference between this module and Henstridge's: his catalog objects supported access through a mapping API, but this appears to be unused and so is not currently supported. +.. _i18n-howto: Internationalizing your programs and modules -------------------------------------------- diff --git a/Doc/library/graphlib.rst b/Doc/library/graphlib.rst index fe7932e7a61cb5..fdd8f39ef4e1c4 100644 --- a/Doc/library/graphlib.rst +++ b/Doc/library/graphlib.rst @@ -115,7 +115,7 @@ :meth:`TopologicalSorter.done` is less than the number that have been returned by :meth:`TopologicalSorter.get_ready`. - The :meth:`~TopologicalSorter.__bool__` method of this class defers to + The :meth:`~object.__bool__` method of this class defers to this function, so instead of:: if ts.is_active(): @@ -204,7 +204,7 @@ The :mod:`graphlib` module defines the following exception classes: in the working graph. If multiple cycles exist, only one undefined choice among them will be reported and included in the exception. - The detected cycle can be accessed via the second element in the :attr:`~CycleError.args` + The detected cycle can be accessed via the second element in the :attr:`~BaseException.args` attribute of the exception instance and consists in a list of nodes, such that each node is, in the graph, an immediate predecessor of the next node in the list. In the reported list, the first and the last node will be the same, to make it clear that it is cyclic. diff --git a/Doc/library/gzip.rst b/Doc/library/gzip.rst index 06cbd2567a0bc6..60236a1190e423 100644 --- a/Doc/library/gzip.rst +++ b/Doc/library/gzip.rst @@ -70,7 +70,7 @@ The module defines the following items: .. class:: GzipFile(filename=None, mode=None, compresslevel=9, fileobj=None, mtime=None) Constructor for the :class:`GzipFile` class, which simulates most of the - methods of a :term:`file object`, with the exception of the :meth:`truncate` + methods of a :term:`file object`, with the exception of the :meth:`~io.IOBase.truncate` method. At least one of *fileobj* and *filename* must be given a non-trivial value. @@ -113,7 +113,7 @@ The module defines the following items: :class:`GzipFile` supports the :class:`io.BufferedIOBase` interface, including iteration and the :keyword:`with` statement. Only the - :meth:`truncate` method isn't implemented. + :meth:`~io.IOBase.truncate` method isn't implemented. :class:`GzipFile` also provides the following method and attribute: @@ -268,7 +268,7 @@ Command line options .. 
cmdoption:: file - If *file* is not specified, read from :attr:`sys.stdin`. + If *file* is not specified, read from :data:`sys.stdin`. .. cmdoption:: --fast diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst index 8102767a43d6dd..69fb79b49ca2a0 100644 --- a/Doc/library/hashlib.rst +++ b/Doc/library/hashlib.rst @@ -244,7 +244,7 @@ by the SHAKE algorithm. .. method:: shake.digest(length) - Return the digest of the data passed to the :meth:`update` method so far. + Return the digest of the data passed to the :meth:`~hash.update` method so far. This is a bytes object of size *length* which may contain bytes in the whole range from 0 to 255. @@ -507,9 +507,9 @@ Simple hashing To calculate hash of some data, you should first construct a hash object by calling the appropriate constructor function (:func:`blake2b` or -:func:`blake2s`), then update it with the data by calling :meth:`update` on the +:func:`blake2s`), then update it with the data by calling :meth:`~hash.update` on the object, and, finally, get the digest out of the object by calling -:meth:`digest` (or :meth:`hexdigest` for hex-encoded string). +:meth:`~hash.digest` (or :meth:`~hash.hexdigest` for hex-encoded string). >>> from hashlib import blake2b >>> h = blake2b() diff --git a/Doc/library/http.client.rst b/Doc/library/http.client.rst index b9ceab699cef63..c46314fc5e253b 100644 --- a/Doc/library/http.client.rst +++ b/Doc/library/http.client.rst @@ -83,7 +83,7 @@ The module provides the following classes: .. versionchanged:: 3.2 This class now supports HTTPS virtual hosts if possible (that is, - if :data:`ssl.HAS_SNI` is true). + if :const:`ssl.HAS_SNI` is true). .. versionchanged:: 3.4 The *strict* parameter was removed. HTTP 0.9-style "Simple Responses" are diff --git a/Doc/library/imaplib.rst b/Doc/library/imaplib.rst index 59d7711f9cbd3c..1f774e64b0eae3 100644 --- a/Doc/library/imaplib.rst +++ b/Doc/library/imaplib.rst @@ -106,7 +106,7 @@ There's also a subclass for secure connections: .. versionchanged:: 3.4 The class now supports hostname check with :attr:`ssl.SSLContext.check_hostname` and *Server Name Indication* (see - :data:`ssl.HAS_SNI`). + :const:`ssl.HAS_SNI`). .. versionchanged:: 3.9 The optional *timeout* parameter was added. @@ -503,7 +503,7 @@ An :class:`IMAP4` instance has the following methods: .. versionchanged:: 3.4 The method now supports hostname check with :attr:`ssl.SSLContext.check_hostname` and *Server Name Indication* (see - :data:`ssl.HAS_SNI`). + :const:`ssl.HAS_SNI`). .. method:: IMAP4.status(mailbox, names) diff --git a/Doc/library/importlib.resources.abc.rst b/Doc/library/importlib.resources.abc.rst index 2d0f137ffc7996..65c42858bbbb7d 100644 --- a/Doc/library/importlib.resources.abc.rst +++ b/Doc/library/importlib.resources.abc.rst @@ -130,7 +130,7 @@ suitable for reading (same as :attr:`pathlib.Path.open`). When opening as text, accepts encoding parameters such as those - accepted by :attr:`io.TextIOWrapper`. + accepted by :class:`io.TextIOWrapper`. .. method:: read_bytes() @@ -145,10 +145,10 @@ An abstract base class for resource readers capable of serving the :meth:`importlib.resources.files` interface. Subclasses - :class:`importlib.resources.abc.ResourceReader` and provides - concrete implementations of the :class:`importlib.resources.abc.ResourceReader`'s + :class:`ResourceReader` and provides + concrete implementations of the :class:`!ResourceReader`'s abstract methods. Therefore, any loader supplying - :class:`importlib.abc.TraversableResources` also supplies ResourceReader. 
+ :class:`!TraversableResources` also supplies :class:`!ResourceReader`. Loaders that wish to support resource reading are expected to implement this interface. diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index 65aaad0df9ee66..1d378dbbdace5d 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -372,7 +372,7 @@ ABC hierarchy:: The list of locations where the package's submodules will be found. Most of the time this is a single directory. The import system passes this attribute to ``__import__()`` and to finders - in the same way as :attr:`sys.path` but just for the package. + in the same way as :data:`sys.path` but just for the package. It is not set on non-package modules so it can be used as an indicator that the module is a package. @@ -609,7 +609,7 @@ ABC hierarchy:: automatically. When writing to the path fails because the path is read-only - (:attr:`errno.EACCES`/:exc:`PermissionError`), do not propagate the + (:const:`errno.EACCES`/:exc:`PermissionError`), do not propagate the exception. .. versionchanged:: 3.4 @@ -843,7 +843,7 @@ find and load modules. .. classmethod:: path_hook(*loader_details) - A class method which returns a closure for use on :attr:`sys.path_hooks`. + A class method which returns a closure for use on :data:`sys.path_hooks`. An instance of :class:`FileFinder` is returned by the closure using the path argument given to the closure directly and *loader_details* indirectly. @@ -941,8 +941,15 @@ find and load modules. The *fullname* argument specifies the name of the module the loader is to support. The *path* argument is the path to the extension module's file. + Note that, by default, importing an extension module will fail + in subinterpreters if it doesn't implement multi-phase init + (see :pep:`489`), even if it would otherwise import successfully. + .. versionadded:: 3.3 + .. versionchanged:: 3.12 + Multi-phase init is now required for use in subinterpreters. + .. attribute:: name Name of the module the loader supports. @@ -1184,10 +1191,10 @@ an :term:`importer`. .. function:: find_spec(name, package=None) Find the :term:`spec ` for a module, optionally relative to - the specified **package** name. If the module is in :attr:`sys.modules`, + the specified **package** name. If the module is in :data:`sys.modules`, then ``sys.modules[name].__spec__`` is returned (unless the spec would be ``None`` or is not set, in which case :exc:`ValueError` is raised). - Otherwise a search using :attr:`sys.meta_path` is done. ``None`` is + Otherwise a search using :data:`sys.meta_path` is done. ``None`` is returned if no spec is found. If **name** is for a submodule (contains a dot), the parent module is @@ -1248,6 +1255,30 @@ an :term:`importer`. .. versionadded:: 3.7 +.. function:: _incompatible_extension_module_restrictions(*, disable_check) + + A context manager that can temporarily skip the compatibility check + for extension modules. By default the check is enabled and will fail + when a single-phase init module is imported in a subinterpreter. + It will also fail for a multi-phase init module that doesn't + explicitly support a per-interpreter GIL, when imported + in an interpreter with its own GIL. + + Note that this function is meant to accommodate an unusual case; + one which is likely to eventually go away. There's a pretty good + chance this is not what you were looking for.
+ + You can get the same effect as this function by implementing the + basic interface of multi-phase init (:pep:`489`) and lying about + support for mulitple interpreters (or per-interpreter GIL). + + .. warning:: + Using this function to disable the check can lead to + unexpected behavior and even crashes. It should only be used during + extension module development. + + .. versionadded:: 3.12 + .. class:: LazyLoader(loader) A class which postpones the execution of the loader of a module until the @@ -1259,7 +1290,7 @@ an :term:`importer`. :meth:`~importlib.abc.Loader.create_module` method must return ``None`` or a type for which its ``__class__`` attribute can be mutated along with not using :term:`slots <__slots__>`. Finally, modules which substitute the object - placed into :attr:`sys.modules` will not work as there is no way to properly + placed into :data:`sys.modules` will not work as there is no way to properly replace the module references throughout the interpreter safely; :exc:`ValueError` is raised if such a substitution is detected. @@ -1383,9 +1414,9 @@ For deep customizations of import, you typically want to implement an :term:`importer`. This means managing both the :term:`finder` and :term:`loader` side of things. For finders there are two flavours to choose from depending on your needs: a :term:`meta path finder` or a :term:`path entry finder`. The -former is what you would put on :attr:`sys.meta_path` while the latter is what -you create using a :term:`path entry hook` on :attr:`sys.path_hooks` which works -with :attr:`sys.path` entries to potentially create a finder. This example will +former is what you would put on :data:`sys.meta_path` while the latter is what +you create using a :term:`path entry hook` on :data:`sys.path_hooks` which works +with :data:`sys.path` entries to potentially create a finder. This example will show you how to register your own importers so that import will use them (for creating an importer for yourself, read the documentation for the appropriate classes defined within this package):: diff --git a/Doc/library/io.rst b/Doc/library/io.rst index c9249da1c3c3d2..7eec1f87583b87 100644 --- a/Doc/library/io.rst +++ b/Doc/library/io.rst @@ -423,7 +423,7 @@ I/O Base Classes .. versionadded:: 3.3 Some operating systems could support additional values, like - :data:`os.SEEK_HOLE` or :data:`os.SEEK_DATA`. The valid values + :const:`os.SEEK_HOLE` or :const:`os.SEEK_DATA`. The valid values for a file could depend on it being open in text or binary mode. .. method:: seekable() diff --git a/Doc/library/json.rst b/Doc/library/json.rst index 5383614575c213..6c3059381776c9 100644 --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -192,7 +192,7 @@ Basic Usage dictionaries will be sorted by key. To use a custom :class:`JSONEncoder` subclass (e.g. one that overrides the - :meth:`default` method to serialize additional types), specify it with the + :meth:`~JSONEncoder.default` method to serialize additional types), specify it with the *cls* kwarg; otherwise :class:`JSONEncoder` is used. .. versionchanged:: 3.6 @@ -422,7 +422,7 @@ Encoders and Decoders Added support for int- and float-derived Enum classes. To extend this to recognize other objects, subclass and implement a - :meth:`default` method with another method that returns a serializable object + :meth:`~JSONEncoder.default` method with another method that returns a serializable object for ``o`` if possible, otherwise it should call the superclass implementation (to raise :exc:`TypeError`). 
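As a complementary sketch of that pattern (the ``DateTimeEncoder`` name is purely illustrative), a subclass can turn :class:`datetime.datetime` objects into ISO 8601 strings and defer everything else to the base class so that unsupported types still raise :exc:`TypeError`::

   import json
   from datetime import datetime

   class DateTimeEncoder(json.JSONEncoder):
       def default(self, o):
           if isinstance(o, datetime):
               return o.isoformat()      # a JSON-serializable substitute
           return super().default(o)     # fall back; raises TypeError

   print(json.dumps({"when": datetime(2024, 1, 2, 3, 4)}, cls=DateTimeEncoder))
   # -> {"when": "2024-01-02T03:04:00"}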
@@ -483,7 +483,7 @@ Encoders and Decoders :exc:`TypeError`). For example, to support arbitrary iterators, you could implement - :meth:`default` like this:: + :meth:`~JSONEncoder.default` like this:: def default(self, o): try: @@ -683,7 +683,7 @@ The :mod:`json.tool` module provides a simple command line interface to validate and pretty-print JSON objects. If the optional ``infile`` and ``outfile`` arguments are not -specified, :attr:`sys.stdin` and :attr:`sys.stdout` will be used respectively: +specified, :data:`sys.stdin` and :data:`sys.stdout` will be used respectively: .. code-block:: shell-session @@ -721,12 +721,12 @@ Command line options } ] - If *infile* is not specified, read from :attr:`sys.stdin`. + If *infile* is not specified, read from :data:`sys.stdin`. .. cmdoption:: outfile Write the output of the *infile* to the given *outfile*. Otherwise, write it - to :attr:`sys.stdout`. + to :data:`sys.stdout`. .. cmdoption:: --sort-keys diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst index d4429d3d0a4f73..2a825db54aed5c 100644 --- a/Doc/library/logging.handlers.rst +++ b/Doc/library/logging.handlers.rst @@ -97,7 +97,7 @@ sends logging output to a disk file. It inherits the output functionality from Returns a new instance of the :class:`FileHandler` class. The specified file is opened and used as the stream for logging. If *mode* is not specified, - :const:`'a'` is used. If *encoding* is not ``None``, it is used to open the file + ``'a'`` is used. If *encoding* is not ``None``, it is used to open the file with that encoding. If *delay* is true, then file opening is deferred until the first call to :meth:`emit`. By default, the file grows indefinitely. If *errors* is specified, it's used to determine how encoding errors are handled. @@ -182,7 +182,7 @@ for this value. Returns a new instance of the :class:`WatchedFileHandler` class. The specified file is opened and used as the stream for logging. If *mode* is not specified, - :const:`'a'` is used. If *encoding* is not ``None``, it is used to open the file + ``'a'`` is used. If *encoding* is not ``None``, it is used to open the file with that encoding. If *delay* is true, then file opening is deferred until the first call to :meth:`emit`. By default, the file grows indefinitely. If *errors* is provided, it determines how encoding errors are handled. @@ -917,8 +917,9 @@ should, then :meth:`flush` is expected to do the flushing. .. method:: flush() - You can override this to implement custom flushing behavior. This version - just zaps the buffer to empty. + For a :class:`BufferingHandler` instance, flushing means that it sets the + buffer to an empty list. This method can be overwritten to implement more useful + flushing behavior. .. method:: shouldFlush(record) @@ -950,9 +951,9 @@ should, then :meth:`flush` is expected to do the flushing. .. method:: flush() - For a :class:`MemoryHandler`, flushing means just sending the buffered + For a :class:`MemoryHandler` instance, flushing means just sending the buffered records to the target, if there is one. The buffer is also cleared when - this happens. Override if you want different behavior. + buffered records are sent to the target. Override if you want different behavior. .. method:: setTarget(target) @@ -1051,8 +1052,8 @@ possible, while any potentially slow operations (such as sending an email via occur (e.g. because a bounded queue has filled up), the :meth:`~logging.Handler.handleError` method is called to handle the error. 
This can result in the record silently being dropped (if - :attr:`logging.raiseExceptions` is ``False``) or a message printed to - ``sys.stderr`` (if :attr:`logging.raiseExceptions` is ``True``). + :data:`logging.raiseExceptions` is ``False``) or a message printed to + ``sys.stderr`` (if :data:`logging.raiseExceptions` is ``True``). .. method:: prepare(record) diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst index 4e07eabd57f5e9..4c6e74ff66a11a 100644 --- a/Doc/library/logging.rst +++ b/Doc/library/logging.rst @@ -397,21 +397,21 @@ have specific values relative to the predefined levels. If you define a level with the same numeric value, it overwrites the predefined value; the predefined name is lost. -+--------------+---------------+ -| Level | Numeric value | -+==============+===============+ -| ``CRITICAL`` | 50 | -+--------------+---------------+ -| ``ERROR`` | 40 | -+--------------+---------------+ -| ``WARNING`` | 30 | -+--------------+---------------+ -| ``INFO`` | 20 | -+--------------+---------------+ -| ``DEBUG`` | 10 | -+--------------+---------------+ -| ``NOTSET`` | 0 | -+--------------+---------------+ ++-----------------------+---------------+ +| Level | Numeric value | ++=======================+===============+ +| .. py:data:: CRITICAL | 50 | ++-----------------------+---------------+ +| .. py:data:: ERROR | 40 | ++-----------------------+---------------+ +| .. py:data:: WARNING | 30 | ++-----------------------+---------------+ +| .. py:data:: INFO | 20 | ++-----------------------+---------------+ +| .. py:data:: DEBUG | 10 | ++-----------------------+---------------+ +| .. py:data:: NOTSET | 0 | ++-----------------------+---------------+ .. _handler: diff --git a/Doc/library/lzma.rst b/Doc/library/lzma.rst index 868d4dcfb6c996..434e7ac9061186 100644 --- a/Doc/library/lzma.rst +++ b/Doc/library/lzma.rst @@ -100,7 +100,8 @@ Reading and writing compressed files *filters* arguments have the same meanings as for :class:`LZMACompressor`. :class:`LZMAFile` supports all the members specified by - :class:`io.BufferedIOBase`, except for :meth:`detach` and :meth:`truncate`. + :class:`io.BufferedIOBase`, except for :meth:`~io.BufferedIOBase.detach` + and :meth:`~io.IOBase.truncate`. Iteration and the :keyword:`with` statement are supported. The following method is also provided: diff --git a/Doc/library/mailbox.rst b/Doc/library/mailbox.rst index 56908dedea1b40..91df07d914cae2 100644 --- a/Doc/library/mailbox.rst +++ b/Doc/library/mailbox.rst @@ -477,7 +477,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. unlock() Three locking mechanisms are used---dot locking and, if available, the - :c:func:`flock` and :c:func:`lockf` system calls. + :c:func:`!flock` and :c:func:`!lockf` system calls. .. seealso:: @@ -588,7 +588,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. unlock() Three locking mechanisms are used---dot locking and, if available, the - :c:func:`flock` and :c:func:`lockf` system calls. For MH mailboxes, locking + :c:func:`!flock` and :c:func:`!lockf` system calls. For MH mailboxes, locking the mailbox means locking the :file:`.mh_sequences` file and, only for the duration of any operations that affect them, locking individual message files. @@ -686,7 +686,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. unlock() Three locking mechanisms are used---dot locking and, if available, the - :c:func:`flock` and :c:func:`lockf` system calls. + :c:func:`!flock` and :c:func:`!lockf` system calls. .. 
seealso:: @@ -737,7 +737,7 @@ Supported mailbox formats are Maildir, mbox, MH, Babyl, and MMDF. unlock() Three locking mechanisms are used---dot locking and, if available, the - :c:func:`flock` and :c:func:`lockf` system calls. + :c:func:`!flock` and :c:func:`!lockf` system calls. .. seealso:: diff --git a/Doc/library/msvcrt.rst b/Doc/library/msvcrt.rst index 42fffee6a0f449..32693e3d007c05 100644 --- a/Doc/library/msvcrt.rst +++ b/Doc/library/msvcrt.rst @@ -38,7 +38,7 @@ File Operations Lock part of a file based on file descriptor *fd* from the C runtime. Raises :exc:`OSError` on failure. The locked region of the file extends from the current file position for *nbytes* bytes, and may continue beyond the end of the - file. *mode* must be one of the :const:`LK_\*` constants listed below. Multiple + file. *mode* must be one of the :const:`!LK_\*` constants listed below. Multiple regions in a file may be locked at the same time, but may not overlap. Adjacent regions are not merged; they must be unlocked individually. diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst index 8454296b815b41..2efc08f130af32 100644 --- a/Doc/library/multiprocessing.rst +++ b/Doc/library/multiprocessing.rst @@ -2707,7 +2707,7 @@ handler type) for messages from different processes to get mixed up. Returns the logger used by :mod:`multiprocessing`. If necessary, a new one will be created. - When first created the logger has level :data:`logging.NOTSET` and no + When first created the logger has level :const:`logging.NOTSET` and no default handler. Messages sent to this logger will not by default propagate to the root logger. diff --git a/Doc/library/multiprocessing.shared_memory.rst b/Doc/library/multiprocessing.shared_memory.rst index 76046b34610abe..f453e6403d932d 100644 --- a/Doc/library/multiprocessing.shared_memory.rst +++ b/Doc/library/multiprocessing.shared_memory.rst @@ -255,16 +255,17 @@ shared memory blocks created using that manager are all released when the :keyword:`with` statement's code block finishes execution. -.. class:: ShareableList(sequence=None, *, name=None) +.. class:: ShareableList(sequence=None, \*, name=None) Provides a mutable list-like object where all values stored within are stored in a shared memory block. This constrains storable values to - only the ``int``, ``float``, ``bool``, ``str`` (less than 10M bytes each), - ``bytes`` (less than 10M bytes each), and ``None`` built-in data types. - It also notably differs from the built-in ``list`` type in that these - lists can not change their overall length (i.e. no append, insert, etc.) - and do not support the dynamic creation of new :class:`ShareableList` - instances via slicing. + only the ``int`` (signed 64-bit), ``float``, ``bool``, ``str`` (less + than 10M bytes each when encoded as utf-8), ``bytes`` (less than 10M + bytes each), and ``None`` built-in data types. It also notably + differs from the built-in ``list`` type in that these lists can not + change their overall length (i.e. no append, insert, etc.) and do not + support the dynamic creation of new :class:`ShareableList` instances + via slicing. *sequence* is used in populating a new ``ShareableList`` full of values. Set to ``None`` to instead attach to an already existing @@ -275,6 +276,35 @@ shared memory blocks created using that manager are all released when the existing ``ShareableList``, specify its shared memory block's unique name while leaving ``sequence`` set to ``None``. + .. 
note:: + + A known issue exists for :class:`bytes` and :class:`str` values. + If they end with ``\x00`` nul bytes or characters, those may be + *silently stripped* when fetching them by index from the + :class:`ShareableList`. This ``.rstrip(b'\x00')`` behavior is + considered a bug and may go away in the future. See :gh:`106939`. + + For applications where rstripping of trailing nulls is a problem, + work around it by always unconditionally appending an extra non-0 + byte to the end of such values when storing and unconditionally + removing it when fetching: + + .. doctest:: + + >>> from multiprocessing import shared_memory + >>> nul_bug_demo = shared_memory.ShareableList(['?\x00', b'\x03\x02\x01\x00\x00\x00']) + >>> nul_bug_demo[0] + '?' + >>> nul_bug_demo[1] + b'\x03\x02\x01' + >>> nul_bug_demo.shm.unlink() + >>> padded = shared_memory.ShareableList(['?\x00\x07', b'\x03\x02\x01\x00\x00\x00\x07']) + >>> padded[0][:-1] + '?\x00' + >>> padded[1][:-1] + b'\x03\x02\x01\x00\x00\x00' + >>> padded.shm.unlink() + .. method:: count(value) Returns the number of occurrences of ``value``. diff --git a/Doc/library/netrc.rst b/Doc/library/netrc.rst index 88265d9b9e9e93..c36e5cfecfc6a8 100644 --- a/Doc/library/netrc.rst +++ b/Doc/library/netrc.rst @@ -51,9 +51,19 @@ the Unix :program:`ftp` program and other FTP clients. Exception raised by the :class:`~netrc.netrc` class when syntactical errors are encountered in source text. Instances of this exception provide three - interesting attributes: :attr:`msg` is a textual explanation of the error, - :attr:`filename` is the name of the source file, and :attr:`lineno` gives the - line number on which the error was found. + interesting attributes: + + .. attribute:: msg + + Textual explanation of the error. + + .. attribute:: filename + + The name of the source file. + + .. attribute:: lineno + + The line number on which the error was found. .. _netrc-objects: diff --git a/Doc/library/operator.rst b/Doc/library/operator.rst index dab4de9eb6abb7..57c67bcf3aa12e 100644 --- a/Doc/library/operator.rst +++ b/Doc/library/operator.rst @@ -59,9 +59,9 @@ truth tests, identity tests, and boolean operations: __not__(obj) Return the outcome of :keyword:`not` *obj*. (Note that there is no - :meth:`__not__` method for object instances; only the interpreter core defines - this operation. The result is affected by the :meth:`__bool__` and - :meth:`__len__` methods.) + :meth:`!__not__` method for object instances; only the interpreter core defines + this operation. The result is affected by the :meth:`~object.__bool__` and + :meth:`~object.__len__` methods.) .. function:: truth(obj) diff --git a/Doc/library/optparse.rst b/Doc/library/optparse.rst index 01177a04ab434d..015e83ed2ce5f7 100644 --- a/Doc/library/optparse.rst +++ b/Doc/library/optparse.rst @@ -813,7 +813,7 @@ The first step in using :mod:`optparse` is to create an OptionParser instance. help option. When :mod:`optparse` prints the usage string, it expands ``%prog`` to ``os.path.basename(sys.argv[0])`` (or to ``prog`` if you passed that keyword argument). To suppress a usage message, pass the - special value :data:`optparse.SUPPRESS_USAGE`. + special value :const:`optparse.SUPPRESS_USAGE`. ``option_list`` (default: ``[]``) A list of Option objects to populate the parser with. 
The options in @@ -1079,7 +1079,7 @@ relevant to a particular option, or fail to pass a required option attribute, Help text to print for this option when listing all available options after the user supplies a :attr:`~Option.help` option (such as ``--help``). If no help text is supplied, the option will be listed without help text. To - hide this option, use the special value :data:`optparse.SUPPRESS_HELP`. + hide this option, use the special value :const:`optparse.SUPPRESS_HELP`. .. attribute:: Option.metavar @@ -1251,7 +1251,7 @@ must specify for any option using that action. If no :attr:`~Option.help` string is supplied for an option, it will still be listed in the help message. To omit an option entirely, use the special value - :data:`optparse.SUPPRESS_HELP`. + :const:`optparse.SUPPRESS_HELP`. :mod:`optparse` automatically adds a :attr:`~Option.help` option to all OptionParsers, so you do not normally need to create one. @@ -1522,7 +1522,7 @@ OptionParser supports several other public methods: Set the usage string according to the rules described above for the ``usage`` constructor keyword argument. Passing ``None`` sets the default usage - string; use :data:`optparse.SUPPRESS_USAGE` to suppress a usage message. + string; use :const:`optparse.SUPPRESS_USAGE` to suppress a usage message. .. method:: OptionParser.print_usage(file=None) diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst index 3a668e28f2e268..6f9e0853bc8947 100644 --- a/Doc/library/os.path.rst +++ b/Doc/library/os.path.rst @@ -410,7 +410,7 @@ the :mod:`glob` module.) *start*. On Windows, :exc:`ValueError` is raised when *path* and *start* are on different drives. - *start* defaults to :attr:`os.curdir`. + *start* defaults to :data:`os.curdir`. .. availability:: Unix, Windows. diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 83abb5d5ca1e42..9735baa5bc0f3a 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -60,7 +60,7 @@ Notes on the availability of these functions: ``'java'``. .. seealso:: - :attr:`sys.platform` has a finer granularity. :func:`os.uname` gives + :data:`sys.platform` has a finer granularity. :func:`os.uname` gives system-dependent version information. The :mod:`platform` module provides detailed checks for the @@ -233,7 +233,7 @@ process and user. :data:`environ` and :data:`environb` are synchronized (modifying :data:`environb` updates :data:`environ`, and vice versa). - :data:`environb` is only available if :data:`supports_bytes_environ` is + :data:`environb` is only available if :const:`supports_bytes_environ` is ``True``. .. versionadded:: 3.2 @@ -331,7 +331,7 @@ process and user. future environment changes. - :func:`getenvb` is only available if :data:`supports_bytes_environ` + :func:`getenvb` is only available if :const:`supports_bytes_environ` is ``True``. .. availability:: Unix. @@ -401,11 +401,11 @@ process and user. On macOS, :func:`getgroups` behavior differs somewhat from other Unix platforms. If the Python interpreter was built with a - deployment target of :const:`10.5` or earlier, :func:`getgroups` returns + deployment target of ``10.5`` or earlier, :func:`getgroups` returns the list of effective group ids associated with the current user process; this list is limited to a system-defined number of entries, typically 16, and may be modified by calls to :func:`setgroups` if suitably privileged. 
- If built with a deployment target greater than :const:`10.5`, + If built with a deployment target greater than ``10.5``, :func:`getgroups` returns the current group access list for the user associated with the effective user id of the process; the group access list may change over the lifetime of the process, it is not affected by @@ -714,14 +714,14 @@ process and user. .. function:: getsid(pid, /) - Call the system call :c:func:`getsid`. See the Unix manual for the semantics. + Call the system call :c:func:`!getsid`. See the Unix manual for the semantics. .. availability:: Unix, not Emscripten, not WASI. .. function:: setsid() - Call the system call :c:func:`setsid`. See the Unix manual for the semantics. + Call the system call :c:func:`!setsid`. See the Unix manual for the semantics. .. availability:: Unix, not Emscripten, not WASI. @@ -739,7 +739,7 @@ process and user. .. function:: strerror(code, /) Return the error message corresponding to the error code in *code*. - On platforms where :c:func:`strerror` returns ``NULL`` when given an unknown + On platforms where :c:func:`!strerror` returns ``NULL`` when given an unknown error number, :exc:`ValueError` is raised. @@ -923,7 +923,7 @@ as internal buffering of data. In Linux kernel older than 5.3, the files pointed by *src* and *dst* must reside in the same filesystem, otherwise an :exc:`OSError` is - raised with :attr:`~OSError.errno` set to :data:`errno.EXDEV`. + raised with :attr:`~OSError.errno` set to :const:`errno.EXDEV`. This copy is done without the additional cost of transferring data from the kernel to user space and then back into the kernel. Additionally, @@ -1181,7 +1181,7 @@ as internal buffering of data. .. versionadded:: 3.3 Some operating systems could support additional values, like - :data:`os.SEEK_HOLE` or :data:`os.SEEK_DATA`. + :const:`os.SEEK_HOLE` or :const:`os.SEEK_DATA`. .. function:: open(path, flags, mode=0o777, *, dir_fd=None) @@ -1422,7 +1422,7 @@ or `the MSDN `_ on Windo If some data was successfully read, it will return the number of bytes read. If no bytes were read, it will return ``-1`` and set errno to - :data:`errno.EAGAIN`. + :const:`errno.EAGAIN`. .. availability:: Linux >= 4.14. @@ -1627,7 +1627,7 @@ or `the MSDN `_ on Windo *offset_dst*. The offset associated to the file descriptor that refers to a pipe must be ``None``. The files pointed by *src* and *dst* must reside in the same filesystem, otherwise an :exc:`OSError` is raised with - :attr:`~OSError.errno` set to :data:`errno.EXDEV`. + :attr:`~OSError.errno` set to :const:`errno.EXDEV`. This copy is done without the additional cost of transferring data from the kernel to user space and then back into the kernel. Additionally, @@ -1960,18 +1960,18 @@ features: Set the flags of *path* to the numeric *flags*. 
*flags* may take a combination (bitwise OR) of the following values (as defined in the :mod:`stat` module): - * :data:`stat.UF_NODUMP` - * :data:`stat.UF_IMMUTABLE` - * :data:`stat.UF_APPEND` - * :data:`stat.UF_OPAQUE` - * :data:`stat.UF_NOUNLINK` - * :data:`stat.UF_COMPRESSED` - * :data:`stat.UF_HIDDEN` - * :data:`stat.SF_ARCHIVED` - * :data:`stat.SF_IMMUTABLE` - * :data:`stat.SF_APPEND` - * :data:`stat.SF_NOUNLINK` - * :data:`stat.SF_SNAPSHOT` + * :const:`stat.UF_NODUMP` + * :const:`stat.UF_IMMUTABLE` + * :const:`stat.UF_APPEND` + * :const:`stat.UF_OPAQUE` + * :const:`stat.UF_NOUNLINK` + * :const:`stat.UF_COMPRESSED` + * :const:`stat.UF_HIDDEN` + * :const:`stat.SF_ARCHIVED` + * :const:`stat.SF_IMMUTABLE` + * :const:`stat.SF_APPEND` + * :const:`stat.SF_NOUNLINK` + * :const:`stat.SF_SNAPSHOT` This function can support :ref:`not following symlinks `. @@ -1992,25 +1992,25 @@ features: following values (as defined in the :mod:`stat` module) or bitwise ORed combinations of them: - * :data:`stat.S_ISUID` - * :data:`stat.S_ISGID` - * :data:`stat.S_ENFMT` - * :data:`stat.S_ISVTX` - * :data:`stat.S_IREAD` - * :data:`stat.S_IWRITE` - * :data:`stat.S_IEXEC` - * :data:`stat.S_IRWXU` - * :data:`stat.S_IRUSR` - * :data:`stat.S_IWUSR` - * :data:`stat.S_IXUSR` - * :data:`stat.S_IRWXG` - * :data:`stat.S_IRGRP` - * :data:`stat.S_IWGRP` - * :data:`stat.S_IXGRP` - * :data:`stat.S_IRWXO` - * :data:`stat.S_IROTH` - * :data:`stat.S_IWOTH` - * :data:`stat.S_IXOTH` + * :const:`stat.S_ISUID` + * :const:`stat.S_ISGID` + * :const:`stat.S_ENFMT` + * :const:`stat.S_ISVTX` + * :const:`stat.S_IREAD` + * :const:`stat.S_IWRITE` + * :const:`stat.S_IEXEC` + * :const:`stat.S_IRWXU` + * :const:`stat.S_IRUSR` + * :const:`stat.S_IWUSR` + * :const:`stat.S_IXUSR` + * :const:`stat.S_IRWXG` + * :const:`stat.S_IRGRP` + * :const:`stat.S_IWGRP` + * :const:`stat.S_IXGRP` + * :const:`stat.S_IRWXO` + * :const:`stat.S_IROTH` + * :const:`stat.S_IWOTH` + * :const:`stat.S_IXOTH` This function can support :ref:`specifying a file descriptor `, :ref:`paths relative to directory descriptors ` and :ref:`not @@ -2150,7 +2150,7 @@ features: .. audit-event:: os.link src,dst,src_dir_fd,dst_dir_fd os.link - .. availability:: Unix, Windows. + .. availability:: Unix, Windows, not Emscripten. .. versionchanged:: 3.2 Added Windows support. @@ -2420,13 +2420,13 @@ features: .. function:: major(device, /) Extract the device major number from a raw device number (usually the - :attr:`st_dev` or :attr:`st_rdev` field from :c:type:`stat`). + :attr:`st_dev` or :attr:`st_rdev` field from :c:struct:`stat`). .. function:: minor(device, /) Extract the device minor number from a raw device number (usually the - :attr:`st_dev` or :attr:`st_rdev` field from :c:type:`stat`). + :attr:`st_dev` or :attr:`st_rdev` field from :c:struct:`stat`). .. function:: makedev(major, minor, /) @@ -2937,7 +2937,7 @@ features: .. class:: stat_result Object whose attributes correspond roughly to the members of the - :c:type:`stat` structure. It is used for the result of :func:`os.stat`, + :c:struct:`stat` structure. It is used for the result of :func:`os.stat`, :func:`os.fstat` and :func:`os.lstat`. Attributes: @@ -3117,12 +3117,12 @@ features: See the ``IO_REPARSE_TAG_*`` constants in the :mod:`stat` module. The standard module :mod:`stat` defines functions and constants that are - useful for extracting information from a :c:type:`stat` structure. (On + useful for extracting information from a :c:struct:`stat` structure. (On Windows, some items are filled with dummy values.) 
For backward compatibility, a :class:`stat_result` instance is also accessible as a tuple of at least 10 integers giving the most important (and - portable) members of the :c:type:`stat` structure, in the order + portable) members of the :c:struct:`stat` structure, in the order :attr:`st_mode`, :attr:`st_ino`, :attr:`st_dev`, :attr:`st_nlink`, :attr:`st_uid`, :attr:`st_gid`, :attr:`st_size`, :attr:`st_atime`, :attr:`st_mtime`, :attr:`st_ctime`. More items may be added at the end by @@ -3174,7 +3174,7 @@ features: Perform a :c:func:`statvfs` system call on the given path. The return value is an object whose attributes describe the filesystem on the given path, and - correspond to the members of the :c:type:`statvfs` structure, namely: + correspond to the members of the :c:struct:`statvfs` structure, namely: :attr:`f_bsize`, :attr:`f_frsize`, :attr:`f_blocks`, :attr:`f_bfree`, :attr:`f_bavail`, :attr:`f_files`, :attr:`f_ffree`, :attr:`f_favail`, :attr:`f_flag`, :attr:`f_namemax`, :attr:`f_fsid`. @@ -4151,8 +4151,8 @@ written in Python, such as a mail server's external command delivery program. Send signal *sig* to the process *pid*. Constants for the specific signals available on the host platform are defined in the :mod:`signal` module. - Windows: The :data:`signal.CTRL_C_EVENT` and - :data:`signal.CTRL_BREAK_EVENT` signals are special signals which can + Windows: The :const:`signal.CTRL_C_EVENT` and + :const:`signal.CTRL_BREAK_EVENT` signals are special signals which can only be sent to console processes which share a common console window, e.g., some subprocesses. Any other value for *sig* will cause the process to be unconditionally killed by the TerminateProcess API, and the exit code @@ -4205,7 +4205,7 @@ written in Python, such as a mail server's external command delivery program. This flag indicates that the file descriptor will be non-blocking. If the process referred to by the file descriptor has not yet terminated, then an attempt to wait on the file descriptor using :manpage:`waitid(2)` - will immediately return the error :data:`~errno.EAGAIN` rather than blocking. + will immediately return the error :const:`~errno.EAGAIN` rather than blocking. .. availability:: Linux >= 5.10 .. versionadded:: 3.12 @@ -4308,7 +4308,7 @@ written in Python, such as a mail server's external command delivery program. specified. If the value specified is 0, the child's process group ID will be made the same as its process ID. If the value of *setpgroup* is not set, the child will inherit the parent's process group ID. This argument corresponds - to the C library :c:data:`POSIX_SPAWN_SETPGROUP` flag. + to the C library :c:macro:`POSIX_SPAWN_SETPGROUP` flag. If the *resetids* argument is ``True`` it will reset the effective UID and GID of the child to the real UID and GID of the parent process. If the @@ -4316,27 +4316,27 @@ written in Python, such as a mail server's external command delivery program. the parent. In either case, if the set-user-ID and set-group-ID permission bits are enabled on the executable file, their effect will override the setting of the effective UID and GID. This argument corresponds to the C - library :c:data:`POSIX_SPAWN_RESETIDS` flag. + library :c:macro:`POSIX_SPAWN_RESETIDS` flag. If the *setsid* argument is ``True``, it will create a new session ID - for ``posix_spawn``. *setsid* requires :c:data:`POSIX_SPAWN_SETSID` - or :c:data:`POSIX_SPAWN_SETSID_NP` flag. Otherwise, :exc:`NotImplementedError` + for ``posix_spawn``. 
*setsid* requires :c:macro:`POSIX_SPAWN_SETSID` + or :c:macro:`POSIX_SPAWN_SETSID_NP` flag. Otherwise, :exc:`NotImplementedError` is raised. The *setsigmask* argument will set the signal mask to the signal set specified. If the parameter is not used, then the child inherits the parent's signal mask. This argument corresponds to the C library - :c:data:`POSIX_SPAWN_SETSIGMASK` flag. + :c:macro:`POSIX_SPAWN_SETSIGMASK` flag. The *sigdef* argument will reset the disposition of all signals in the set specified. This argument corresponds to the C library - :c:data:`POSIX_SPAWN_SETSIGDEF` flag. + :c:macro:`POSIX_SPAWN_SETSIGDEF` flag. The *scheduler* argument must be a tuple containing the (optional) scheduler policy and an instance of :class:`sched_param` with the scheduler parameters. A value of ``None`` in the place of the scheduler policy indicates that is not being provided. This argument is a combination of the C library - :c:data:`POSIX_SPAWN_SETSCHEDPARAM` and :c:data:`POSIX_SPAWN_SETSCHEDULER` + :c:macro:`POSIX_SPAWN_SETSCHEDPARAM` and :c:macro:`POSIX_SPAWN_SETSCHEDULER` flags. .. audit-event:: os.posix_spawn path,argv,env os.posix_spawn @@ -4650,11 +4650,11 @@ written in Python, such as a mail server's external command delivery program. :data:`WNOHANG` and :data:`WNOWAIT` are additional optional flags. The return value is an object representing the data contained in the - :c:type:`!siginfo_t` structure with the following attributes: + :c:type:`siginfo_t` structure with the following attributes: * :attr:`!si_pid` (process ID) * :attr:`!si_uid` (real user ID of the child) - * :attr:`!si_signo` (always :data:`~signal.SIGCHLD`) + * :attr:`!si_signo` (always :const:`~signal.SIGCHLD`) * :attr:`!si_status` (the exit status or signal number, depending on :attr:`!si_code`) * :attr:`!si_code` (see :data:`CLD_EXITED` for possible values) @@ -4892,7 +4892,7 @@ used to determine the disposition of a process. .. function:: WIFCONTINUED(status) Return ``True`` if a stopped child has been resumed by delivery of - :data:`~signal.SIGCONT` (if the process has been continued from a job + :const:`~signal.SIGCONT` (if the process has been continued from a job control stop), otherwise return ``False``. See :data:`WCONTINUED` option. @@ -5264,7 +5264,7 @@ Random numbers ``/dev/urandom`` devices. The flags argument is a bit mask that can contain zero or more of the - following values ORed together: :py:data:`os.GRND_RANDOM` and + following values ORed together: :py:const:`os.GRND_RANDOM` and :py:data:`GRND_NONBLOCK`. See also the `Linux getrandom() manual page diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 01dabe286969bb..22360b22fd924b 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -976,6 +976,11 @@ call fails (for example because the path doesn't exist). .. versionchanged:: 3.13 The *follow_symlinks* parameter was added. + .. versionchanged:: 3.13 + Emits :exc:`FutureWarning` if the pattern ends with "``**``". In a + future Python release, patterns with this ending will match both files + and directories. Add a trailing slash to match only directories. + .. method:: Path.group() Return the name of the group owning the file. :exc:`KeyError` is raised diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst index 69c4dfc422c98e..ec2a7ebd5d6e0b 100644 --- a/Doc/library/platform.rst +++ b/Doc/library/platform.rst @@ -46,7 +46,7 @@ Cross Platform universal files containing multiple architectures. 
To get at the "64-bitness" of the current interpreter, it is more - reliable to query the :attr:`sys.maxsize` attribute:: + reliable to query the :data:`sys.maxsize` attribute:: is_64bits = sys.maxsize > 2**32 diff --git a/Doc/library/poplib.rst b/Doc/library/poplib.rst index 260c4a63d12031..943eb21f6eec02 100644 --- a/Doc/library/poplib.rst +++ b/Doc/library/poplib.rst @@ -77,7 +77,7 @@ The :mod:`poplib` module provides two classes: .. versionchanged:: 3.4 The class now supports hostname check with :attr:`ssl.SSLContext.check_hostname` and *Server Name Indication* (see - :data:`ssl.HAS_SNI`). + :const:`ssl.HAS_SNI`). .. versionchanged:: 3.9 If the *timeout* parameter is set to be zero, it will raise a @@ -148,7 +148,7 @@ A :class:`POP3` instance has the following methods: .. method:: POP3.pass_(password) Send password, response includes message count and mailbox size. Note: the - mailbox on the server is locked until :meth:`~poplib.quit` is called. + mailbox on the server is locked until :meth:`~POP3.quit` is called. .. method:: POP3.apop(user, secret) @@ -240,7 +240,7 @@ A :class:`POP3` instance has the following methods: This method supports hostname checking via :attr:`ssl.SSLContext.check_hostname` and *Server Name Indication* (see - :data:`ssl.HAS_SNI`). + :const:`ssl.HAS_SNI`). .. versionadded:: 3.4 diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst index d8269ef48cb36a..e883acd67d6c72 100644 --- a/Doc/library/pprint.rst +++ b/Doc/library/pprint.rst @@ -45,7 +45,7 @@ The :mod:`pprint` module defines one class: several keyword parameters. *stream* (default ``sys.stdout``) is a :term:`file-like object` to - which the output will be written by calling its :meth:`write` method. + which the output will be written by calling its :meth:`!write` method. If both *stream* and ``sys.stdout`` are ``None``, then :meth:`~PrettyPrinter.pprint` silently returns. diff --git a/Doc/library/pty.rst b/Doc/library/pty.rst index 7f4da41e93802d..ad4981c97119fa 100644 --- a/Doc/library/pty.rst +++ b/Doc/library/pty.rst @@ -71,7 +71,7 @@ The :mod:`pty` module defines the following functions: Return the exit status value from :func:`os.waitpid` on the child process. - :func:`waitstatus_to_exitcode` can be used to convert the exit status into + :func:`os.waitstatus_to_exitcode` can be used to convert the exit status into an exit code. .. audit-event:: pty.spawn argv pty.spawn diff --git a/Doc/library/re.rst b/Doc/library/re.rst index b7510b93d75427..3f03f0341d8166 100644 --- a/Doc/library/re.rst +++ b/Doc/library/re.rst @@ -501,6 +501,8 @@ The special characters are: in the ASCII range (``b'\x00'``-``b'\x7f'``). +.. _re-special-sequences: + The special sequences consist of ``'\'`` and a character from the list below. If the ordinary character is not an ASCII digit or an ASCII letter, then the resulting RE will match the second character. For example, ``\$`` matches the @@ -632,8 +634,8 @@ character ``'$'``. single: \x; in regular expressions single: \\; in regular expressions -Most of the standard escapes supported by Python string literals are also -accepted by the regular expression parser:: +Most of the :ref:`escape sequences ` supported by Python +string literals are also accepted by the regular expression parser:: \a \b \f \n \N \r \t \u @@ -779,6 +781,17 @@ Flags Corresponds to the inline flag ``(?s)``. +.. data:: U + UNICODE + + In Python 2, this flag made :ref:`special sequences ` + include Unicode characters in matches. Since Python 3, Unicode characters + are matched by default. 
+ + See :const:`A` for restricting matching on ASCII characters instead. + + This flag is only kept for backward compatibility. + .. data:: X VERBOSE @@ -1520,14 +1533,14 @@ Simulating scanf() .. index:: single: scanf() -Python does not currently have an equivalent to :c:func:`scanf`. Regular +Python does not currently have an equivalent to :c:func:`!scanf`. Regular expressions are generally more powerful, though also more verbose, than -:c:func:`scanf` format strings. The table below offers some more-or-less -equivalent mappings between :c:func:`scanf` format tokens and regular +:c:func:`!scanf` format strings. The table below offers some more-or-less +equivalent mappings between :c:func:`!scanf` format tokens and regular expressions. +--------------------------------+---------------------------------------------+ -| :c:func:`scanf` Token | Regular Expression | +| :c:func:`!scanf` Token | Regular Expression | +================================+=============================================+ | ``%c`` | ``.`` | +--------------------------------+---------------------------------------------+ @@ -1552,7 +1565,7 @@ To extract the filename and numbers from a string like :: /usr/sbin/sendmail - 0 errors, 4 warnings -you would use a :c:func:`scanf` format like :: +you would use a :c:func:`!scanf` format like :: %s - %d errors, %d warnings diff --git a/Doc/library/runpy.rst b/Doc/library/runpy.rst index 42ed8c253b8027..406b080b7be30f 100644 --- a/Doc/library/runpy.rst +++ b/Doc/library/runpy.rst @@ -39,7 +39,7 @@ The :mod:`runpy` module provides two functions: The *mod_name* argument should be an absolute module name. If the module name refers to a package rather than a normal - module, then that package is imported and the ``__main__`` submodule within + module, then that package is imported and the :mod:`__main__` submodule within that package is then executed and the resulting module globals dictionary returned. @@ -74,7 +74,7 @@ The :mod:`runpy` module provides two functions: Note that this manipulation of :mod:`sys` is not thread-safe. Other threads may see the partially initialised module, as well as the altered list of - arguments. It is recommended that the :mod:`sys` module be left alone when + arguments. It is recommended that the ``sys`` module be left alone when invoking this function from threaded code. .. seealso:: @@ -82,7 +82,7 @@ The :mod:`runpy` module provides two functions: command line. .. versionchanged:: 3.1 - Added ability to execute packages by looking for a ``__main__`` submodule. + Added ability to execute packages by looking for a :mod:`__main__` submodule. .. versionchanged:: 3.2 Added ``__cached__`` global variable (see :pep:`3147`). @@ -106,15 +106,16 @@ The :mod:`runpy` module provides two functions: Execute the code at the named filesystem location and return the resulting module globals dictionary. As with a script name supplied to the CPython command line, the supplied path may refer to a Python source file, a - compiled bytecode file or a valid sys.path entry containing a ``__main__`` - module (e.g. a zipfile containing a top-level ``__main__.py`` file). + compiled bytecode file or a valid :data:`sys.path` entry containing a + :mod:`__main__` module + (e.g. a zipfile containing a top-level ``__main__.py`` file). For a simple script, the specified code is simply executed in a fresh - module namespace. For a valid sys.path entry (typically a zipfile or + module namespace. 
For a valid :data:`sys.path` entry (typically a zipfile or directory), the entry is first added to the beginning of ``sys.path``. The function then looks for and executes a :mod:`__main__` module using the updated path. Note that there is no special protection against invoking - an existing :mod:`__main__` entry located elsewhere on ``sys.path`` if + an existing ``__main__`` entry located elsewhere on ``sys.path`` if there is no such module at the specified location. The optional dictionary argument *init_globals* may be used to pre-populate @@ -137,14 +138,14 @@ The :mod:`runpy` module provides two functions: supplied path, and ``__spec__``, ``__cached__``, ``__loader__`` and ``__package__`` will all be set to :const:`None`. - If the supplied path is a reference to a valid sys.path entry, then - ``__spec__`` will be set appropriately for the imported ``__main__`` + If the supplied path is a reference to a valid :data:`sys.path` entry, then + ``__spec__`` will be set appropriately for the imported :mod:`__main__` module (that is, ``__spec__.name`` will always be ``__main__``). ``__file__``, ``__cached__``, ``__loader__`` and ``__package__`` will be :ref:`set as normal ` based on the module spec. A number of alterations are also made to the :mod:`sys` module. Firstly, - ``sys.path`` may be altered as described above. ``sys.argv[0]`` is updated + :data:`sys.path` may be altered as described above. ``sys.argv[0]`` is updated with the value of ``path_name`` and ``sys.modules[__name__]`` is updated with a temporary module object for the module being executed. All modifications to items in :mod:`sys` are reverted before the function @@ -152,7 +153,7 @@ The :mod:`runpy` module provides two functions: Note that, unlike :func:`run_module`, the alterations made to :mod:`sys` are not optional in this function as these adjustments are essential to - allowing the execution of sys.path entries. As the thread-safety + allowing the execution of :data:`sys.path` entries. As the thread-safety limitations still apply, use of this function in threaded code should be either serialised with the import lock or delegated to a separate process. @@ -165,7 +166,7 @@ The :mod:`runpy` module provides two functions: .. versionchanged:: 3.4 Updated to take advantage of the module spec feature added by :pep:`451`. This allows ``__cached__`` to be set correctly in the - case where ``__main__`` is imported from a valid sys.path entry rather + case where ``__main__`` is imported from a valid :data:`sys.path` entry rather than being executed directly. .. versionchanged:: 3.12 diff --git a/Doc/library/sched.rst b/Doc/library/sched.rst index 04215d31ba10ca..01bac5afd0b9b3 100644 --- a/Doc/library/sched.rst +++ b/Doc/library/sched.rst @@ -115,7 +115,7 @@ Scheduler Objects .. method:: scheduler.run(blocking=True) - Run all scheduled events. This method will wait (using the :func:`delayfunc` + Run all scheduled events. This method will wait (using the *delayfunc* function passed to the constructor) for the next event, then execute it and so on until there are no more scheduled events. 
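(Editorial note, not part of the patch.) In the ``sched`` hunk just above, *delayfunc* is simply the second callable handed to the constructor; a minimal sketch of the usual pairing with :func:`time.monotonic` and :func:`time.sleep`::

    import sched
    import time

    # run() waits for each event using the delayfunc given here (time.sleep).
    s = sched.scheduler(time.monotonic, time.sleep)
    s.enter(1, 1, print, argument=("one second later",))
    s.enter(2, 1, print, argument=("two seconds later",))
    s.run()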
diff --git a/Doc/library/select.rst b/Doc/library/select.rst index b0891b0c8f584a..c2941e628d9d78 100644 --- a/Doc/library/select.rst +++ b/Doc/library/select.rst @@ -6,10 +6,10 @@ -------------- -This module provides access to the :c:func:`select` and :c:func:`poll` functions -available in most operating systems, :c:func:`devpoll` available on -Solaris and derivatives, :c:func:`epoll` available on Linux 2.5+ and -:c:func:`kqueue` available on most BSD. +This module provides access to the :c:func:`!select` and :c:func:`!poll` functions +available in most operating systems, :c:func:`!devpoll` available on +Solaris and derivatives, :c:func:`!epoll` available on Linux 2.5+ and +:c:func:`!kqueue` available on most BSD. Note that on Windows, it only works for sockets; on other operating systems, it also works for other file types (in particular, on Unix, it works on pipes). It cannot be used on regular files to determine whether a file has grown since @@ -41,10 +41,10 @@ The module defines the following: polling object; see section :ref:`devpoll-objects` below for the methods supported by devpoll objects. - :c:func:`devpoll` objects are linked to the number of file + :c:func:`!devpoll` objects are linked to the number of file descriptors allowed at the time of instantiation. If your program - reduces this value, :c:func:`devpoll` will fail. If your program - increases this value, :c:func:`devpoll` may return an + reduces this value, :c:func:`!devpoll` will fail. If your program + increases this value, :c:func:`!devpoll` may return an incomplete list of active file descriptors. The new file descriptor is :ref:`non-inheritable `. @@ -62,7 +62,7 @@ The module defines the following: *sizehint* informs epoll about the expected number of events to be registered. It must be positive, or ``-1`` to use the default. It is only - used on older systems where :c:func:`epoll_create1` is not available; + used on older systems where :c:func:`!epoll_create1` is not available; otherwise it has no effect (though its value is still checked). *flags* is deprecated and completely ignored. However, when supplied, its @@ -117,7 +117,7 @@ The module defines the following: .. function:: select(rlist, wlist, xlist[, timeout]) - This is a straightforward interface to the Unix :c:func:`select` system call. + This is a straightforward interface to the Unix :c:func:`!select` system call. The first three arguments are iterables of 'waitable objects': either integers representing file descriptors or objects with a parameterless method named :meth:`~io.IOBase.fileno` returning such an integer: @@ -154,7 +154,7 @@ The module defines the following: .. index:: single: WinSock File objects on Windows are not acceptable, but sockets are. On Windows, - the underlying :c:func:`select` function is provided by the WinSock + the underlying :c:func:`!select` function is provided by the WinSock library, and does not handle file descriptors that don't originate from WinSock. @@ -169,7 +169,7 @@ The module defines the following: The minimum number of bytes which can be written without blocking to a pipe when the pipe has been reported as ready for writing by :func:`~select.select`, - :func:`poll` or another interface in this module. This doesn't apply + :func:`!poll` or another interface in this module. This doesn't apply to other kind of file-like objects such as sockets. This value is guaranteed by POSIX to be at least 512. 
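(Editorial aside, not part of the patch.) The :func:`!select` call discussed in the hunks above is typically used along these lines; a minimal sketch that waits up to five seconds for a listening socket to become readable::

    import select
    import socket

    server = socket.create_server(("127.0.0.1", 0))
    # Block until the socket is readable or the 5 second timeout expires.
    readable, _, _ = select.select([server], [], [], 5.0)
    if readable:
        conn, addr = server.accept()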
@@ -184,11 +184,11 @@ The module defines the following: ``/dev/poll`` Polling Objects ----------------------------- -Solaris and derivatives have ``/dev/poll``. While :c:func:`select` is -O(highest file descriptor) and :c:func:`poll` is O(number of file +Solaris and derivatives have ``/dev/poll``. While :c:func:`!select` is +O(highest file descriptor) and :c:func:`!poll` is O(number of file descriptors), ``/dev/poll`` is O(active file descriptors). -``/dev/poll`` behaviour is very close to the standard :c:func:`poll` +``/dev/poll`` behaviour is very close to the standard :c:func:`!poll` object. @@ -222,7 +222,7 @@ object. implement :meth:`!fileno`, so they can also be used as the argument. *eventmask* is an optional bitmask describing the type of events you want to - check for. The constants are the same that with :c:func:`poll` + check for. The constants are the same that with :c:func:`!poll` object. The default value is a combination of the constants :const:`POLLIN`, :const:`POLLPRI`, and :const:`POLLOUT`. @@ -231,7 +231,7 @@ object. Registering a file descriptor that's already registered is not an error, but the result is undefined. The appropriate action is to unregister or modify it first. This is an important difference - compared with :c:func:`poll`. + compared with :c:func:`!poll`. .. method:: devpoll.modify(fd[, eventmask]) @@ -376,13 +376,13 @@ Edge and Level Trigger Polling (epoll) Objects Polling Objects --------------- -The :c:func:`poll` system call, supported on most Unix systems, provides better +The :c:func:`!poll` system call, supported on most Unix systems, provides better scalability for network servers that service many, many clients at the same -time. :c:func:`poll` scales better because the system call only requires listing -the file descriptors of interest, while :c:func:`select` builds a bitmap, turns +time. :c:func:`!poll` scales better because the system call only requires listing +the file descriptors of interest, while :c:func:`!select` builds a bitmap, turns on bits for the fds of interest, and then afterward the whole bitmap has to be -linearly scanned again. :c:func:`select` is O(highest file descriptor), while -:c:func:`poll` is O(number of file descriptors). +linearly scanned again. :c:func:`!select` is O(highest file descriptor), while +:c:func:`!poll` is O(number of file descriptors). .. method:: poll.register(fd[, eventmask]) diff --git a/Doc/library/selectors.rst b/Doc/library/selectors.rst index 0deb15cf4c5037..dd50bac37e49b8 100644 --- a/Doc/library/selectors.rst +++ b/Doc/library/selectors.rst @@ -60,9 +60,9 @@ constants below: +-----------------------+-----------------------------------------------+ | Constant | Meaning | +=======================+===============================================+ - | :const:`EVENT_READ` | Available for read | + | .. data:: EVENT_READ | Available for read | +-----------------------+-----------------------------------------------+ - | :const:`EVENT_WRITE` | Available for write | + | .. data:: EVENT_WRITE | Available for write | +-----------------------+-----------------------------------------------+ @@ -132,8 +132,8 @@ constants below: Change a registered file object's monitored events or attached data. 
- This is equivalent to :meth:`BaseSelector.unregister(fileobj)` followed - by :meth:`BaseSelector.register(fileobj, events, data)`, except that it + This is equivalent to ``BaseSelector.unregister(fileobj)`` followed + by ``BaseSelector.register(fileobj, events, data)``, except that it can be implemented more efficiently. This returns a new :class:`SelectorKey` instance, or raises a diff --git a/Doc/library/shelve.rst b/Doc/library/shelve.rst index dc87af398ed757..01314f491f47a7 100644 --- a/Doc/library/shelve.rst +++ b/Doc/library/shelve.rst @@ -25,7 +25,7 @@ lots of shared sub-objects. The keys are ordinary strings. database file is opened for reading and writing. The optional *flag* parameter has the same interpretation as the *flag* parameter of :func:`dbm.open`. - By default, pickles created with :data:`pickle.DEFAULT_PROTOCOL` are used + By default, pickles created with :const:`pickle.DEFAULT_PROTOCOL` are used to serialize values. The version of the pickle protocol can be specified with the *protocol* parameter. @@ -42,7 +42,7 @@ lots of shared sub-objects. The keys are ordinary strings. mutated). .. versionchanged:: 3.10 - :data:`pickle.DEFAULT_PROTOCOL` is now used as the default pickle + :const:`pickle.DEFAULT_PROTOCOL` is now used as the default pickle protocol. .. versionchanged:: 3.11 @@ -119,7 +119,7 @@ Restrictions A subclass of :class:`collections.abc.MutableMapping` which stores pickled values in the *dict* object. - By default, pickles created with :data:`pickle.DEFAULT_PROTOCOL` are used + By default, pickles created with :const:`pickle.DEFAULT_PROTOCOL` are used to serialize values. The version of the pickle protocol can be specified with the *protocol* parameter. See the :mod:`pickle` documentation for a discussion of the pickle protocols. @@ -143,7 +143,7 @@ Restrictions Added context manager support. .. versionchanged:: 3.10 - :data:`pickle.DEFAULT_PROTOCOL` is now used as the default pickle + :const:`pickle.DEFAULT_PROTOCOL` is now used as the default pickle protocol. diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index 7f408be2336824..4390a8e22306fa 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -369,7 +369,7 @@ Directory and files operations If *copy_function* is given, it must be a callable that takes two arguments *src* and *dst*, and will be used to copy *src* to *dst* if :func:`os.rename` cannot be used. If the source is a directory, - :func:`copytree` is called, passing it the :func:`copy_function`. The + :func:`copytree` is called, passing it the *copy_function*. The default *copy_function* is :func:`copy2`. Using :func:`~shutil.copy` as the *copy_function* allows the move to succeed when it is not possible to also copy the metadata, at the expense of not copying any of the metadata. @@ -399,6 +399,12 @@ Directory and files operations total, used and free space, in bytes. *path* may be a file or a directory. + .. note:: + + On Unix filesystems, *path* must point to a path within a **mounted** + filesystem partition. On those platforms, CPython doesn't attempt to + retrieve disk usage information from non-mounted filesystems. + .. versionadded:: 3.3 .. versionchanged:: 3.8 @@ -431,7 +437,7 @@ Directory and files operations determining if the file exists and executable. When no *path* is specified, the results of :func:`os.environ` are used, - returning either the "PATH" value or a fallback of :attr:`os.defpath`. + returning either the "PATH" value or a fallback of :data:`os.defpath`. 
On Windows, the current directory is prepended to the *path* if *mode* does not include ``os.X_OK``. When the *mode* does include ``os.X_OK``, the diff --git a/Doc/library/signal.rst b/Doc/library/signal.rst index 523d1ac5001360..7ee5ece8859825 100644 --- a/Doc/library/signal.rst +++ b/Doc/library/signal.rst @@ -562,7 +562,7 @@ The :mod:`signal` module defines the following functions: Note that installing a signal handler with :func:`signal` will reset the restart behaviour to interruptible by implicitly calling - :c:func:`siginterrupt` with a true *flag* value for the given signal. + :c:func:`!siginterrupt` with a true *flag* value for the given signal. .. function:: signal(signalnum, handler) @@ -656,7 +656,7 @@ The :mod:`signal` module defines the following functions: .. function:: sigtimedwait(sigset, timeout) Like :func:`sigwaitinfo`, but takes an additional *timeout* argument - specifying a timeout. If *timeout* is specified as :const:`0`, a poll is + specifying a timeout. If *timeout* is specified as ``0``, a poll is performed. Returns :const:`None` if a timeout occurs. .. availability:: Unix. diff --git a/Doc/library/smtplib.rst b/Doc/library/smtplib.rst index f90274feb6bf9a..aaec2aa1ef1dbe 100644 --- a/Doc/library/smtplib.rst +++ b/Doc/library/smtplib.rst @@ -98,7 +98,7 @@ Protocol) and :rfc:`1869` (SMTP Service Extensions). .. versionchanged:: 3.4 The class now supports hostname check with :attr:`ssl.SSLContext.check_hostname` and *Server Name Indication* (see - :data:`ssl.HAS_SNI`). + :const:`ssl.HAS_SNI`). .. versionchanged:: 3.9 If the *timeout* parameter is set to be zero, it will raise a @@ -418,7 +418,7 @@ An :class:`SMTP` instance has the following methods: .. versionchanged:: 3.4 The method now supports hostname check with :attr:`SSLContext.check_hostname` and *Server Name Indicator* (see - :data:`~ssl.HAS_SNI`). + :const:`~ssl.HAS_SNI`). .. versionchanged:: 3.5 The error raised for lack of STARTTLS support is now the diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index f2408cb95ff314..4f220e8a098979 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -2252,7 +2252,7 @@ This is because the previous execution has left the socket in a ``TIME_WAIT`` state, and can't be immediately reused. There is a :mod:`socket` flag to set, in order to prevent this, -:data:`socket.SO_REUSEADDR`:: +:const:`socket.SO_REUSEADDR`:: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index 356888d64b8876..50344058c26041 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -299,7 +299,7 @@ Module functions Can be ``"DEFERRED"`` (default), ``"EXCLUSIVE"`` or ``"IMMEDIATE"``; or ``None`` to disable opening transactions implicitly. Has no effect unless :attr:`Connection.autocommit` is set to - :data:`~sqlite3.LEGACY_TRANSACTION_CONTROL` (the default). + :const:`~sqlite3.LEGACY_TRANSACTION_CONTROL` (the default). :type isolation_level: str | None :param bool check_same_thread: @@ -334,7 +334,7 @@ Module functions See :attr:`Connection.autocommit` and :ref:`sqlite3-transaction-control-autocommit` for more information. *autocommit* currently defaults to - :data:`~sqlite3.LEGACY_TRANSACTION_CONTROL`. + :const:`~sqlite3.LEGACY_TRANSACTION_CONTROL`. The default will change to ``False`` in a future Python release. :type autocommit: bool @@ -1818,9 +1818,9 @@ Blob objects .. 
method:: seek(offset, origin=os.SEEK_SET, /) Set the current access position of the blob to *offset*. The *origin* - argument defaults to :data:`os.SEEK_SET` (absolute blob positioning). - Other values for *origin* are :data:`os.SEEK_CUR` (seek relative to the - current position) and :data:`os.SEEK_END` (seek relative to the blob’s + argument defaults to :const:`os.SEEK_SET` (absolute blob positioning). + Other values for *origin* are :const:`os.SEEK_CUR` (seek relative to the + current position) and :const:`os.SEEK_END` (seek relative to the blob’s end). diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index 18a6c5ab4858a4..5d6bc829d68878 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -139,7 +139,7 @@ purposes. The settings are: :data:`PROTOCOL_TLS_CLIENT` or :data:`PROTOCOL_TLS_SERVER`, :data:`OP_NO_SSLv2`, and :data:`OP_NO_SSLv3` with high encryption cipher suites without RC4 and - without unauthenticated cipher suites. Passing :data:`~Purpose.SERVER_AUTH` + without unauthenticated cipher suites. Passing :const:`~Purpose.SERVER_AUTH` as *purpose* sets :data:`~SSLContext.verify_mode` to :data:`CERT_REQUIRED` and either loads CA certificates (when at least one of *cafile*, *capath* or *cadata* is given) or uses :meth:`SSLContext.load_default_certs` to load @@ -320,7 +320,7 @@ Random generation Mix the given *bytes* into the SSL pseudo-random number generator. The parameter *entropy* (a float) is a lower bound on the entropy contained in - string (so you can always use :const:`0.0`). See :rfc:`1750` for more + string (so you can always use ``0.0``). See :rfc:`1750` for more information on sources of entropy. .. versionchanged:: 3.5 @@ -1484,9 +1484,9 @@ to speed up repeated connections from the same clients. load CA certificates from other locations, too. The *purpose* flag specifies what kind of CA certificates are loaded. The - default settings :data:`Purpose.SERVER_AUTH` loads certificates, that are + default settings :const:`Purpose.SERVER_AUTH` loads certificates, that are flagged and trusted for TLS web server authentication (client side - sockets). :data:`Purpose.CLIENT_AUTH` loads CA certificates for client + sockets). :const:`Purpose.CLIENT_AUTH` loads CA certificates for client certificate verification on the server side. .. versionadded:: 3.4 @@ -1729,7 +1729,7 @@ to speed up repeated connections from the same clients. Wrap an existing Python socket *sock* and return an instance of :attr:`SSLContext.sslsocket_class` (default :class:`SSLSocket`). The returned SSL socket is tied to the context, its settings and certificates. - *sock* must be a :data:`~socket.SOCK_STREAM` socket; other + *sock* must be a :const:`~socket.SOCK_STREAM` socket; other socket types are unsupported. The parameter ``server_side`` is a boolean which identifies whether @@ -2592,7 +2592,7 @@ disabled by default. >>> client_context.maximum_version = ssl.TLSVersion.TLSv1_3 -The SSL context created above will only allow TLSv1.2 and later (if +The SSL context created above will only allow TLSv1.3 and later (if supported by your system) connections to a server. :const:`PROTOCOL_TLS_CLIENT` implies certificate validation and hostname checks by default. You have to load certificates into the context. 
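(Editorial sketch, not part of the patch.) Combining the points made in the ``ssl`` hunk above for a client: :func:`ssl.create_default_context` returns a :const:`PROTOCOL_TLS_CLIENT` context with certificate and hostname checks enabled and the system trust store loaded, and the TLS floor can then be raised; ``example.org`` is only a placeholder host::

    import socket
    import ssl

    ctx = ssl.create_default_context()            # verification on, default CA certs
    ctx.minimum_version = ssl.TLSVersion.TLSv1_3  # allow only TLS 1.3 and later
    with socket.create_connection(("example.org", 443)) as sock:
        with ctx.wrap_socket(sock, server_hostname="example.org") as tls:
            print(tls.version())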
diff --git a/Doc/library/stat.rst b/Doc/library/stat.rst index 083dc5e3bcfd68..77538514598a50 100644 --- a/Doc/library/stat.rst +++ b/Doc/library/stat.rst @@ -13,8 +13,8 @@ The :mod:`stat` module defines constants and functions for interpreting the results of :func:`os.stat`, :func:`os.fstat` and :func:`os.lstat` (if they -exist). For complete details about the :c:func:`stat`, :c:func:`fstat` and -:c:func:`lstat` calls, consult the documentation for your system. +exist). For complete details about the :c:func:`stat`, :c:func:`!fstat` and +:c:func:`!lstat` calls, consult the documentation for your system. .. versionchanged:: 3.4 The stat module is backed by a C implementation. @@ -89,9 +89,9 @@ mode: .. function:: S_IFMT(mode) Return the portion of the file's mode that describes the file type (used by the - :func:`S_IS\*` functions above). + :func:`!S_IS\*` functions above). -Normally, you would use the :func:`os.path.is\*` functions for testing the type +Normally, you would use the :func:`!os.path.is\*` functions for testing the type of a file; the functions here are useful when you are doing multiple tests of the same file and wish to avoid the overhead of the :c:func:`stat` system call for each test. These are also useful when checking for information about a file diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index fd51b1187576b1..6c07ee585480e9 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -44,7 +44,7 @@ Any object can be tested for truth value, for use in an :keyword:`if` or .. index:: single: true By default, an object is considered true unless its class defines either a -:meth:`__bool__` method that returns ``False`` or a :meth:`__len__` method that +:meth:`~object.__bool__` method that returns ``False`` or a :meth:`__len__` method that returns zero, when called with the object. [1]_ Here are most of the built-in objects considered false: @@ -5632,7 +5632,7 @@ From code, you can inspect the current limit and set a new one using these a getter and setter for the interpreter-wide limit. Subinterpreters have their own limit. -Information about the default and minimum can be found in :attr:`sys.int_info`: +Information about the default and minimum can be found in :data:`sys.int_info`: * :data:`sys.int_info.default_max_str_digits ` is the compiled-in default limit. diff --git a/Doc/library/struct.rst b/Doc/library/struct.rst index 6d2739b4557fbf..c94dfde4d55763 100644 --- a/Doc/library/struct.rst +++ b/Doc/library/struct.rst @@ -231,9 +231,9 @@ platform-dependent. | ``Q`` | :c:expr:`unsigned long | integer | 8 | \(2) | | | long` | | | | +--------+--------------------------+--------------------+----------------+------------+ -| ``n`` | :c:expr:`ssize_t` | integer | | \(3) | +| ``n`` | :c:type:`ssize_t` | integer | | \(3) | +--------+--------------------------+--------------------+----------------+------------+ -| ``N`` | :c:expr:`size_t` | integer | | \(3) | +| ``N`` | :c:type:`size_t` | integer | | \(3) | +--------+--------------------------+--------------------+----------------+------------+ | ``e`` | \(6) | float | 2 | \(4) | +--------+--------------------------+--------------------+----------------+------------+ diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 738e611c05adbf..04340cca9e4a59 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -465,9 +465,9 @@ functions. 
:func:`open` function when creating the stdin/stdout/stderr pipe file objects: - - :const:`0` means unbuffered (read and write are one + - ``0`` means unbuffered (read and write are one system call and can return short) - - :const:`1` means line buffered + - ``1`` means line buffered (only usable if ``text=True`` or ``universal_newlines=True``) - any other positive value means use a buffer of approximately that size @@ -477,7 +477,7 @@ functions. .. versionchanged:: 3.3.1 *bufsize* now defaults to -1 to enable buffering by default to match the behavior that most code expects. In versions prior to Python 3.2.4 and - 3.3.1 it incorrectly defaulted to :const:`0` which was unbuffered + 3.3.1 it incorrectly defaulted to ``0`` which was unbuffered and allowed short reads. This was unintentional and did not match the behavior of Python 2 as most code expected. @@ -541,8 +541,8 @@ functions. :exc:`RuntimeError`. The new restriction may affect applications that are deployed in mod_wsgi, uWSGI, and other embedded environments. - If *close_fds* is true, all file descriptors except :const:`0`, :const:`1` and - :const:`2` will be closed before the child process is executed. Otherwise + If *close_fds* is true, all file descriptors except ``0``, ``1`` and + ``2`` will be closed before the child process is executed. Otherwise when *close_fds* is false, file descriptors obey their inheritable flag as described in :ref:`fd_inheritance`. @@ -1610,7 +1610,7 @@ improves performance. If you ever encounter a presumed highly unusual situation where you need to prevent ``vfork()`` from being used by Python, you can set the -:attr:`subprocess._USE_VFORK` attribute to a false value. +:const:`subprocess._USE_VFORK` attribute to a false value. :: @@ -1618,7 +1618,7 @@ prevent ``vfork()`` from being used by Python, you can set the Setting this has no impact on use of ``posix_spawn()`` which could use ``vfork()`` internally within its libc implementation. There is a similar -:attr:`subprocess._USE_POSIX_SPAWN` attribute if you need to prevent use of +:const:`subprocess._USE_POSIX_SPAWN` attribute if you need to prevent use of that. :: diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index bacf8ceac5041e..33391d11ab392d 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -166,7 +166,7 @@ always available. Python interpreter. (This information is not available in any other way --- ``modules.keys()`` only lists the imported modules.) - See also the :attr:`sys.stdlib_module_names` list. + See also the :data:`sys.stdlib_module_names` list. .. function:: call_tracing(func, args) @@ -515,27 +515,28 @@ always available. The :term:`named tuple` *flags* exposes the status of command line flags. The attributes are read only. 
- ============================= ============================================================================================================== - attribute flag - ============================= ============================================================================================================== - :const:`debug` :option:`-d` - :const:`inspect` :option:`-i` - :const:`interactive` :option:`-i` - :const:`isolated` :option:`-I` - :const:`optimize` :option:`-O` or :option:`-OO` - :const:`dont_write_bytecode` :option:`-B` - :const:`no_user_site` :option:`-s` - :const:`no_site` :option:`-S` - :const:`ignore_environment` :option:`-E` - :const:`verbose` :option:`-v` - :const:`bytes_warning` :option:`-b` - :const:`quiet` :option:`-q` - :const:`hash_randomization` :option:`-R` - :const:`dev_mode` :option:`-X dev <-X>` (:ref:`Python Development Mode `) - :const:`utf8_mode` :option:`-X utf8 <-X>` - :const:`safe_path` :option:`-P` - :const:`int_max_str_digits` :option:`-X int_max_str_digits <-X>` (:ref:`integer string conversion length limitation `) - ============================= ============================================================================================================== + ============================== ============================================================================================================== + attribute flag + ============================== ============================================================================================================== + :const:`debug` :option:`-d` + :const:`inspect` :option:`-i` + :const:`interactive` :option:`-i` + :const:`isolated` :option:`-I` + :const:`optimize` :option:`-O` or :option:`-OO` + :const:`dont_write_bytecode` :option:`-B` + :const:`no_user_site` :option:`-s` + :const:`no_site` :option:`-S` + :const:`ignore_environment` :option:`-E` + :const:`verbose` :option:`-v` + :const:`bytes_warning` :option:`-b` + :const:`quiet` :option:`-q` + :const:`hash_randomization` :option:`-R` + :const:`dev_mode` :option:`-X dev <-X>` (:ref:`Python Development Mode `) + :const:`utf8_mode` :option:`-X utf8 <-X>` + :const:`safe_path` :option:`-P` + :const:`int_max_str_digits` :option:`-X int_max_str_digits <-X>` (:ref:`integer string conversion length limitation `) + :const:`warn_default_encoding` :option:`-X warn_default_encoding <-X>` + ============================== ============================================================================================================== .. versionchanged:: 3.2 Added ``quiet`` attribute for the new :option:`-q` flag. @@ -554,6 +555,9 @@ always available. Mode ` and the ``utf8_mode`` attribute for the new :option:`-X` ``utf8`` flag. + .. versionchanged:: 3.10 + Added ``warn_default_encoding`` attribute for :option:`-X` ``warn_default_encoding`` flag. + .. versionchanged:: 3.11 Added the ``safe_path`` attribute for :option:`-P` option. @@ -697,7 +701,7 @@ always available. Return the current value of the flags that are used for :c:func:`dlopen` calls. Symbolic names for the flag values can be found in the :mod:`os` module (``RTLD_xxx`` constants, e.g. - :data:`os.RTLD_LAZY`). + :const:`os.RTLD_LAZY`). .. availability:: Unix. @@ -766,6 +770,15 @@ always available. higher than you might expect, because it includes the (temporary) reference as an argument to :func:`getrefcount`. + Note that the returned value may not actually reflect how many + references to the object are actually held. 
For example, some + objects are "immortal" and have a very high refcount that does not + reflect the actual number of references. Consequently, do not rely + on the returned value to be accurate, other than a value of 0 or 1. + + .. versionchanged:: 3.12 + Immortal objects have very large refcounts that do not match + the actual number of references to the object. .. function:: getrecursionlimit() @@ -873,19 +886,19 @@ always available. ``sys.getwindowsversion().major``. For compatibility with prior versions, only the first 5 elements are retrievable by indexing. - *platform* will be :const:`2 (VER_PLATFORM_WIN32_NT)`. + *platform* will be ``2`` (VER_PLATFORM_WIN32_NT). *product_type* may be one of the following values: +---------------------------------------+---------------------------------+ | Constant | Meaning | +=======================================+=================================+ - | :const:`1 (VER_NT_WORKSTATION)` | The system is a workstation. | + | ``1`` (VER_NT_WORKSTATION) | The system is a workstation. | +---------------------------------------+---------------------------------+ - | :const:`2 (VER_NT_DOMAIN_CONTROLLER)` | The system is a domain | + | ``2`` (VER_NT_DOMAIN_CONTROLLER) | The system is a domain | | | controller. | +---------------------------------------+---------------------------------+ - | :const:`3 (VER_NT_SERVER)` | The system is a server, but not | + | ``3`` (VER_NT_SERVER) | The system is a server, but not | | | a domain controller. | +---------------------------------------+---------------------------------+ @@ -1287,20 +1300,20 @@ always available. ================ =========================== .. versionchanged:: 3.3 - On Linux, :attr:`sys.platform` doesn't contain the major version anymore. + On Linux, :data:`sys.platform` doesn't contain the major version anymore. It is always ``'linux'``, instead of ``'linux2'`` or ``'linux3'``. Since older Python versions include the version number, it is recommended to always use the ``startswith`` idiom presented above. .. versionchanged:: 3.8 - On AIX, :attr:`sys.platform` doesn't contain the major version anymore. + On AIX, :data:`sys.platform` doesn't contain the major version anymore. It is always ``'aix'``, instead of ``'aix5'`` or ``'aix7'``. Since older Python versions include the version number, it is recommended to always use the ``startswith`` idiom presented above. .. seealso:: - :attr:`os.name` has a coarser granularity. :func:`os.uname` gives + :data:`os.name` has a coarser granularity. :func:`os.uname` gives system-dependent version information. The :mod:`platform` module provides detailed checks for the @@ -1368,7 +1381,7 @@ always available. ``sys.setdlopenflags(0)``. To share symbols across extension modules, call as ``sys.setdlopenflags(os.RTLD_GLOBAL)``. Symbolic names for the flag values can be found in the :mod:`os` module (``RTLD_xxx`` constants, e.g. - :data:`os.RTLD_LAZY`). + :const:`os.RTLD_LAZY`). .. availability:: Unix. @@ -1743,7 +1756,7 @@ always available. ``email.mime`` sub-package and the ``email.message`` sub-module are not listed. - See also the :attr:`sys.builtin_module_names` list. + See also the :data:`sys.builtin_module_names` list. .. versionadded:: 3.10 diff --git a/Doc/library/sysconfig.rst b/Doc/library/sysconfig.rst index 839c2c015b49ae..c805c50ffc689f 100644 --- a/Doc/library/sysconfig.rst +++ b/Doc/library/sysconfig.rst @@ -69,7 +69,7 @@ Python uses an installation scheme that differs depending on the platform and on the installation options. 
These schemes are stored in :mod:`sysconfig` under unique identifiers based on the value returned by :const:`os.name`. -Every new component that is installed using :mod:`distutils` or a +Every new component that is installed using :mod:`!distutils` or a Distutils-based system will follow the same scheme to copy its file in the right places. diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index fd4820e78d68d1..00f3070324ec1e 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -938,7 +938,7 @@ reused in custom filters: Implements the ``'tar'`` filter. - - Strip leading slashes (``/`` and :attr:`os.sep`) from filenames. + - Strip leading slashes (``/`` and :data:`os.sep`) from filenames. - :ref:`Refuse ` to extract files with absolute paths (in case the name is absolute even after stripping slashes, e.g. ``C:/foo`` on Windows). @@ -947,7 +947,7 @@ reused in custom filters: path (after following symlinks) would end up outside the destination. This raises :class:`~tarfile.OutsideDestinationError`. - Clear high mode bits (setuid, setgid, sticky) and group/other write bits - (:attr:`~stat.S_IWGRP`|:attr:`~stat.S_IWOTH`). + (:const:`~stat.S_IWGRP`|:const:`~stat.S_IWOTH`). Return the modified ``TarInfo`` member. @@ -972,10 +972,10 @@ reused in custom filters: - For regular files, including hard links: - Set the owner read and write permissions - (:attr:`~stat.S_IRUSR`|:attr:`~stat.S_IWUSR`). + (:const:`~stat.S_IRUSR`|:const:`~stat.S_IWUSR`). - Remove the group & other executable permission - (:attr:`~stat.S_IXGRP`|:attr:`~stat.S_IXOTH`) - if the owner doesn’t have it (:attr:`~stat.S_IXUSR`). + (:const:`~stat.S_IXGRP`|:const:`~stat.S_IXOTH`) + if the owner doesn’t have it (:const:`~stat.S_IXUSR`). - For other files (directories), set ``mode`` to ``None``, so that extraction methods skip applying permission bits. diff --git a/Doc/library/tempfile.rst b/Doc/library/tempfile.rst index fd4c294613fd31..097f7087eccab9 100644 --- a/Doc/library/tempfile.rst +++ b/Doc/library/tempfile.rst @@ -59,7 +59,7 @@ The module defines the following user-callable items: platforms, it is a file-like object whose :attr:`!file` attribute is the underlying true file object. - The :py:data:`os.O_TMPFILE` flag is used if it is available and works + The :py:const:`os.O_TMPFILE` flag is used if it is available and works (Linux-specific, requires Linux kernel 3.11 or later). On platforms that are neither Posix nor Cygwin, TemporaryFile is an alias @@ -69,7 +69,7 @@ The module defines the following user-callable items: .. versionchanged:: 3.5 - The :py:data:`os.O_TMPFILE` flag is now used if available. + The :py:const:`os.O_TMPFILE` flag is now used if available. .. versionchanged:: 3.8 Added *errors* parameter. diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 1b045c7de83a80..de60151bb32ce1 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -472,7 +472,7 @@ The :mod:`test.support` module defines the following functions: .. function:: with_pymalloc() - Return :data:`_testcapi.WITH_PYMALLOC`. + Return :const:`_testcapi.WITH_PYMALLOC`. .. function:: requires(resource, msg=None) @@ -1040,7 +1040,7 @@ The :mod:`test.support` module defines the following classes: `SetErrorMode `_. On UNIX, :func:`resource.setrlimit` is used to set - :attr:`resource.RLIMIT_CORE`'s soft limit to 0 to prevent coredump file + :const:`resource.RLIMIT_CORE`'s soft limit to 0 to prevent coredump file creation. On both platforms, the old value is restored by :meth:`__exit__`. 
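Outside of the test helper, the same coredump suppression can be done directly with the :mod:`resource` module (a minimal sketch of the Unix side only; the restore step mirrors what :meth:`__exit__` does)::

    import resource

    # Remember the current limits so they can be restored afterwards.
    soft, hard = resource.getrlimit(resource.RLIMIT_CORE)

    # Set the soft limit to 0 so the process cannot write core dump files.
    resource.setrlimit(resource.RLIMIT_CORE, (0, hard))

    # ... run the code that might crash ...

    # Restore the previous soft limit.
    resource.setrlimit(resource.RLIMIT_CORE, (soft, hard))
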
diff --git a/Doc/library/textwrap.rst b/Doc/library/textwrap.rst index 1a9d5f98f78a7e..a150eefbf932ef 100644 --- a/Doc/library/textwrap.rst +++ b/Doc/library/textwrap.rst @@ -60,7 +60,7 @@ functions should be good enough; otherwise, you should use an instance of First the whitespace in *text* is collapsed (all whitespace is replaced by single spaces). If the result fits in the *width*, it is returned. Otherwise, enough words are dropped from the end so that the remaining words - plus the :attr:`placeholder` fit within :attr:`width`:: + plus the :attr:`.placeholder` fit within :attr:`.width`:: >>> textwrap.shorten("Hello world!", width=12) 'Hello world!' @@ -173,7 +173,7 @@ hyphenated words; only then will long words be broken if necessary, unless .. attribute:: expand_tabs (default: ``True``) If true, then all tab characters in *text* will be - expanded to spaces using the :meth:`expandtabs` method of *text*. + expanded to spaces using the :meth:`~str.expandtabs` method of *text*. .. attribute:: tabsize diff --git a/Doc/library/tkinter.rst b/Doc/library/tkinter.rst index 988b0cf3d70663..9f6c3e3e862c42 100644 --- a/Doc/library/tkinter.rst +++ b/Doc/library/tkinter.rst @@ -163,7 +163,7 @@ the modern themed widget set and API:: interpreter and calls :func:`exec` on the contents of :file:`.{className}.py` and :file:`.{baseName}.py`. The path for the profile files is the :envvar:`HOME` environment variable or, if that - isn't defined, then :attr:`os.curdir`. + isn't defined, then :data:`os.curdir`. .. attribute:: tk diff --git a/Doc/library/tkinter.ttk.rst b/Doc/library/tkinter.ttk.rst index 4ff2b2159c3622..9f2f9eb858afd4 100644 --- a/Doc/library/tkinter.ttk.rst +++ b/Doc/library/tkinter.ttk.rst @@ -102,7 +102,7 @@ themed widgets and is not supposed to be directly instantiated. Standard Options ^^^^^^^^^^^^^^^^ -All the :mod:`ttk` Widgets accepts the following options: +All the :mod:`ttk` Widgets accept the following options: .. tabularcolumns:: |l|L| diff --git a/Doc/library/token-list.inc b/Doc/library/token-list.inc index e885de88cad9ae..39df2927a0b7f2 100644 --- a/Doc/library/token-list.inc +++ b/Doc/library/token-list.inc @@ -207,10 +207,6 @@ .. data:: OP -.. data:: AWAIT - -.. data:: ASYNC - .. data:: TYPE_IGNORE .. data:: TYPE_COMMENT diff --git a/Doc/library/token.rst b/Doc/library/token.rst index 903847bb206d62..e6dc37d7ad852c 100644 --- a/Doc/library/token.rst +++ b/Doc/library/token.rst @@ -80,17 +80,21 @@ the :mod:`tokenize` module. .. versionchanged:: 3.5 - Added :data:`AWAIT` and :data:`ASYNC` tokens. + Added :data:`!AWAIT` and :data:`!ASYNC` tokens. .. versionchanged:: 3.7 Added :data:`COMMENT`, :data:`NL` and :data:`ENCODING` tokens. .. versionchanged:: 3.7 - Removed :data:`AWAIT` and :data:`ASYNC` tokens. "async" and "await" are + Removed :data:`!AWAIT` and :data:`!ASYNC` tokens. "async" and "await" are now tokenized as :data:`NAME` tokens. .. versionchanged:: 3.8 Added :data:`TYPE_COMMENT`, :data:`TYPE_IGNORE`, :data:`COLONEQUAL`. - Added :data:`AWAIT` and :data:`ASYNC` tokens back (they're needed + Added :data:`!AWAIT` and :data:`!ASYNC` tokens back (they're needed to support parsing older Python versions for :func:`ast.parse` with ``feature_version`` set to 6 or lower). + +.. versionchanged:: 3.13 + Removed :data:`!AWAIT` and :data:`!ASYNC` tokens again. 
+ diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst index c9ce955a6d2ba4..4c84ae6a820f02 100644 --- a/Doc/library/turtle.rst +++ b/Doc/library/turtle.rst @@ -19,9 +19,14 @@ Introduction ============ -Turtle graphics is a popular way for introducing programming to kids. It was -part of the original Logo programming language developed by Wally Feurzeig, -Seymour Papert and Cynthia Solomon in 1967. +Turtle graphics is an implementation of `the popular geometric drawing tools +introduced in Logo `_, developed by Wally Feurzeig, Seymour Papert and Cynthia Solomon +in 1967. + + +Get started +=========== Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it the command ``turtle.forward(15)``, and it moves (on-screen!) 15 pixels in the @@ -36,67 +41,261 @@ direction it is facing, drawing a line as it moves. Give it the command .. image:: turtle-star.* :align: center - .. literalinclude:: ../includes/turtle-star.py +In Python, turtle graphics provides a representation of a physical "turtle" +(a little robot with a pen) that draws on a sheet of paper on the floor. + +It's an effective and well-proven way for learners to encounter +programming concepts and interaction with software, as it provides instant, +visible feedback. It also provides convenient access to graphical output +in general. + +Turtle drawing was originally created as an educational tool, to be used by +teachers in the classroom. For the programmer who needs to produce some +graphical output it can be a way to do that without the overhead of +introducing more complex or external libraries into their work. + + +.. _turtle-tutorial: + +Tutorial +======== + +New users should start here. In this tutorial we'll explore some of the +basics of turtle drawing. + + +Starting a turtle environment +----------------------------- + +In a Python shell, import all the objects of the ``turtle`` module:: + + from turtle import * + +If you run into a ``No module named '_tkinter'`` error, you'll have to +install the :mod:`Tk interface package ` on your system. + + +Basic drawing +------------- + +Send the turtle forward 100 steps:: + + forward(100) + +You should see (most likely, in a new window on your display) a line +drawn by the turtle, heading East. Change the direction of the turtle, +so that it turns 120 degrees left (anti-clockwise):: + + left(120) + +Let's continue by drawing a triangle:: + + forward(100) + left(120) + forward(100) + +Notice how the turtle, represented by an arrow, points in different +directions as you steer it. + +Experiment with those commands, and also with ``backward()`` and +``right()``. + + +Pen control +~~~~~~~~~~~ + +Try changing the color - for example, ``color('blue')`` - and +width of the line - for example, ``width(3)`` - and then drawing again. + +You can also move the turtle around without drawing, by lifting up the pen: +``up()`` before moving. To start drawing again, use ``down()``. + + +The turtle's position +~~~~~~~~~~~~~~~~~~~~~ + +Send your turtle back to its starting-point (useful if it has disappeared +off-screen):: + + home() + +The home position is at the center of the turtle's screen. If you ever need to +know them, get the turtle's x-y co-ordinates with:: + + pos() + +Home is at ``(0, 0)``. 
+ +And after a while, it will probably help to clear the window so we can start +anew:: + + clearscreen() + + +Making algorithmic patterns +--------------------------- + +Using loops, it's possible to build up geometric patterns:: + + for steps in range(100): + for c in ('blue', 'red', 'green'): + color(c) + forward(steps) + right(30) + + +\ - which of course, are limited only by the imagination! + +Let's draw the star shape at the top of this page. We want red lines, +filled in with yellow:: + + color('red') + fillcolor('yellow') -By combining together these and similar commands, intricate shapes and pictures -can easily be drawn. +Just as ``up()`` and ``down()`` determine whether lines will be drawn, +filling can be turned on and off:: -The :mod:`turtle` module is an extended reimplementation of the same-named -module from the Python standard distribution up to version Python 2.5. + begin_fill() -It tries to keep the merits of the old turtle module and to be (nearly) 100% -compatible with it. This means in the first place to enable the learning -programmer to use all the commands, classes and methods interactively when using -the module from within IDLE run with the ``-n`` switch. +Next we'll create a loop:: -The turtle module provides turtle graphics primitives, in both object-oriented -and procedure-oriented ways. Because it uses :mod:`tkinter` for the underlying -graphics, it needs a version of Python installed with Tk support. + while True: + forward(200) + left(170) + if abs(pos()) < 1: + break -The object-oriented interface uses essentially two+two classes: +``abs(pos()) < 1`` is a good way to know when the turtle is back at its +home position. -1. The :class:`TurtleScreen` class defines graphics windows as a playground for - the drawing turtles. Its constructor needs a :class:`tkinter.Canvas` or a - :class:`ScrolledCanvas` as argument. It should be used when :mod:`turtle` is - used as part of some application. +Finally, complete the filling:: - The function :func:`Screen` returns a singleton object of a - :class:`TurtleScreen` subclass. This function should be used when - :mod:`turtle` is used as a standalone tool for doing graphics. - As a singleton object, inheriting from its class is not possible. + end_fill() - All methods of TurtleScreen/Screen also exist as functions, i.e. as part of - the procedure-oriented interface. +(Note that filling only actually takes place when you give the +``end_fill()`` command.) -2. :class:`RawTurtle` (alias: :class:`RawPen`) defines Turtle objects which draw - on a :class:`TurtleScreen`. Its constructor needs a Canvas, ScrolledCanvas - or TurtleScreen as argument, so the RawTurtle objects know where to draw. - Derived from RawTurtle is the subclass :class:`Turtle` (alias: :class:`Pen`), - which draws on "the" :class:`Screen` instance which is automatically - created, if not already present. +.. _turtle-how-to: - All methods of RawTurtle/Turtle also exist as functions, i.e. part of the - procedure-oriented interface. +How to... +========= -The procedural interface provides functions which are derived from the methods -of the classes :class:`Screen` and :class:`Turtle`. They have the same names as -the corresponding methods. A screen object is automatically created whenever a -function derived from a Screen method is called. An (unnamed) turtle object is -automatically created whenever any of the functions derived from a Turtle method -is called. +This section covers some typical turtle use-cases and approaches. 
-To use multiple turtles on a screen one has to use the object-oriented interface. + +Get started as quickly as possible +---------------------------------- + +One of the joys of turtle graphics is the immediate, visual feedback that's +available from simple commands - it's an excellent way to introduce children +to programming ideas, with a minimum of overhead (not just children, of +course). + +The turtle module makes this possible by exposing all its basic functionality +as functions, available with ``from turtle import *``. The :ref:`turtle +graphics tutorial ` covers this approach. + +It's worth noting that many of the turtle commands also have even more terse +equivalents, such as ``fd()`` for :func:`forward`. These are especially +useful when working with learners for whom typing is not a skill. + +.. _note: + + You'll need to have the :mod:`Tk interface package ` installed on + your system for turtle graphics to work. Be warned that this is not + always straightforward, so check this in advance if you're planning to + use turtle graphics with a learner. + + +Use the ``turtle`` module namespace +----------------------------------- + +Using ``from turtle import *`` is convenient - but be warned that it imports a +rather large collection of objects, and if you're doing anything but turtle +graphics you run the risk of a name conflict (this becomes even more an issue +if you're using turtle graphics in a script where other modules might be +imported). + +The solution is to use ``import turtle`` - ``fd()`` becomes +``turtle.fd()``, ``width()`` becomes ``turtle.width()`` and so on. (If typing +"turtle" over and over again becomes tedious, use for example ``import turtle +as t`` instead.) + + +Use turtle graphics in a script +------------------------------- + +It's recommended to use the ``turtle`` module namespace as described +immediately above, for example:: + + import turtle as t + from random import random + + for i in range(100): + steps = int(random() * 100) + angle = int(random() * 360) + t.right(angle) + t.fd(steps) + +Another step is also required though - as soon as the script ends, Python +will also close the turtle's window. Add:: + + t.mainloop() + +to the end of the script. The script will now wait to be dismissed and +will not exit until it is terminated, for example by closing the turtle +graphics window. + + +Use object-oriented turtle graphics +----------------------------------- + +.. seealso:: :ref:`Explanation of the object-oriented interface ` + +Other than for very basic introductory purposes, or for trying things out +as quickly as possible, it's more usual and much more powerful to use the +object-oriented approach to turtle graphics. For example, this allows +multiple turtles on screen at once. + +In this approach, the various turtle commands are methods of objects (mostly of +``Turtle`` objects). You *can* use the object-oriented approach in the shell, +but it would be more typical in a Python script. + +The example above then becomes:: + + from turtle import Turtle + from random import random + + t = Turtle() + for i in range(100): + steps = int(random() * 100) + angle = int(random() * 360) + t.right(angle) + t.fd(steps) + + t.screen.mainloop() + +Note the last line. ``t.screen`` is an instance of the :class:`Screen` +that a Turtle instance exists on; it's created automatically along with +the turtle. 
+ +The turtle's screen can be customised, for example:: + + t.screen.title('Object-oriented turtle demo') + t.screen.bgcolor("orange") + + +Turtle graphics reference +========================= .. note:: + In the following documentation the argument list for functions is given. Methods, of course, have the additional first argument *self* which is omitted here. -Overview of available Turtle and Screen methods -================================================= - Turtle methods -------------- @@ -2201,6 +2400,41 @@ Public classes * ``a.rotate(angle)`` rotation +.. _turtle-explanation: + +Explanation +=========== + +A turtle object draws on a screen object, and there a number of key classes in +the turtle object-oriented interface that can be used to create them and relate +them to each other. + +A :class:`Turtle` instance will automatically create a :class:`Screen` +instance if one is not already present. + +``Turtle`` is a subclass of :class:`RawTurtle`, which *doesn't* automatically +create a drawing surface - a *canvas* will need to be provided or created for +it. The *canvas* can be a :class:`tkinter.Canvas`, :class:`ScrolledCanvas` +or :class:`TurtleScreen`. + + +:class:`TurtleScreen` is the basic drawing surface for a +turtle. :class:`Screen` is a subclass of ``TurtleScreen``, and +includes :ref:`some additional methods ` for managing its +appearance (including size and title) and behaviour. ``TurtleScreen``'s +constructor needs a :class:`tkinter.Canvas` or a +:class:`ScrolledCanvas` as an argument. + +The functional interface for turtle graphics uses the various methods of +``Turtle`` and ``TurtleScreen``/``Screen``. Behind the scenes, a screen +object is automatically created whenever a function derived from a ``Screen`` +method is called. Similarly, a turtle object is automatically created +whenever any of the functions derived from a Turtle method is called. + +To use multiple turtles on a screen, the object-oriented interface must be +used. + + Help and configuration ====================== diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index 0265a39ce646f4..fad945ffc8210a 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -849,6 +849,31 @@ using ``[]``. concat(b"foo", b"bar") # OK, output has type 'bytes' concat("foo", b"bar") # Error, cannot mix str and bytes + Note that, despite its name, ``AnyStr`` has nothing to do with the + :class:`Any` type, nor does it mean "any string". In particular, ``AnyStr`` + and ``str | bytes`` are different from each other and have different use + cases:: + + # Invalid use of AnyStr: + # The type variable is used only once in the function signature, + # so cannot be "solved" by the type checker + def greet_bad(cond: bool) -> AnyStr: + return "hi there!" if cond else b"greetings!" + + # The better way of annotating this function: + def greet_proper(cond: bool) -> str | bytes: + return "hi there!" if cond else b"greetings!" + + .. deprecated-removed:: 3.13 3.18 + Deprecated in favor of the new :ref:`type parameter syntax `. + Use ``class A[T: (str, bytes)]: ...`` instead of importing ``AnyStr``. See + :pep:`695` for more details. + + In Python 3.16, ``AnyStr`` will be removed from ``typing.__all__``, and + deprecation warnings will be emitted at runtime when it is accessed or + imported from ``typing``. ``AnyStr`` will be removed from ``typing`` + in Python 3.18. + .. data:: LiteralString Special type that includes only literal strings. @@ -938,13 +963,17 @@ using ``[]``. 
For example:: - from typing import Self + from typing import Self, reveal_type class Foo: def return_self(self) -> Self: ... return self + class SubclassOfFoo(Foo): pass + + reveal_type(Foo().return_self()) # Revealed type is "Foo" + reveal_type(SubclassOfFoo().return_self()) # Revealed type is "SubclassOfFoo" This annotation is semantically equivalent to the following, albeit in a more succinct fashion:: @@ -958,15 +987,11 @@ using ``[]``. ... return self - In general if something currently follows the pattern of:: - - class Foo: - def return_self(self) -> "Foo": - ... - return self - - You should use :data:`Self` as calls to ``SubclassOfFoo.return_self`` would have - ``Foo`` as the return type and not ``SubclassOfFoo``. + In general, if something returns ``self``, as in the above examples, you + should use ``Self`` as the return annotation. If ``Foo.return_self`` was + annotated as returning ``"Foo"``, then the type checker would infer the + object returned from ``SubclassOfFoo.return_self`` as being of type ``Foo`` + rather than ``SubclassOfFoo``. Other common use cases include: @@ -974,6 +999,17 @@ using ``[]``. of the ``cls`` parameter. - Annotating an :meth:`~object.__enter__` method which returns self. + You should not use ``Self`` as the return annotation if the method is not + guaranteed to return an instance of a subclass when the class is + subclassed:: + + class Eggs: + # Self would be an incorrect return annotation here, + # as the object returned is always an instance of Eggs, + # even in subclasses + def returns_eggs(self) -> "Eggs": + return Eggs() + See :pep:`673` for more details. .. versionadded:: 3.11 @@ -2329,9 +2365,6 @@ types. class XZ(X, Z): pass # raises TypeError - T = TypeVar('T') - class XT(X, Generic[T]): pass # raises TypeError - A ``TypedDict`` can be generic:: class Group[T](TypedDict): @@ -3688,3 +3721,7 @@ convenience. This is subject to change, and not all deprecations are listed. - 3.13 - 3.15 - :gh:`106309` + * - :data:`typing.AnyStr` + - 3.13 + - 3.18 + - :gh:`105578` diff --git a/Doc/library/unittest.mock.rst b/Doc/library/unittest.mock.rst index 6d5f17d1c2c5cd..836fd42ab71c81 100644 --- a/Doc/library/unittest.mock.rst +++ b/Doc/library/unittest.mock.rst @@ -2485,7 +2485,7 @@ behaviour you can switch it off by setting the module level switch Alternatively you can just use ``vars(my_mock)`` (instance members) and ``dir(type(my_mock))`` (type members) to bypass the filtering irrespective of -:data:`mock.FILTER_DIR`. +:const:`mock.FILTER_DIR`. mock_open diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index b26e6c0e6bc024..518bf6b13bad54 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -1134,7 +1134,7 @@ Test cases If given, *level* should be either a numeric logging level or its string equivalent (for example either ``"ERROR"`` or - :attr:`logging.ERROR`). The default is :attr:`logging.INFO`. + :const:`logging.ERROR`). The default is :const:`logging.INFO`. The test passes if at least one message emitted inside the ``with`` block matches the *logger* and *level* conditions, otherwise it fails. @@ -1175,7 +1175,7 @@ Test cases If given, *level* should be either a numeric logging level or its string equivalent (for example either ``"ERROR"`` or - :attr:`logging.ERROR`). The default is :attr:`logging.INFO`. + :const:`logging.ERROR`). The default is :const:`logging.INFO`. Unlike :meth:`assertLogs`, nothing will be returned by the context manager. 
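For instance, a test that expects an ``ERROR``-level record might look like this (a minimal sketch; the logger name ``"example"`` is illustrative)::

    import logging
    import unittest

    class LoggingTests(unittest.TestCase):
        def test_error_is_logged(self):
            # Passes if at least one record of level ERROR or higher is
            # emitted on the "example" logger inside the block.
            with self.assertLogs("example", level=logging.ERROR) as cm:
                logging.getLogger("example").error("boom")
            self.assertEqual(cm.output, ["ERROR:example:boom"])
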
diff --git a/Doc/library/urllib.error.rst b/Doc/library/urllib.error.rst index 3adbdd26132273..a5bcb5b1e643bf 100644 --- a/Doc/library/urllib.error.rst +++ b/Doc/library/urllib.error.rst @@ -72,6 +72,8 @@ The following exceptions are raised by :mod:`urllib.error` as appropriate: This exception is raised when the :func:`~urllib.request.urlretrieve` function detects that the amount of the downloaded data is less than the expected amount (given by - the *Content-Length* header). The :attr:`content` attribute stores the - downloaded (and supposedly truncated) data. + the *Content-Length* header). + .. attribute:: content + + The downloaded (and supposedly truncated) data. diff --git a/Doc/library/urllib.request.rst b/Doc/library/urllib.request.rst index 7e79871bbd6077..a672d8753b7f2f 100644 --- a/Doc/library/urllib.request.rst +++ b/Doc/library/urllib.request.rst @@ -91,7 +91,7 @@ The :mod:`urllib.request` module defines the following functions: .. versionchanged:: 3.2 HTTPS virtual hosts are now supported if possible (that is, if - :data:`ssl.HAS_SNI` is true). + :const:`ssl.HAS_SNI` is true). .. versionadded:: 3.2 *data* can be an iterable object. diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst index 9e5672545dea35..2482441d649790 100644 --- a/Doc/library/venv.rst +++ b/Doc/library/venv.rst @@ -60,7 +60,7 @@ running from a virtual environment. A virtual environment may be "activated" using a script in its binary directory (``bin`` on POSIX; ``Scripts`` on Windows). -This will prepend that directory to your :envvar:`!PATH`, so that running +This will prepend that directory to your :envvar:`PATH`, so that running :program:`python` will invoke the environment's Python interpreter and you can run installed scripts without having to use their full path. The invocation of the activation script is platform-specific @@ -100,10 +100,10 @@ In order to achieve this, scripts installed into virtual environments have a "shebang" line which points to the environment's Python interpreter, i.e. :samp:`#!/{}/bin/python`. This means that the script will run with that interpreter regardless of the -value of :envvar:`!PATH`. On Windows, "shebang" line processing is supported if +value of :envvar:`PATH`. On Windows, "shebang" line processing is supported if you have the :ref:`launcher` installed. Thus, double-clicking an installed script in a Windows Explorer window should run it with the correct interpreter -without the environment needing to be activated or on the :envvar:`!PATH`. +without the environment needing to be activated or on the :envvar:`PATH`. When a virtual environment has been activated, the :envvar:`!VIRTUAL_ENV` environment variable is set to the path of the environment. diff --git a/Doc/library/webbrowser.rst b/Doc/library/webbrowser.rst index b6762f78830a5f..4667b81e38ada2 100644 --- a/Doc/library/webbrowser.rst +++ b/Doc/library/webbrowser.rst @@ -20,7 +20,7 @@ will be used if graphical browsers are not available or an X11 display isn't available. If text-mode browsers are used, the calling process will block until the user exits the browser. -If the environment variable :envvar:`!BROWSER` exists, it is interpreted as the +If the environment variable :envvar:`BROWSER` exists, it is interpreted as the :data:`os.pathsep`-separated list of browsers to try ahead of the platform defaults. 
When the value of a list part contains the string ``%s``, then it is interpreted as a literal browser command line to be used with the argument URL @@ -97,7 +97,7 @@ The following functions are defined: Setting *preferred* to ``True`` makes this browser a preferred result for a :func:`get` call with no argument. Otherwise, this entry point is only - useful if you plan to either set the :envvar:`!BROWSER` variable or call + useful if you plan to either set the :envvar:`BROWSER` variable or call :func:`get` with a nonempty argument matching the name of a handler you declare. @@ -224,4 +224,4 @@ module-level convenience functions: .. rubric:: Footnotes .. [1] Executables named here without a full path will be searched in the - directories given in the :envvar:`!PATH` environment variable. + directories given in the :envvar:`PATH` environment variable. diff --git a/Doc/library/winreg.rst b/Doc/library/winreg.rst index 4ab671817710dd..06bd4d87eb03c6 100644 --- a/Doc/library/winreg.rst +++ b/Doc/library/winreg.rst @@ -288,7 +288,7 @@ This module offers the following functions: table (FAT) file system, the filename may not have an extension. A call to :func:`LoadKey` fails if the calling process does not have the - :const:`SE_RESTORE_PRIVILEGE` privilege. Note that privileges are different + :c:data:`!SE_RESTORE_PRIVILEGE` privilege. Note that privileges are different from permissions -- see the `RegLoadKey documentation `__ for more details. @@ -414,7 +414,7 @@ This module offers the following functions: If *key* represents a key on a remote computer, the path described by *file_name* is relative to the remote computer. The caller of this method must - possess the :const:`SeBackupPrivilege` security privilege. Note that + possess the **SeBackupPrivilege** security privilege. Note that privileges are different than permissions -- see the `Conflicts Between User Rights and Permissions documentation `__ @@ -536,7 +536,7 @@ This module offers the following functions: Constants ------------------ -The following constants are defined for use in many :mod:`_winreg` functions. +The following constants are defined for use in many :mod:`winreg` functions. .. _hkey-constants: @@ -745,7 +745,7 @@ All registry functions in this module return one of these objects. All registry functions in this module which accept a handle object also accept an integer, however, use of the handle object is encouraged. -Handle objects provide semantics for :meth:`__bool__` -- thus :: +Handle objects provide semantics for :meth:`~object.__bool__` -- thus :: if handle: print("Yes") diff --git a/Doc/library/winsound.rst b/Doc/library/winsound.rst index 372f792a0f938e..370c5216652ba7 100644 --- a/Doc/library/winsound.rst +++ b/Doc/library/winsound.rst @@ -24,7 +24,7 @@ provided by Windows platforms. It includes functions and several constants. .. function:: PlaySound(sound, flags) - Call the underlying :c:func:`PlaySound` function from the Platform API. The + Call the underlying :c:func:`!PlaySound` function from the Platform API. The *sound* parameter may be a filename, a system sound alias, audio data as a :term:`bytes-like object`, or ``None``. Its interpretation depends on the value of *flags*, which can be a bitwise ORed @@ -35,7 +35,7 @@ provided by Windows platforms. It includes functions and several constants. .. function:: MessageBeep(type=MB_OK) - Call the underlying :c:func:`MessageBeep` function from the Platform API. This + Call the underlying :c:func:`!MessageBeep` function from the Platform API. 
This plays a sound as specified in the registry. The *type* argument specifies which sound to play; possible values are ``-1``, ``MB_ICONASTERISK``, ``MB_ICONEXCLAMATION``, ``MB_ICONHAND``, ``MB_ICONQUESTION``, and ``MB_OK``, all diff --git a/Doc/library/xml.rst b/Doc/library/xml.rst index 20b0905bb1093a..1e49b6568dfc28 100644 --- a/Doc/library/xml.rst +++ b/Doc/library/xml.rst @@ -73,12 +73,12 @@ decompression bomb Safe Safe Safe 1. Expat 2.4.1 and newer is not vulnerable to the "billion laughs" and "quadratic blowup" vulnerabilities. Items still listed as vulnerable due to potential reliance on system-provided libraries. Check - :data:`pyexpat.EXPAT_VERSION`. + :const:`pyexpat.EXPAT_VERSION`. 2. :mod:`xml.etree.ElementTree` doesn't expand external entities and raises a - :exc:`ParserError` when an entity occurs. + :exc:`~xml.etree.ElementTree.ParseError` when an entity occurs. 3. :mod:`xml.dom.minidom` doesn't expand external entities and simply returns the unexpanded entity verbatim. -4. :mod:`xmlrpclib` doesn't expand external entities and omits them. +4. :mod:`xmlrpc.client` doesn't expand external entities and omits them. 5. Since Python 3.7.1, external general entities are no longer processed by default. @@ -119,8 +119,8 @@ all known attack vectors with examples and references. .. _defusedxml-package: -The :mod:`defusedxml` Package ------------------------------------------------------- +The :mod:`!defusedxml` Package +------------------------------ `defusedxml`_ is a pure Python package with modified subclasses of all stdlib XML parsers that prevent any potentially malicious operation. Use of this diff --git a/Doc/library/xml.sax.handler.rst b/Doc/library/xml.sax.handler.rst index 719ce5ab1bcf65..e2f28e3244cb09 100644 --- a/Doc/library/xml.sax.handler.rst +++ b/Doc/library/xml.sax.handler.rst @@ -393,7 +393,7 @@ implements this interface, then register the object with your :class:`~xml.sax.xmlreader.XMLReader`, the parser will call the methods in your object to report all warnings and errors. There are three levels of errors available: warnings, (possibly) recoverable errors, -and unrecoverable errors. All methods take a :exc:`SAXParseException` as the +and unrecoverable errors. All methods take a :exc:`~xml.sax.SAXParseException` as the only parameter. Errors and warnings may be converted to an exception by raising the passed-in exception object. diff --git a/Doc/library/xml.sax.utils.rst b/Doc/library/xml.sax.utils.rst index ab4606bcf9fe6c..e57e76dcac7820 100644 --- a/Doc/library/xml.sax.utils.rst +++ b/Doc/library/xml.sax.utils.rst @@ -92,5 +92,5 @@ or as base classes. reading. The input source can be given as a string, a file-like object, or an :class:`~xml.sax.xmlreader.InputSource` object; parsers will use this function to implement the polymorphic *source* argument to their - :meth:`parse` method. + :meth:`~xml.sax.xmlreader.XMLReader.parse` method. diff --git a/Doc/library/xmlrpc.rst b/Doc/library/xmlrpc.rst index ae68157b0f63c1..5f0a2cf68d01f9 100644 --- a/Doc/library/xmlrpc.rst +++ b/Doc/library/xmlrpc.rst @@ -1,5 +1,5 @@ -:mod:`xmlrpc` --- XMLRPC server and client modules -================================================== +:mod:`!xmlrpc` --- XMLRPC server and client modules +=================================================== XML-RPC is a Remote Procedure Call method that uses XML passed via HTTP as a transport. 
With it, a client can call methods with parameters on a remote diff --git a/Doc/license.rst b/Doc/license.rst index 1dadb6264a0059..be8efa70611378 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -659,134 +659,186 @@ The modules :mod:`hashlib`, :mod:`posix` and :mod:`ssl` use the OpenSSL library for added performance if made available by the operating system. Additionally, the Windows and macOS installers for Python may include a copy of the OpenSSL libraries, so we include a copy -of the OpenSSL license here:: - - - LICENSE ISSUES - ============== - - The OpenSSL toolkit stays under a dual license, i.e. both the conditions of - the OpenSSL License and the original SSLeay license apply to the toolkit. - See below for the actual license texts. Actually both licenses are BSD-style - Open Source licenses. In case of any license issues related to OpenSSL - please contact openssl-core@openssl.org. - - OpenSSL License - --------------- - - /* ==================================================================== - * Copyright (c) 1998-2008 The OpenSSL Project. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided with the - * distribution. - * - * 3. All advertising materials mentioning features or use of this - * software must display the following acknowledgment: - * "This product includes software developed by the OpenSSL Project - * for use in the OpenSSL Toolkit. (http://www.openssl.org/)" - * - * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to - * endorse or promote products derived from this software without - * prior written permission. For written permission, please contact - * openssl-core@openssl.org. - * - * 5. Products derived from this software may not be called "OpenSSL" - * nor may "OpenSSL" appear in their names without prior written - * permission of the OpenSSL Project. - * - * 6. Redistributions of any form whatsoever must retain the following - * acknowledgment: - * "This product includes software developed by the OpenSSL Project - * for use in the OpenSSL Toolkit (http://www.openssl.org/)" - * - * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY - * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR - * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR - * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT - * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; - * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - * OF THE POSSIBILITY OF SUCH DAMAGE. - * ==================================================================== - * - * This product includes cryptographic software written by Eric Young - * (eay@cryptsoft.com). 
This product includes software written by Tim - * Hudson (tjh@cryptsoft.com). - * - */ - - Original SSLeay License - ----------------------- - - /* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) - * All rights reserved. - * - * This package is an SSL implementation written - * by Eric Young (eay@cryptsoft.com). - * The implementation was written so as to conform with Netscapes SSL. - * - * This library is free for commercial and non-commercial use as long as - * the following conditions are aheared to. The following conditions - * apply to all code found in this distribution, be it the RC4, RSA, - * lhash, DES, etc., code; not just the SSL code. The SSL documentation - * included with this distribution is covered by the same copyright terms - * except that the holder is Tim Hudson (tjh@cryptsoft.com). - * - * Copyright remains Eric Young's, and as such any Copyright notices in - * the code are not to be removed. - * If this package is used in a product, Eric Young should be given attribution - * as the author of the parts of the library used. - * This can be in the form of a textual message at program startup or - * in documentation (online or textual) provided with the package. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * 1. Redistributions of source code must retain the copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. All advertising materials mentioning features or use of this software - * must display the following acknowledgement: - * "This product includes cryptographic software written by - * Eric Young (eay@cryptsoft.com)" - * The word 'cryptographic' can be left out if the rouines from the library - * being used are not cryptographic related :-). - * 4. If you include any Windows specific code (or a derivative thereof) from - * the apps directory (application code) you must include an acknowledgement: - * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" - * - * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS - * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY - * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF - * SUCH DAMAGE. - * - * The licence and distribution terms for any publically available version or - * derivative of this code cannot be changed. i.e. this code cannot simply be - * copied and put under another distribution licence - * [including the GNU Public Licence.] - */ +of the OpenSSL license here. 
For the OpenSSL 3.0 release, +and later releases derived from that, the Apache License v2 applies:: + + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS expat diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index 6d30eccab1990f..12ad18d4119617 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -1840,7 +1840,7 @@ like ``TYPE_PARAMS_OF_ListOrSet`` are not actually bound at runtime. * a class that inherits from :class:`collections.abc.Sequence` * a Python class that has been registered as :class:`collections.abc.Sequence` - * a builtin class that has its (CPython) :data:`Py_TPFLAGS_SEQUENCE` bit set + * a builtin class that has its (CPython) :c:macro:`Py_TPFLAGS_SEQUENCE` bit set * a class that inherits from any of the above The following standard library classes are sequences: @@ -1859,7 +1859,7 @@ like ``TYPE_PARAMS_OF_ListOrSet`` are not actually bound at runtime. 
* a class that inherits from :class:`collections.abc.Mapping` * a Python class that has been registered as :class:`collections.abc.Mapping` - * a builtin class that has its (CPython) :data:`Py_TPFLAGS_MAPPING` bit set + * a builtin class that has its (CPython) :c:macro:`Py_TPFLAGS_MAPPING` bit set * a class that inherits from any of the above The standard library classes :class:`dict` and :class:`types.MappingProxyType` diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst index 8a10a34347c2de..229fa696c9142f 100644 --- a/Doc/reference/datamodel.rst +++ b/Doc/reference/datamodel.rst @@ -1594,9 +1594,9 @@ Basic customization Called to implement truth value testing and the built-in operation ``bool()``; should return ``False`` or ``True``. When this method is not - defined, :meth:`__len__` is called, if it is defined, and the object is + defined, :meth:`~object.__len__` is called, if it is defined, and the object is considered true if its result is nonzero. If a class defines neither - :meth:`__len__` nor :meth:`__bool__`, all its instances are considered + :meth:`!__len__` nor :meth:`!__bool__`, all its instances are considered true. @@ -2494,16 +2494,16 @@ through the object's keys; for sequences, it should iterate through the values. Called to implement the built-in function :func:`len`. Should return the length of the object, an integer ``>=`` 0. Also, an object that doesn't define a - :meth:`__bool__` method and whose :meth:`__len__` method returns zero is + :meth:`~object.__bool__` method and whose :meth:`!__len__` method returns zero is considered to be false in a Boolean context. .. impl-detail:: - In CPython, the length is required to be at most :attr:`sys.maxsize`. - If the length is larger than :attr:`!sys.maxsize` some features (such as + In CPython, the length is required to be at most :data:`sys.maxsize`. + If the length is larger than :data:`!sys.maxsize` some features (such as :func:`len`) may raise :exc:`OverflowError`. To prevent raising :exc:`!OverflowError` by truth value testing, an object must define a - :meth:`__bool__` method. + :meth:`~object.__bool__` method. .. method:: object.__length_hint__(self) diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index ce1c9a59d58353..8e0346ccc718de 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -1724,7 +1724,7 @@ control flow statements, the following values are interpreted as false: ``False``, ``None``, numeric zero of all types, and empty strings and containers (including strings, tuples, lists, dictionaries, sets and frozensets). All other values are interpreted as true. User-defined objects can customize their -truth value by providing a :meth:`__bool__` method. +truth value by providing a :meth:`~object.__bool__` method. .. index:: pair: operator; not diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst index 47062f86810e91..83cd4402a36cf6 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -549,6 +549,10 @@ retained), except that three unescaped quotes in a row terminate the literal. ( .. _escape-sequences: + +Escape sequences +^^^^^^^^^^^^^^^^ + Unless an ``'r'`` or ``'R'`` prefix is present, escape sequences in string and bytes literals are interpreted according to rules similar to those used by Standard C. The recognized escape sequences are: @@ -783,7 +787,7 @@ is converted before formatting. 
Conversion ``'!s'`` calls :func:`str` on the result, ``'!r'`` calls :func:`repr`, and ``'!a'`` calls :func:`ascii`. The result is then formatted using the :func:`format` protocol. The -format specifier is passed to the :meth:`__format__` method of the +format specifier is passed to the :meth:`~object.__format__` method of the expression or conversion result. An empty string is passed when the format specifier is omitted. The formatted result is then included in the final value of the whole string. diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt index f7e0665bde445d..94611ca22f09fe 100644 --- a/Doc/requirements-oldest-sphinx.txt +++ b/Doc/requirements-oldest-sphinx.txt @@ -14,11 +14,10 @@ python-docs-theme>=2022.1 # Docutils<0.17, Jinja2<3, and MarkupSafe<2 are additionally specified as # Sphinx 3.2 is incompatible with newer releases of these packages. -Sphinx==3.2.1 alabaster==0.7.13 Babel==2.12.1 -certifi==2022.12.7 -charset-normalizer==3.1.0 +certifi==2023.7.22 +charset-normalizer==3.2.0 colorama==0.4.6 docutils==0.16 idna==3.4 @@ -29,10 +28,11 @@ packaging==23.1 Pygments==2.15.1 requests==2.31.0 snowballstemmer==2.2.0 +Sphinx==3.2.1 sphinxcontrib-applehelp==1.0.4 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 -urllib3==1.26.15 +urllib3==2.0.4 diff --git a/Doc/requirements.txt b/Doc/requirements.txt index bde509febf5bde..d4f23ea8c400fe 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -7,7 +7,7 @@ # won't suddenly cause build failures. Updating the version is fine as long # as no warnings are raised by doing so. # PR #104777: Sphinx 6.2 no longer uses imghdr, removed in Python 3.13. -sphinx==6.2.0 +sphinx==6.2.1 blurb @@ -15,6 +15,6 @@ sphinxext-opengraph==0.7.5 # The theme used by the documentation is stored separately, so we need # to install that as well. -python-docs-theme>=2022.1 +python-docs-theme>=2023.3.1,!=2023.7 -c constraints.txt diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index 23aa30c956b3bd..21b350c4134fd7 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -1,85 +1,47 @@ # All RST files under Doc/ -- except these -- must pass Sphinx nit-picky mode, -# as tested on the CI via touch-clean-files.py in doc.yml. -# Add blank lines between files and keep them sorted lexicographically -# to help avoid merge conflicts. +# as tested on the CI via check-warnings.py in reusable-docs.yml. +# Keep lines sorted lexicographically to help avoid merge conflicts. 
-Doc/c-api/allocation.rst -Doc/c-api/apiabiversion.rst -Doc/c-api/arg.rst -Doc/c-api/bool.rst Doc/c-api/buffer.rst -Doc/c-api/bytes.rst -Doc/c-api/call.rst -Doc/c-api/capsule.rst -Doc/c-api/cell.rst -Doc/c-api/code.rst -Doc/c-api/codec.rst -Doc/c-api/complex.rst -Doc/c-api/conversion.rst Doc/c-api/datetime.rst Doc/c-api/descriptor.rst -Doc/c-api/dict.rst Doc/c-api/exceptions.rst Doc/c-api/file.rst Doc/c-api/float.rst Doc/c-api/gcsupport.rst -Doc/c-api/import.rst Doc/c-api/init.rst Doc/c-api/init_config.rst Doc/c-api/intro.rst -Doc/c-api/iterator.rst -Doc/c-api/long.rst -Doc/c-api/mapping.rst -Doc/c-api/marshal.rst Doc/c-api/memory.rst Doc/c-api/memoryview.rst Doc/c-api/module.rst -Doc/c-api/none.rst Doc/c-api/object.rst -Doc/c-api/refcounting.rst -Doc/c-api/sequence.rst Doc/c-api/set.rst Doc/c-api/stable.rst Doc/c-api/structures.rst Doc/c-api/sys.rst -Doc/c-api/tuple.rst Doc/c-api/type.rst -Doc/c-api/typehints.rst Doc/c-api/typeobj.rst Doc/c-api/unicode.rst -Doc/c-api/veryhigh.rst -Doc/c-api/weakref.rst -Doc/extending/embedding.rst Doc/extending/extending.rst Doc/extending/newtypes.rst -Doc/extending/newtypes_tutorial.rst Doc/faq/design.rst -Doc/faq/extending.rst Doc/faq/gui.rst Doc/faq/library.rst Doc/faq/programming.rst Doc/glossary.rst -Doc/howto/curses.rst Doc/howto/descriptor.rst Doc/howto/enum.rst -Doc/howto/functional.rst -Doc/howto/instrumentation.rst Doc/howto/isolating-extensions.rst Doc/howto/logging-cookbook.rst Doc/howto/logging.rst -Doc/howto/regex.rst -Doc/howto/sorting.rst -Doc/howto/unicode.rst Doc/howto/urllib2.rst -Doc/install/index.rst Doc/library/__future__.rst -Doc/library/_thread.rst Doc/library/abc.rst Doc/library/ast.rst Doc/library/asyncio-dev.rst Doc/library/asyncio-eventloop.rst Doc/library/asyncio-extending.rst -Doc/library/asyncio-future.rst Doc/library/asyncio-policy.rst Doc/library/asyncio-stream.rst Doc/library/asyncio-subprocess.rst @@ -88,25 +50,15 @@ Doc/library/bdb.rst Doc/library/bisect.rst Doc/library/bz2.rst Doc/library/calendar.rst -Doc/library/cgi.rst -Doc/library/cmath.rst Doc/library/cmd.rst -Doc/library/code.rst Doc/library/codecs.rst -Doc/library/codeop.rst Doc/library/collections.abc.rst Doc/library/collections.rst -Doc/library/compileall.rst Doc/library/concurrent.futures.rst -Doc/library/concurrent.rst Doc/library/configparser.rst -Doc/library/constants.rst Doc/library/contextlib.rst Doc/library/copy.rst Doc/library/csv.rst -Doc/library/ctypes.rst -Doc/library/curses.ascii.rst -Doc/library/curses.rst Doc/library/datetime.rst Doc/library/dbm.rst Doc/library/decimal.rst @@ -116,11 +68,8 @@ Doc/library/dis.rst Doc/library/doctest.rst Doc/library/email.charset.rst Doc/library/email.compat32-message.rst -Doc/library/email.encoders.rst Doc/library/email.errors.rst -Doc/library/email.generator.rst Doc/library/email.headerregistry.rst -Doc/library/email.message.rst Doc/library/email.mime.rst Doc/library/email.parser.rst Doc/library/email.policy.rst @@ -128,29 +77,20 @@ Doc/library/enum.rst Doc/library/exceptions.rst Doc/library/faulthandler.rst Doc/library/fcntl.rst -Doc/library/filecmp.rst -Doc/library/fileinput.rst -Doc/library/fractions.rst Doc/library/ftplib.rst Doc/library/functions.rst Doc/library/functools.rst -Doc/library/getopt.rst Doc/library/getpass.rst Doc/library/gettext.rst -Doc/library/graphlib.rst Doc/library/gzip.rst -Doc/library/hashlib.rst Doc/library/http.client.rst Doc/library/http.cookiejar.rst Doc/library/http.cookies.rst Doc/library/http.server.rst -Doc/library/idle.rst -Doc/library/importlib.resources.abc.rst 
Doc/library/importlib.resources.rst Doc/library/importlib.rst Doc/library/inspect.rst Doc/library/io.rst -Doc/library/json.rst Doc/library/locale.rst Doc/library/logging.config.rst Doc/library/logging.handlers.rst @@ -158,12 +98,9 @@ Doc/library/logging.rst Doc/library/lzma.rst Doc/library/mailbox.rst Doc/library/mmap.rst -Doc/library/msvcrt.rst Doc/library/multiprocessing.rst Doc/library/multiprocessing.shared_memory.rst -Doc/library/netrc.rst Doc/library/numbers.rst -Doc/library/operator.rst Doc/library/optparse.rst Doc/library/os.path.rst Doc/library/os.rst @@ -171,46 +108,34 @@ Doc/library/pickle.rst Doc/library/pickletools.rst Doc/library/platform.rst Doc/library/plistlib.rst -Doc/library/poplib.rst -Doc/library/posix.rst -Doc/library/pprint.rst Doc/library/profile.rst -Doc/library/pty.rst -Doc/library/py_compile.rst Doc/library/pyclbr.rst Doc/library/pydoc.rst Doc/library/pyexpat.rst Doc/library/random.rst -Doc/library/re.rst Doc/library/readline.rst Doc/library/reprlib.rst Doc/library/resource.rst Doc/library/rlcompleter.rst -Doc/library/sched.rst Doc/library/select.rst Doc/library/selectors.rst Doc/library/shelve.rst -Doc/library/shutil.rst Doc/library/signal.rst Doc/library/site.rst Doc/library/smtplib.rst Doc/library/socket.rst Doc/library/socketserver.rst Doc/library/ssl.rst -Doc/library/stat.rst Doc/library/stdtypes.rst Doc/library/string.rst -Doc/library/struct.rst Doc/library/subprocess.rst Doc/library/sys.rst Doc/library/sys_path_init.rst -Doc/library/sysconfig.rst Doc/library/syslog.rst Doc/library/tarfile.rst Doc/library/tempfile.rst Doc/library/termios.rst Doc/library/test.rst -Doc/library/textwrap.rst Doc/library/threading.rst Doc/library/time.rst Doc/library/tkinter.rst @@ -222,13 +147,10 @@ Doc/library/turtle.rst Doc/library/unittest.mock-examples.rst Doc/library/unittest.mock.rst Doc/library/unittest.rst -Doc/library/urllib.error.rst Doc/library/urllib.parse.rst Doc/library/urllib.request.rst Doc/library/uuid.rst Doc/library/weakref.rst -Doc/library/winreg.rst -Doc/library/winsound.rst Doc/library/wsgiref.rst Doc/library/xml.dom.minidom.rst Doc/library/xml.dom.pulldom.rst @@ -238,9 +160,7 @@ Doc/library/xml.rst Doc/library/xml.sax.handler.rst Doc/library/xml.sax.reader.rst Doc/library/xml.sax.rst -Doc/library/xml.sax.utils.rst Doc/library/xmlrpc.client.rst -Doc/library/xmlrpc.rst Doc/library/xmlrpc.server.rst Doc/library/zlib.rst Doc/license.rst @@ -248,21 +168,16 @@ Doc/reference/compound_stmts.rst Doc/reference/datamodel.rst Doc/reference/expressions.rst Doc/reference/import.rst -Doc/reference/lexical_analysis.rst Doc/reference/simple_stmts.rst Doc/tutorial/appendix.rst Doc/tutorial/classes.rst Doc/tutorial/controlflow.rst Doc/tutorial/datastructures.rst -Doc/tutorial/errors.rst Doc/tutorial/inputoutput.rst -Doc/tutorial/interactive.rst Doc/tutorial/introduction.rst Doc/tutorial/modules.rst -Doc/tutorial/stdlib2.rst Doc/using/cmdline.rst Doc/using/configure.rst -Doc/using/unix.rst Doc/using/windows.rst Doc/whatsnew/2.0.rst Doc/whatsnew/2.1.rst diff --git a/Doc/tools/check-warnings.py b/Doc/tools/check-warnings.py new file mode 100644 index 00000000000000..c17d0f51cd1272 --- /dev/null +++ b/Doc/tools/check-warnings.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python3 +""" +Check the output of running Sphinx in nit-picky mode (missing references). +""" +import argparse +import csv +import os +import re +import sys +from pathlib import Path + +# Exclude these whether they're dirty or clean, +# because they trigger a rebuild of dirty files. 
+EXCLUDE_FILES = {
+    "Doc/whatsnew/changelog.rst",
+}
+
+# Subdirectories of Doc/ to exclude.
+EXCLUDE_SUBDIRS = {
+    ".env",
+    ".venv",
+    "env",
+    "includes",
+    "venv",
+}
+
+PATTERN = re.compile(r"(?P<file>[^:]+):(?P<line>\d+): WARNING: (?P<msg>.+)")
+
+
+def check_and_annotate(warnings: list[str], files_to_check: str) -> None:
+    """
+    Convert Sphinx warning messages to GitHub Actions.
+
+    Converts lines like:
+        .../Doc/library/cgi.rst:98: WARNING: reference target not found
+    to:
+        ::warning file=.../Doc/library/cgi.rst,line=98::reference target not found
+
+    Non-matching lines are echoed unchanged.
+
+    see:
+    https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-warning-message
+    """
+    files_to_check = next(csv.reader([files_to_check]))
+    for warning in warnings:
+        if any(filename in warning for filename in files_to_check):
+            if match := PATTERN.fullmatch(warning):
+                print("::warning file={file},line={line}::{msg}".format_map(match))
+
+
+def fail_if_regression(
+    warnings: list[str], files_with_expected_nits: set[str], files_with_nits: set[str]
+) -> int:
+    """
+    Ensure some files always pass Sphinx nit-picky mode (no missing references).
+    These are files which are *not* in .nitignore.
+    """
+    all_rst = {
+        str(rst)
+        for rst in Path("Doc/").rglob("*.rst")
+        if rst.parts[1] not in EXCLUDE_SUBDIRS
+    }
+    should_be_clean = all_rst - files_with_expected_nits - EXCLUDE_FILES
+    problem_files = sorted(should_be_clean & files_with_nits)
+    if problem_files:
+        print("\nError: must not contain warnings:\n")
+        for filename in problem_files:
+            print(filename)
+            for warning in warnings:
+                if filename in warning:
+                    if match := PATTERN.fullmatch(warning):
+                        print(" {line}: {msg}".format_map(match))
+        return -1
+    return 0
+
+
+def fail_if_improved(
+    files_with_expected_nits: set[str], files_with_nits: set[str]
+) -> int:
+    """
+    We may have fixed warnings in some files so that the files are now completely clean.
+    Good news! Let's add them to .nitignore to prevent regression.
+    """
+    files_with_no_nits = files_with_expected_nits - files_with_nits
+    if files_with_no_nits:
+        print("\nCongratulations!
You improved:\n") + for filename in sorted(files_with_no_nits): + print(filename) + print("\nPlease remove from Doc/tools/.nitignore\n") + return -1 + return 0 + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument( + "--check-and-annotate", + help="Comma-separated list of files to check, " + "and annotate those with warnings on GitHub Actions", + ) + parser.add_argument( + "--fail-if-regression", + action="store_true", + help="Fail if known-good files have warnings", + ) + parser.add_argument( + "--fail-if-improved", + action="store_true", + help="Fail if new files with no nits are found", + ) + args = parser.parse_args() + exit_code = 0 + + wrong_directory_msg = "Must run this script from the repo root" + assert Path("Doc").exists() and Path("Doc").is_dir(), wrong_directory_msg + + with Path("Doc/sphinx-warnings.txt").open() as f: + warnings = f.read().splitlines() + + cwd = str(Path.cwd()) + os.path.sep + files_with_nits = { + warning.removeprefix(cwd).split(":")[0] + for warning in warnings + if "Doc/" in warning + } + + with Path("Doc/tools/.nitignore").open() as clean_files: + files_with_expected_nits = { + filename.strip() + for filename in clean_files + if filename.strip() and not filename.startswith("#") + } + + if args.check_and_annotate: + check_and_annotate(warnings, args.check_and_annotate) + + if args.fail_if_regression: + exit_code += fail_if_regression( + warnings, files_with_expected_nits, files_with_nits + ) + + if args.fail_if_improved: + exit_code += fail_if_improved(files_with_expected_nits, files_with_nits) + + return exit_code + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index 8d99b0bfa4f381..3cf4d236604bcb 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -98,14 +98,13 @@ class ImplementationDetail(Directive): final_argument_whitespace = True # This text is copied to templates/dummy.html - label_text = 'CPython implementation detail:' + label_text = sphinx_gettext('CPython implementation detail:') def run(self): self.assert_has_content() pnode = nodes.compound(classes=['impl-detail']) - label = sphinx_gettext(self.label_text) content = self.content - add_text = nodes.strong(label, label) + add_text = nodes.strong(self.label_text, self.label_text) self.state.nested_parse(content, self.content_offset, pnode) content = nodes.inline(pnode[0].rawsource, translatable=True) content.source = pnode[0].source @@ -180,7 +179,7 @@ def parse_platforms(self): if unknown: cls = type(self) logger = logging.getLogger(cls.__qualname__) - logger.warn( + logger.warning( f"Unknown platform(s) or syntax '{' '.join(sorted(unknown))}' " f"in '.. 
availability:: {self.arguments[0]}', see "
                f"{__file__}:{cls.__qualname__}.known_platforms for a set "
@@ -234,9 +233,9 @@ class AuditEvent(Directive):
     final_argument_whitespace = True
 
     _label = [
-        "Raises an :ref:`auditing event <auditing>` {name} with no arguments.",
-        "Raises an :ref:`auditing event <auditing>` {name} with argument {args}.",
-        "Raises an :ref:`auditing event <auditing>` {name} with arguments {args}.",
+        sphinx_gettext("Raises an :ref:`auditing event <auditing>` {name} with no arguments."),
+        sphinx_gettext("Raises an :ref:`auditing event <auditing>` {name} with argument {args}."),
+        sphinx_gettext("Raises an :ref:`auditing event <auditing>` {name} with arguments {args}."),
     ]
 
     @property
@@ -252,7 +251,7 @@ def run(self):
         else:
             args = []
 
-        label = sphinx_gettext(self._label[min(2, len(args))])
+        label = self._label[min(2, len(args))]
         text = label.format(name="``{}``".format(name),
                             args=", ".join("``{}``".format(a) for a in args if a))
 
@@ -267,7 +266,7 @@ def run(self):
         info = env.all_audit_events.setdefault(name, new_info)
         if info is not new_info:
             if not self._do_args_match(info['args'], new_info['args']):
-                self.logger.warn(
+                self.logger.warning(
                     "Mismatched arguments for audit-event {}: {!r} != {!r}"
                     .format(name, info['args'], new_info['args'])
                 )
@@ -414,8 +413,8 @@ class DeprecatedRemoved(Directive):
     final_argument_whitespace = True
     option_spec = {}
 
-    _deprecated_label = 'Deprecated since version {deprecated}, will be removed in version {removed}'
-    _removed_label = 'Deprecated since version {deprecated}, removed in version {removed}'
+    _deprecated_label = sphinx_gettext('Deprecated since version {deprecated}, will be removed in version {removed}')
+    _removed_label = sphinx_gettext('Deprecated since version {deprecated}, removed in version {removed}')
 
     def run(self):
         node = addnodes.versionmodified()
@@ -431,7 +430,6 @@ def run(self):
         else:
             label = self._removed_label
 
-        label = sphinx_gettext(label)
         text = label.format(deprecated=self.arguments[0], removed=self.arguments[1])
         if len(self.arguments) == 3:
             inodes, messages = self.state.inline_text(self.arguments[2],
@@ -544,7 +542,7 @@ def write(self, *ignored):
                                  'building topics... ',
                                  length=len(pydoc_topic_labels)):
             if label not in self.env.domaindata['std']['labels']:
-                self.env.logger.warn('label %r not in documentation' % label)
+                self.env.logger.warning(f'label {label!r} not in documentation')
                 continue
             docname, labelid, sectname = self.env.domaindata['std']['labels'][label]
             doctree = self.env.get_and_resolve_doctree(docname, self)
diff --git a/Doc/tools/templates/layout.html b/Doc/tools/templates/layout.html
index 18a49271df5f20..80103158ea01e6 100644
--- a/Doc/tools/templates/layout.html
+++ b/Doc/tools/templates/layout.html
@@ -4,16 +4,16 @@ {%- if outdated %}
{% trans %}This document is for an old version of Python that is no longer supported. - You should upgrade, and read the {% endtrans %} - {% trans %} Python documentation for the current stable release{% endtrans %}. + You should upgrade, and read the{% endtrans %} + {% trans %}Python documentation for the current stable release{% endtrans %}.
{%- endif %} {%- if is_deployment_preview %}
{% trans %}This is a deploy preview created from a pull request. - For authoritative documentation, see {% endtrans %} - {% trans %} the current stable release{% endtrans %}. + For authoritative documentation, see{% endtrans %} + {% trans %}the current stable release{% endtrans %}.
{%- endif %} {% endblock %} @@ -26,7 +26,9 @@ {% endblock %} {% block extrahead %} - + {% if builder == "html" %} + + {% endif %} {% if builder != "htmlhelp" %} {% if pagename == 'whatsnew/changelog' and not embedded %} diff --git a/Doc/tools/touch-clean-files.py b/Doc/tools/touch-clean-files.py deleted file mode 100644 index 2b045bd68a0cf0..00000000000000 --- a/Doc/tools/touch-clean-files.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python3 -""" -Touch files that must pass Sphinx nit-picky mode -so they are rebuilt and we can catch regressions. -""" -import argparse -import csv -import sys -from pathlib import Path - -wrong_directory_msg = "Must run this script from the repo root" -assert Path("Doc").exists() and Path("Doc").is_dir(), wrong_directory_msg - -# Exclude these whether they're dirty or clean, -# because they trigger a rebuild of dirty files. -EXCLUDE_FILES = { - Path("Doc/whatsnew/changelog.rst"), -} - -# Subdirectories of Doc/ to exclude. -EXCLUDE_SUBDIRS = { - ".env", - ".venv", - "env", - "includes", - "venv", -} - -ALL_RST = { - rst for rst in Path("Doc/").rglob("*.rst") if rst.parts[1] not in EXCLUDE_SUBDIRS -} - - -parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter -) -parser.add_argument("-c", "--clean", help="Comma-separated list of clean files") -args = parser.parse_args() - -if args.clean: - clean_files = next(csv.reader([args.clean])) - CLEAN = { - Path(filename.strip()) - for filename in clean_files - if Path(filename.strip()).is_file() - } -elif args.clean is not None: - print( - "Not touching any files: an empty string `--clean` arg value passed.", - ) - sys.exit(0) -else: - with Path("Doc/tools/.nitignore").open() as ignored_files: - IGNORED = { - Path(filename.strip()) - for filename in ignored_files - if filename.strip() and not filename.startswith("#") - } - CLEAN = ALL_RST - IGNORED - EXCLUDE_FILES - -print("Touching:") -for filename in sorted(CLEAN): - print(filename) - filename.touch() -print(f"Touched {len(CLEAN)} files") diff --git a/Doc/tools/warnings-to-gh-actions.py b/Doc/tools/warnings-to-gh-actions.py deleted file mode 100644 index da33a4ede07abc..00000000000000 --- a/Doc/tools/warnings-to-gh-actions.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python3 - -""" -Convert Sphinx warning messages to GitHub Actions. - -Converts lines like: - .../Doc/library/cgi.rst:98: WARNING: reference target not found -to: - ::warning file=.../Doc/library/cgi.rst,line=98::reference target not found - -Non-matching lines are echoed unchanged. - -see: https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-a-warning-message -""" - -import re -import sys - -pattern = re.compile(r'(?P[^:]+):(?P\d+): WARNING: (?P.+)') - -for line in sys.stdin: - if match := pattern.fullmatch(line.strip()): - print('::warning file={file},line={line}::{msg}'.format_map(match)) - else: - print(line) diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst index e140f51f1dda78..138d87f892e891 100644 --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -4,8 +4,8 @@ More Control Flow Tools *********************** -Besides the :keyword:`while` statement just introduced, Python uses the usual -flow control statements known from other languages, with some twists. +As well as the :keyword:`while` statement just introduced, Python uses a few more +that we will encounter in this chapter. .. _tut-if: @@ -163,14 +163,21 @@ arguments. 
In chapter :ref:`tut-structures`, we will discuss in more detail abo :keyword:`!break` and :keyword:`!continue` Statements, and :keyword:`!else` Clauses on Loops ============================================================================================ -The :keyword:`break` statement, like in C, breaks out of the innermost enclosing +The :keyword:`break` statement breaks out of the innermost enclosing :keyword:`for` or :keyword:`while` loop. -Loop statements may have an :keyword:`!else` clause; it is executed when the loop -terminates through exhaustion of the iterable (with :keyword:`for`) or when the -condition becomes false (with :keyword:`while`), but not when the loop is -terminated by a :keyword:`break` statement. This is exemplified by the -following loop, which searches for prime numbers:: +A :keyword:`!for` or :keyword:`!while` loop can include an :keyword:`!else` clause. + +In a :keyword:`for` loop, the :keyword:`!else` clause is executed +after the loop reaches its final iteration. + +In a :keyword:`while` loop, it's executed after the loop's condition becomes false. + +In either kind of loop, the :keyword:`!else` clause is **not** executed +if the loop was terminated by a :keyword:`break`. + +This is exemplified in the following :keyword:`!for` loop, +which searches for prime numbers:: >>> for n in range(2, 10): ... for x in range(2, n): diff --git a/Doc/tutorial/errors.rst b/Doc/tutorial/errors.rst index 6419ff621f1b31..1ec59767e9ce12 100644 --- a/Doc/tutorial/errors.rst +++ b/Doc/tutorial/errors.rst @@ -154,7 +154,7 @@ exception type. The *except clause* may specify a variable after the exception name. The variable is bound to the exception instance which typically has an ``args`` attribute that stores the arguments. For convenience, builtin exception -types define :meth:`__str__` to print all the arguments without explicitly +types define :meth:`~object.__str__` to print all the arguments without explicitly accessing ``.args``. :: >>> try: @@ -174,7 +174,7 @@ accessing ``.args``. :: x = spam y = eggs -The exception's :meth:`__str__` output is printed as the last part ('detail') +The exception's :meth:`~object.__str__` output is printed as the last part ('detail') of the message for unhandled exceptions. :exc:`BaseException` is the common base class of all exceptions. One of its @@ -535,11 +535,20 @@ of a certain type while letting all other exceptions propagate to other clauses and eventually to be reraised. :: >>> def f(): - ... raise ExceptionGroup("group1", - ... [OSError(1), - ... SystemError(2), - ... ExceptionGroup("group2", - ... [OSError(3), RecursionError(4)])]) + ... raise ExceptionGroup( + ... "group1", + ... [ + ... OSError(1), + ... SystemError(2), + ... ExceptionGroup( + ... "group2", + ... [ + ... OSError(3), + ... RecursionError(4) + ... ] + ... ) + ... ] + ... ) ... >>> try: ... f() diff --git a/Doc/tutorial/interactive.rst b/Doc/tutorial/interactive.rst index c0eb1feec4eb4d..0d3896a4832b59 100644 --- a/Doc/tutorial/interactive.rst +++ b/Doc/tutorial/interactive.rst @@ -23,7 +23,7 @@ Python statement names, the current local variables, and the available module names. For dotted expressions such as ``string.a``, it will evaluate the expression up to the final ``'.'`` and then suggest completions from the attributes of the resulting object. Note that this may execute -application-defined code if an object with a :meth:`__getattr__` method +application-defined code if an object with a :meth:`~object.__getattr__` method is part of the expression. 
The default configuration also saves your history into a file named :file:`.python_history` in your user directory. The history will be available again during the next interactive interpreter diff --git a/Doc/tutorial/introduction.rst b/Doc/tutorial/introduction.rst index ebc2e9187534b4..0fc75c7d7532e2 100644 --- a/Doc/tutorial/introduction.rst +++ b/Doc/tutorial/introduction.rst @@ -52,8 +52,8 @@ Numbers The interpreter acts as a simple calculator: you can type an expression at it and it will write the value. Expression syntax is straightforward: the -operators ``+``, ``-``, ``*`` and ``/`` work just like in most other languages -(for example, Pascal or C); parentheses (``()``) can be used for grouping. +operators ``+``, ``-``, ``*`` and ``/`` can be used to perform +arithmetic; parentheses (``()``) can be used for grouping. For example:: >>> 2 + 2 @@ -138,16 +138,25 @@ and uses the ``j`` or ``J`` suffix to indicate the imaginary part .. _tut-strings: -Strings -------- +Text +---- -Besides numbers, Python can also manipulate strings, which can be expressed -in several ways. They can be enclosed in single quotes (``'...'``) or -double quotes (``"..."``) with the same result [#]_. ``\`` can be used -to escape quotes:: +Python can manipulate text (represented by type :class:`str`, so-called +"strings") as well as numbers. This includes characters "``!``", words +"``rabbit``", names "``Paris``", sentences "``Got your back.``", etc. +"``Yay! :)``". They can be enclosed in single quotes (``'...'``) or double +quotes (``"..."``) with the same result [#]_. >>> 'spam eggs' # single quotes 'spam eggs' + >>> "Paris rabbit got your back :)! Yay!" # double quotes + 'Paris rabbit got your back :)! Yay!' + >>> '1975' # digits and numerals enclosed in quotes are also strings + '1975' + +To quote a quote, we need to "escape" it, by preceding it with ``\``. +Alternatively, we can use the other type of quotation marks:: + >>> 'doesn\'t' # use \' to escape the single quote... "doesn't" >>> "doesn't" # ...or use double quotes instead @@ -159,23 +168,14 @@ to escape quotes:: >>> '"Isn\'t," they said.' '"Isn\'t," they said.' -In the interactive interpreter, the output string is enclosed in quotes and -special characters are escaped with backslashes. While this might sometimes -look different from the input (the enclosing quotes could change), the two -strings are equivalent. The string is enclosed in double quotes if -the string contains a single quote and no double quotes, otherwise it is -enclosed in single quotes. The :func:`print` function produces a more -readable output, by omitting the enclosing quotes and by printing escaped -and special characters:: +In the Python shell, the string definition and output string can look +different. The :func:`print` function produces a more readable output, by +omitting the enclosing quotes and by printing escaped and special characters:: - >>> '"Isn\'t," they said.' - '"Isn\'t," they said.' - >>> print('"Isn\'t," they said.') - "Isn't," they said. >>> s = 'First line.\nSecond line.' # \n means newline - >>> s # without print(), \n is included in the output + >>> s # without print(), special characters are included in the string 'First line.\nSecond line.' - >>> print(s) # with print(), \n produces a new line + >>> print(s) # with print(), special characters are interpreted, so \n produces new line First line. Second line. 
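The tab-completion caveat in the Doc/tutorial/interactive.rst hunk earlier in this patch boils down to ``__getattr__`` being able to run arbitrary code during attribute lookup, so evaluating the expression left of the final ``'.'`` may have side effects. A minimal sketch (the class and its side effect are purely illustrative)::

    class Lazy:
        def __getattr__(self, name):
            # Runs whenever a missing attribute is looked up, e.g. while a
            # completer inspects the object to suggest attribute names.
            print(f"loading {name!r}")
            return name.upper()

    obj = Lazy()
    print(obj.answer)  # prints: loading 'answer', then ANSWER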
diff --git a/Doc/tutorial/modules.rst b/Doc/tutorial/modules.rst index 3bd034bcc9703f..734dd1cfe6871a 100644 --- a/Doc/tutorial/modules.rst +++ b/Doc/tutorial/modules.rst @@ -512,6 +512,22 @@ code:: This would mean that ``from sound.effects import *`` would import the three named submodules of the :mod:`sound.effects` package. +Be aware that submodules might become shadowed by locally defined names. For +example, if you added a ``reverse`` function to the +:file:`sound/effects/__init__.py` file, the ``from sound.effects import *`` +would only import the two submodules ``echo`` and ``surround``, but *not* the +``reverse`` submodule, because it is shadowed by the locally defined +``reverse`` function:: + + __all__ = [ + "echo", # refers to the 'echo.py' file + "surround", # refers to the 'surround.py' file + "reverse", # !!! refers to the 'reverse' function now !!! + ] + + def reverse(msg: str): # <-- this name shadows the 'reverse.py' submodule + return msg[::-1] # in the case of a 'from sound.effects import *' + If ``__all__`` is not defined, the statement ``from sound.effects import *`` does *not* import all submodules from the package :mod:`sound.effects` into the current namespace; it only ensures that the package :mod:`sound.effects` has diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 1b470d395d6d58..4bf67eb439ec6c 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -897,11 +897,11 @@ conflict. * ``default``: use the :ref:`default memory allocators `. * ``malloc``: use the :c:func:`malloc` function of the C library - for all domains (:c:data:`PYMEM_DOMAIN_RAW`, :c:data:`PYMEM_DOMAIN_MEM`, - :c:data:`PYMEM_DOMAIN_OBJ`). + for all domains (:c:macro:`PYMEM_DOMAIN_RAW`, :c:macro:`PYMEM_DOMAIN_MEM`, + :c:macro:`PYMEM_DOMAIN_OBJ`). * ``pymalloc``: use the :ref:`pymalloc allocator ` for - :c:data:`PYMEM_DOMAIN_MEM` and :c:data:`PYMEM_DOMAIN_OBJ` domains and use - the :c:func:`malloc` function for the :c:data:`PYMEM_DOMAIN_RAW` domain. + :c:macro:`PYMEM_DOMAIN_MEM` and :c:macro:`PYMEM_DOMAIN_OBJ` domains and use + the :c:func:`malloc` function for the :c:macro:`PYMEM_DOMAIN_RAW` domain. Install :ref:`debug hooks `: diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index fbe280d6413170..924e73dc54da2e 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -97,7 +97,7 @@ General Options .. cmdoption:: --with-tzpath= - Select the default time zone search path for :data:`zoneinfo.TZPATH`. + Select the default time zone search path for :const:`zoneinfo.TZPATH`. See the :ref:`Compile-time configuration ` of the :mod:`zoneinfo` module. @@ -112,7 +112,7 @@ General Options Build the ``_decimal`` extension module using a thread-local context rather than a coroutine-local context (default), see the :mod:`decimal` module. - See :data:`decimal.HAVE_CONTEXTVAR` and the :mod:`contextvars` module. + See :const:`decimal.HAVE_CONTEXTVAR` and the :mod:`contextvars` module. .. versionadded:: 3.9 diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index ac1ba111e6d9b3..d24450e2f963ff 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -1201,7 +1201,7 @@ non-standard paths in the registry and user site-packages. Modules specified in the registry under ``Modules`` (not ``PythonPath``) may be imported by :class:`importlib.machinery.WindowsRegistryFinder`. This finder is enabled on Windows in 3.6.0 and earlier, but may need to - be explicitly added to :attr:`sys.meta_path` in the future. 
+ be explicitly added to :data:`sys.meta_path` in the future. Additional modules ================== diff --git a/Doc/whatsnew/2.0.rst b/Doc/whatsnew/2.0.rst index 489268ced4c864..71f681881f446f 100644 --- a/Doc/whatsnew/2.0.rst +++ b/Doc/whatsnew/2.0.rst @@ -664,7 +664,7 @@ extra set of parentheses to pass both values as a tuple: ``L.append( (1,2) )``. The earlier versions of these methods were more forgiving because they used an old function in Python's C interface to parse their arguments; 2.0 modernizes -them to use :func:`PyArg_ParseTuple`, the current argument parsing function, +them to use :c:func:`PyArg_ParseTuple`, the current argument parsing function, which provides more helpful error messages and treats multi-argument calls as errors. If you absolutely must use 2.0 but can't fix your code, you can edit :file:`Objects/listobject.c` and define the preprocessor symbol @@ -766,7 +766,7 @@ file, :file:`Include/pyport.h`. Vladimir Marangozov's long-awaited malloc restructuring was completed, to make it easy to have the Python interpreter use a custom allocator instead of C's -standard :func:`malloc`. For documentation, read the comments in +standard :c:func:`malloc`. For documentation, read the comments in :file:`Include/pymem.h` and :file:`Include/objimpl.h`. For the lengthy discussions during which the interface was hammered out, see the web archives of the 'patches' and 'python-dev' lists at python.org. @@ -794,15 +794,15 @@ are generating Python code would run into this limit. A patch by Charles G. Waldman raises the limit from ``2**16`` to ``2**32``. Three new convenience functions intended for adding constants to a module's -dictionary at module initialization time were added: :func:`PyModule_AddObject`, -:func:`PyModule_AddIntConstant`, and :func:`PyModule_AddStringConstant`. Each +dictionary at module initialization time were added: :c:func:`PyModule_AddObject`, +:c:func:`PyModule_AddIntConstant`, and :c:func:`PyModule_AddStringConstant`. Each of these functions takes a module object, a null-terminated C string containing the name to be added, and a third argument for the value to be assigned to the name. This third argument is, respectively, a Python object, a C long, or a C string. -A wrapper API was added for Unix-style signal handlers. :func:`PyOS_getsig` gets -a signal handler and :func:`PyOS_setsig` will set a new handler. +A wrapper API was added for Unix-style signal handlers. :c:func:`PyOS_getsig` gets +a signal handler and :c:func:`PyOS_setsig` will set a new handler. .. ====================================================================== diff --git a/Doc/whatsnew/2.1.rst b/Doc/whatsnew/2.1.rst index 676da702b39693..f0e1ded75a9d27 100644 --- a/Doc/whatsnew/2.1.rst +++ b/Doc/whatsnew/2.1.rst @@ -692,8 +692,8 @@ applied, and 136 bugs fixed; both figures are likely to be underestimates. Some of the more notable changes are: * A specialized object allocator is now optionally available, that should be - faster than the system :func:`malloc` and have less memory overhead. The - allocator uses C's :func:`malloc` function to get large pools of memory, and + faster than the system :c:func:`malloc` and have less memory overhead. The + allocator uses C's :c:func:`!malloc` function to get large pools of memory, and then fulfills smaller memory requests from these pools. It can be enabled by providing the :option:`!--with-pymalloc` option to the :program:`configure` script; see :file:`Objects/obmalloc.c` for the implementation details. 
@@ -701,13 +701,13 @@ of the more notable changes are: Authors of C extension modules should test their code with the object allocator enabled, because some incorrect code may break, causing core dumps at runtime. There are a bunch of memory allocation functions in Python's C API that have - previously been just aliases for the C library's :func:`malloc` and - :func:`free`, meaning that if you accidentally called mismatched functions, the + previously been just aliases for the C library's :c:func:`malloc` and + :c:func:`free`, meaning that if you accidentally called mismatched functions, the error wouldn't be noticeable. When the object allocator is enabled, these - functions aren't aliases of :func:`malloc` and :func:`free` any more, and + functions aren't aliases of :c:func:`!malloc` and :c:func:`!free` any more, and calling the wrong function to free memory will get you a core dump. For - example, if memory was allocated using :func:`PyMem_New`, it has to be freed - using :func:`PyMem_Del`, not :func:`free`. A few modules included with Python + example, if memory was allocated using :c:macro:`PyMem_New`, it has to be freed + using :c:func:`PyMem_Del`, not :c:func:`!free`. A few modules included with Python fell afoul of this and had to be fixed; doubtless there are more third-party modules that will have the same problem. @@ -717,7 +717,7 @@ of the more notable changes are: complain about its lack of speed, and because it's often been used as a naïve benchmark. The :meth:`readline` method of file objects has therefore been rewritten to be much faster. The exact amount of the speedup will vary from - platform to platform depending on how slow the C library's :func:`getc` was, but + platform to platform depending on how slow the C library's :c:func:`!getc` was, but is around 66%, and potentially much faster on some particular operating systems. Tim Peters did much of the benchmarking and coding for this change, motivated by a discussion in comp.lang.python. @@ -770,7 +770,7 @@ of the more notable changes are: reorganization done by Jeremy Hylton. * C extensions which import other modules have been changed to use - :func:`PyImport_ImportModule`, which means that they will use any import hooks + :c:func:`PyImport_ImportModule`, which means that they will use any import hooks that have been installed. This is also encouraged for third-party extensions that need to import some other module from C code. diff --git a/Doc/whatsnew/2.2.rst b/Doc/whatsnew/2.2.rst index 82aff0be1ed3b3..44e9bd8d492bfc 100644 --- a/Doc/whatsnew/2.2.rst +++ b/Doc/whatsnew/2.2.rst @@ -1105,11 +1105,11 @@ code, none of the changes described here will affect you very much. expected, and a set of pointers to :c:expr:`PyObject*` variables that will be filled in with argument values. -* Two new flags :const:`METH_NOARGS` and :const:`METH_O` are available in method +* Two new flags :c:macro:`METH_NOARGS` and :c:macro:`METH_O` are available in method definition tables to simplify implementation of methods with no arguments or a single untyped argument. Calling such methods is more efficient than calling a - corresponding method that uses :const:`METH_VARARGS`. Also, the old - :const:`METH_OLDARGS` style of writing C methods is now officially deprecated. + corresponding method that uses :c:macro:`METH_VARARGS`. Also, the old + :c:macro:`METH_OLDARGS` style of writing C methods is now officially deprecated. 
* Two new wrapper functions, :c:func:`PyOS_snprintf` and :c:func:`PyOS_vsnprintf` were added to provide cross-platform implementations for the relatively new diff --git a/Doc/whatsnew/2.3.rst b/Doc/whatsnew/2.3.rst index 43bf3fa46a29f1..a96c1061455e00 100644 --- a/Doc/whatsnew/2.3.rst +++ b/Doc/whatsnew/2.3.rst @@ -1474,7 +1474,7 @@ complete list of changes, or look through the CVS logs for all the details. * On Windows, the :mod:`socket` module now ships with Secure Sockets Layer (SSL) support. -* The value of the C :const:`PYTHON_API_VERSION` macro is now exposed at the +* The value of the C :c:macro:`PYTHON_API_VERSION` macro is now exposed at the Python level as ``sys.api_version``. The current exception can be cleared by calling the new :func:`sys.exc_clear` function. @@ -1847,7 +1847,7 @@ specifically for allocating Python objects. :c:func:`PyObject_Malloc`, :c:func:`PyObject_Realloc`, and :c:func:`PyObject_Free`. * To allocate and free Python objects, use the "object" family - :c:func:`PyObject_New`, :c:func:`PyObject_NewVar`, and :c:func:`PyObject_Del`. + :c:macro:`PyObject_New`, :c:macro:`PyObject_NewVar`, and :c:func:`PyObject_Del`. Thanks to lots of work by Tim Peters, pymalloc in 2.3 also provides debugging features to catch memory overwrites and doubled frees in both extension modules @@ -1899,10 +1899,10 @@ Changes to Python's build process and to the C API include: * The :c:func:`PyArg_NoArgs` macro is now deprecated, and code that uses it should be changed. For Python 2.2 and later, the method definition table can - specify the :const:`METH_NOARGS` flag, signalling that there are no arguments, + specify the :c:macro:`METH_NOARGS` flag, signalling that there are no arguments, and the argument checking can then be removed. If compatibility with pre-2.2 versions of Python is important, the code could use ``PyArg_ParseTuple(args, - "")`` instead, but this will be slower than using :const:`METH_NOARGS`. + "")`` instead, but this will be slower than using :c:macro:`METH_NOARGS`. * :c:func:`PyArg_ParseTuple` accepts new format characters for various sizes of unsigned integers: ``B`` for :c:expr:`unsigned char`, ``H`` for :c:expr:`unsigned @@ -1918,7 +1918,7 @@ Changes to Python's build process and to the C API include: seconds, according to one measurement). * It's now possible to define class and static methods for a C extension type by - setting either the :const:`METH_CLASS` or :const:`METH_STATIC` flags in a + setting either the :c:macro:`METH_CLASS` or :c:macro:`METH_STATIC` flags in a method's :c:type:`PyMethodDef` structure. * Python now includes a copy of the Expat XML parser's source code, removing any diff --git a/Doc/whatsnew/2.4.rst b/Doc/whatsnew/2.4.rst index 43c3f01e5af89c..9e8a9e6a622d00 100644 --- a/Doc/whatsnew/2.4.rst +++ b/Doc/whatsnew/2.4.rst @@ -1476,7 +1476,7 @@ Some of the changes to Python's build process and to the C API are: :c:func:`PyArg_ParseTupleAndKeywords` but takes a :c:type:`va_list` instead of a number of arguments. (Contributed by Greg Chapman.) -* A new method flag, :const:`METH_COEXISTS`, allows a function defined in slots +* A new method flag, :c:macro:`METH_COEXISTS`, allows a function defined in slots to co-exist with a :c:type:`PyCFunction` having the same name. This can halve the access time for a method such as :meth:`set.__contains__`. (Contributed by Raymond Hettinger.) 
@@ -1491,7 +1491,7 @@ Some of the changes to Python's build process and to the C API are: though that processor architecture doesn't call that register "the TSC register". (Contributed by Jeremy Hylton.) -* The :c:type:`tracebackobject` type has been renamed to +* The :c:type:`!tracebackobject` type has been renamed to :c:type:`PyTracebackObject`. .. ====================================================================== diff --git a/Doc/whatsnew/2.5.rst b/Doc/whatsnew/2.5.rst index 85ffd170e7d113..a47327d15fd79a 100644 --- a/Doc/whatsnew/2.5.rst +++ b/Doc/whatsnew/2.5.rst @@ -954,7 +954,7 @@ The return value must be either a Python integer or long integer. The interpreter will check that the type returned is correct, and raises a :exc:`TypeError` if this requirement isn't met. -A corresponding :attr:`nb_index` slot was added to the C-level +A corresponding :c:member:`~PyNumberMethods.nb_index` slot was added to the C-level :c:type:`PyNumberMethods` structure to let C extensions implement this protocol. ``PyNumber_Index(obj)`` can be used in extension code to call the :meth:`__index__` function and retrieve its result. @@ -1448,10 +1448,10 @@ complete list of changes, or look through the SVN logs for all the details. return times that are precise to fractions of a second; not all systems support such precision.) - Constants named :attr:`os.SEEK_SET`, :attr:`os.SEEK_CUR`, and - :attr:`os.SEEK_END` have been added; these are the parameters to the + Constants named :const:`os.SEEK_SET`, :const:`os.SEEK_CUR`, and + :const:`os.SEEK_END` have been added; these are the parameters to the :func:`os.lseek` function. Two new constants for locking are - :attr:`os.O_SHLOCK` and :attr:`os.O_EXLOCK`. + :const:`os.O_SHLOCK` and :const:`os.O_EXLOCK`. Two new functions, :func:`wait3` and :func:`wait4`, were added. They're similar the :func:`waitpid` function which waits for a child process to exit and returns @@ -1602,7 +1602,7 @@ complete list of changes, or look through the SVN logs for all the details. * The :mod:`unicodedata` module has been updated to use version 4.1.0 of the Unicode character database. Version 3.2.0 is required by some specifications, - so it's still available as :attr:`unicodedata.ucd_3_2_0`. + so it's still available as :data:`unicodedata.ucd_3_2_0`. * New module: the :mod:`uuid` module generates universally unique identifiers (UUIDs) according to :rfc:`4122`. The RFC defines several different UUID @@ -2151,8 +2151,8 @@ Changes to Python's build process and to the C API include: Previously these different families all reduced to the platform's :c:func:`malloc` and :c:func:`free` functions. This meant it didn't matter if - you got things wrong and allocated memory with the :c:func:`PyMem` function but - freed it with the :c:func:`PyObject` function. With 2.5's changes to obmalloc, + you got things wrong and allocated memory with the ``PyMem`` function but + freed it with the ``PyObject`` function. With 2.5's changes to obmalloc, these families now do different things and mismatches will probably result in a segfault. You should carefully test your C extension modules with Python 2.5. diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst index 69170897ccc50a..ad899d53886c59 100644 --- a/Doc/whatsnew/2.6.rst +++ b/Doc/whatsnew/2.6.rst @@ -1138,11 +1138,11 @@ indicate that the external caller is done. The *flags* argument to :c:func:`PyObject_GetBuffer` specifies constraints upon the memory returned. 
Some examples are: - * :const:`PyBUF_WRITABLE` indicates that the memory must be writable. + * :c:macro:`PyBUF_WRITABLE` indicates that the memory must be writable. - * :const:`PyBUF_LOCK` requests a read-only or exclusive lock on the memory. + * :c:macro:`PyBUF_LOCK` requests a read-only or exclusive lock on the memory. - * :const:`PyBUF_C_CONTIGUOUS` and :const:`PyBUF_F_CONTIGUOUS` + * :c:macro:`PyBUF_C_CONTIGUOUS` and :c:macro:`PyBUF_F_CONTIGUOUS` requests a C-contiguous (last dimension varies the fastest) or Fortran-contiguous (first dimension varies the fastest) array layout. @@ -2289,7 +2289,7 @@ changes, or look through the Subversion logs for all the details. (Contributed by Raymond Hettinger; :issue:`1861`.) * The :mod:`select` module now has wrapper functions - for the Linux :c:func:`epoll` and BSD :c:func:`kqueue` system calls. + for the Linux :c:func:`!epoll` and BSD :c:func:`!kqueue` system calls. :meth:`modify` method was added to the existing :class:`poll` objects; ``pollobj.modify(fd, eventmask)`` takes a file descriptor or file object and an event mask, modifying the recorded event mask @@ -2328,7 +2328,7 @@ changes, or look through the Subversion logs for all the details. one for reading and one for writing. The writable descriptor will be passed to :func:`set_wakeup_fd`, and the readable descriptor will be added to the list of descriptors monitored by the event loop via - :c:func:`select` or :c:func:`poll`. + :c:func:`!select` or :c:func:`!poll`. On receiving a signal, a byte will be written and the main event loop will be woken up, avoiding the need to poll. @@ -2982,7 +2982,7 @@ Changes to Python's build process and to the C API include: * Python now must be compiled with C89 compilers (after 19 years!). This means that the Python source tree has dropped its - own implementations of :c:func:`memmove` and :c:func:`strerror`, which + own implementations of :c:func:`!memmove` and :c:func:`!strerror`, which are in the C89 standard library. * Python 2.6 can be built with Microsoft Visual Studio 2008 (version diff --git a/Doc/whatsnew/2.7.rst b/Doc/whatsnew/2.7.rst index f8c7872d7d3f89..4b5a31f8a84810 100644 --- a/Doc/whatsnew/2.7.rst +++ b/Doc/whatsnew/2.7.rst @@ -355,7 +355,7 @@ added as a more powerful replacement for the This means Python now supports three different modules for parsing command-line arguments: :mod:`getopt`, :mod:`optparse`, and :mod:`argparse`. The :mod:`getopt` module closely resembles the C -library's :c:func:`getopt` function, so it remains useful if you're writing a +library's :c:func:`!getopt` function, so it remains useful if you're writing a Python prototype that will eventually be rewritten in C. :mod:`optparse` becomes redundant, but there are no plans to remove it because there are many scripts still using it, and there's no @@ -1556,9 +1556,9 @@ changes, or look through the Subversion logs for all the details. :issue:`8484`.) The version of OpenSSL being used is now available as the module - attributes :data:`ssl.OPENSSL_VERSION` (a string), - :data:`ssl.OPENSSL_VERSION_INFO` (a 5-tuple), and - :data:`ssl.OPENSSL_VERSION_NUMBER` (an integer). (Added by Antoine + attributes :const:`ssl.OPENSSL_VERSION` (a string), + :const:`ssl.OPENSSL_VERSION_INFO` (a 5-tuple), and + :const:`ssl.OPENSSL_VERSION_NUMBER` (an integer). (Added by Antoine Pitrou; :issue:`8321`.) 
* The :mod:`struct` module will no longer silently ignore overflow @@ -2231,7 +2231,7 @@ Changes to Python's build process and to the C API include: * When using the :c:type:`PyMemberDef` structure to define attributes of a type, Python will no longer let you try to delete or set a - :const:`T_STRING_INPLACE` attribute. + :c:macro:`T_STRING_INPLACE` attribute. .. rev 79644 diff --git a/Doc/whatsnew/3.0.rst b/Doc/whatsnew/3.0.rst index b8cd7c48b359b2..b767bbe177abeb 100644 --- a/Doc/whatsnew/3.0.rst +++ b/Doc/whatsnew/3.0.rst @@ -789,7 +789,7 @@ Operators And Special Methods :attr:`__doc__`, :attr:`__globals__`, :attr:`~definition.__name__`, respectively. -* :meth:`__nonzero__` is now :meth:`__bool__`. +* :meth:`!__nonzero__` is now :meth:`~object.__bool__`. Builtins -------- diff --git a/Doc/whatsnew/3.1.rst b/Doc/whatsnew/3.1.rst index 054762de7e743a..c399f007fd63fb 100644 --- a/Doc/whatsnew/3.1.rst +++ b/Doc/whatsnew/3.1.rst @@ -370,7 +370,7 @@ New, Improved, and Deprecated Modules * The :mod:`io` module has three new constants for the :meth:`seek` method :data:`SEEK_SET`, :data:`SEEK_CUR`, and :data:`SEEK_END`. -* The :attr:`sys.version_info` tuple is now a named tuple:: +* The :data:`sys.version_info` tuple is now a named tuple:: >>> sys.version_info sys.version_info(major=3, minor=1, micro=0, releaselevel='alpha', serial=2) @@ -486,7 +486,7 @@ Changes to Python's build process and to the C API include: Apart from the performance improvements this change should be invisible to end users, with one exception: for testing and debugging purposes there's a - new :attr:`sys.int_info` that provides information about the + new :data:`sys.int_info` that provides information about the internal format, giving the number of bits per digit and the size in bytes of the C type used to store each digit:: diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index ab030db5b3ffaa..e4ca3c4c81ba58 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -887,7 +887,7 @@ Other Language Changes New Modules =========== -* None yet. +* None. Improved Modules @@ -1253,8 +1253,8 @@ descriptors without copying between kernel address space and user address space, where one of the file descriptors must refer to a pipe. (Contributed by Pablo Galindo in :issue:`41625`.) -Add :data:`~os.O_EVTONLY`, :data:`~os.O_FSYNC`, :data:`~os.O_SYMLINK` -and :data:`~os.O_NOFOLLOW_ANY` for macOS. +Add :const:`~os.O_EVTONLY`, :const:`~os.O_FSYNC`, :const:`~os.O_SYMLINK` +and :const:`~os.O_NOFOLLOW_ANY` for macOS. (Contributed by Dong-hee Na in :issue:`43106`.) os.path @@ -1319,7 +1319,7 @@ objects in the tree returned by :func:`pyclbr.readline` and shelve ------ -The :mod:`shelve` module now uses :data:`pickle.DEFAULT_PROTOCOL` by default +The :mod:`shelve` module now uses :const:`pickle.DEFAULT_PROTOCOL` by default instead of :mod:`pickle` protocol ``3`` when creating shelves. (Contributed by Zackery Spytz in :issue:`34204`.) @@ -1356,7 +1356,7 @@ The ssl module requires OpenSSL 1.1.1 or newer. (Contributed by Christian Heimes in :pep:`644` and :issue:`43669`.) The ssl module has preliminary support for OpenSSL 3.0.0 and new option -:data:`~ssl.OP_IGNORE_UNEXPECTED_EOF`. +:const:`~ssl.OP_IGNORE_UNEXPECTED_EOF`. (Contributed by Christian Heimes in :issue:`38820`, :issue:`43794`, :issue:`43788`, :issue:`43791`, :issue:`43799`, :issue:`43920`, :issue:`43789`, and :issue:`43811`.) @@ -1387,7 +1387,7 @@ Add a *timeout* parameter to the :func:`ssl.get_server_certificate` function. 
The ssl module uses heap-types and multi-phase initialization. (Contributed by Christian Heimes in :issue:`42333`.) -A new verify flag :data:`~ssl.VERIFY_X509_PARTIAL_CHAIN` has been added. +A new verify flag :const:`~ssl.VERIFY_X509_PARTIAL_CHAIN` has been added. (Contributed by l0x in :issue:`40849`.) sqlite3 @@ -1413,7 +1413,7 @@ _thread ------- :func:`_thread.interrupt_main` now takes an optional signal number to -simulate (the default is still :data:`signal.SIGINT`). +simulate (the default is still :const:`signal.SIGINT`). (Contributed by Antoine Pitrou in :issue:`43356`.) threading @@ -1757,8 +1757,8 @@ Deprecated * :data:`~ssl.PROTOCOL_SSLv2`, :data:`~ssl.PROTOCOL_SSLv3`, :data:`~ssl.PROTOCOL_SSLv23`, :data:`~ssl.PROTOCOL_TLSv1`, :data:`~ssl.PROTOCOL_TLSv1_1`, :data:`~ssl.PROTOCOL_TLSv1_2`, and - :data:`~ssl.PROTOCOL_TLS` are deprecated in favor of - :data:`~ssl.PROTOCOL_TLS_CLIENT` and :data:`~ssl.PROTOCOL_TLS_SERVER` + :const:`~ssl.PROTOCOL_TLS` are deprecated in favor of + :const:`~ssl.PROTOCOL_TLS_CLIENT` and :const:`~ssl.PROTOCOL_TLS_SERVER` * :func:`~ssl.wrap_socket` is replaced by :meth:`ssl.SSLContext.wrap_socket` @@ -2124,11 +2124,11 @@ New Features These functions allow to activate, deactivate and query the state of the garbage collector from C code without having to import the :mod:`gc` module. -* Add a new :c:data:`Py_TPFLAGS_DISALLOW_INSTANTIATION` type flag to disallow +* Add a new :c:macro:`Py_TPFLAGS_DISALLOW_INSTANTIATION` type flag to disallow creating type instances. (Contributed by Victor Stinner in :issue:`43916`.) -* Add a new :c:data:`Py_TPFLAGS_IMMUTABLETYPE` type flag for creating immutable +* Add a new :c:macro:`Py_TPFLAGS_IMMUTABLETYPE` type flag for creating immutable type objects: type attributes cannot be set nor deleted. (Contributed by Victor Stinner and Erlend E. Aasland in :issue:`43908`.) @@ -2187,9 +2187,9 @@ Porting to Python 3.10 been included directly, consider including ``Python.h`` instead. (Contributed by Nicholas Sim in :issue:`35134`.) -* Use the :c:data:`Py_TPFLAGS_IMMUTABLETYPE` type flag to create immutable type - objects. Do not rely on :c:data:`Py_TPFLAGS_HEAPTYPE` to decide if a type - object is mutable or not; check if :c:data:`Py_TPFLAGS_IMMUTABLETYPE` is set +* Use the :c:macro:`Py_TPFLAGS_IMMUTABLETYPE` type flag to create immutable type + objects. Do not rely on :c:macro:`Py_TPFLAGS_HEAPTYPE` to decide if a type + object is mutable or not; check if :c:macro:`Py_TPFLAGS_IMMUTABLETYPE` is set instead. (Contributed by Victor Stinner and Erlend E. Aasland in :issue:`43908`.) diff --git a/Doc/whatsnew/3.11.rst b/Doc/whatsnew/3.11.rst index 45194130c993a0..ec5263ec35c52f 100644 --- a/Doc/whatsnew/3.11.rst +++ b/Doc/whatsnew/3.11.rst @@ -640,7 +640,7 @@ dataclasses datetime -------- -* Add :attr:`datetime.UTC`, a convenience alias for +* Add :const:`datetime.UTC`, a convenience alias for :attr:`datetime.timezone.utc`. (Contributed by Kabir Kwatra in :gh:`91973`.) * :meth:`datetime.date.fromisoformat`, :meth:`datetime.time.fromisoformat` and @@ -690,7 +690,7 @@ enum * Added the :func:`~enum.global_enum` enum decorator, which adjusts :meth:`~object.__repr__` and :meth:`~object.__str__` to show values as members of their module rather than the enum class. - For example, ``'re.ASCII'`` for the :data:`~re.ASCII` member + For example, ``'re.ASCII'`` for the :const:`~re.ASCII` member of :class:`re.RegexFlag` rather than ``'RegexFlag.ASCII'``. 
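A minimal sketch of the decorator described above, assuming an arbitrary
example enum named ``Mood``; the module name shown by ``repr()`` depends on
where the enum is defined (``__main__`` when run as a script)::

    import enum

    @enum.global_enum
    class Mood(enum.IntEnum):
        HAPPY = 1
        SAD = 2

    # The decorator also exports the members into the module's global
    # namespace, and repr() now uses the module name rather than the class name.
    print(repr(HAPPY))   # e.g. '__main__.HAPPY'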
* Enhanced :class:`~enum.Flag` to support @@ -1063,8 +1063,8 @@ threading * On Unix, if the ``sem_clockwait()`` function is available in the C library (glibc 2.30 and newer), the :meth:`threading.Lock.acquire` method now uses - the monotonic clock (:data:`time.CLOCK_MONOTONIC`) for the timeout, rather - than using the system clock (:data:`time.CLOCK_REALTIME`), to not be affected + the monotonic clock (:const:`time.CLOCK_MONOTONIC`) for the timeout, rather + than using the system clock (:const:`time.CLOCK_REALTIME`), to not be affected by system clock changes. (Contributed by Victor Stinner in :issue:`41710`.) @@ -1812,7 +1812,7 @@ Standard Library (Contributed by Serhiy Storchaka in :gh:`91760`.) * In the :mod:`re` module, the :func:`!re.template` function - and the corresponding :data:`!re.TEMPLATE` and :data:`!re.T` flags + and the corresponding :const:`!re.TEMPLATE` and :const:`!re.T` flags are deprecated, as they were undocumented and lacked an obvious purpose. They will be removed in Python 3.13. (Contributed by Serhiy Storchaka and Miro Hrončok in :gh:`92728`.) @@ -2227,7 +2227,7 @@ New Features (Contributed by Christian Heimes in :issue:`45459`.) -* Added the :c:data:`PyType_GetModuleByDef` function, used to get the module +* Added the :c:func:`PyType_GetModuleByDef` function, used to get the module in which a method was defined, in cases where this information is not available directly (via :c:type:`PyCMethod`). (Contributed by Petr Viktorin in :issue:`46613`.) @@ -2347,11 +2347,11 @@ Porting to Python 3.11 #endif * The :c:func:`PyType_Ready` function now raises an error if a type is defined - with the :const:`Py_TPFLAGS_HAVE_GC` flag set but has no traverse function + with the :c:macro:`Py_TPFLAGS_HAVE_GC` flag set but has no traverse function (:c:member:`PyTypeObject.tp_traverse`). (Contributed by Victor Stinner in :issue:`44263`.) -* Heap types with the :const:`Py_TPFLAGS_IMMUTABLETYPE` flag can now inherit +* Heap types with the :c:macro:`Py_TPFLAGS_IMMUTABLETYPE` flag can now inherit the :pep:`590` vectorcall protocol. Previously, this was only possible for :ref:`static types `. (Contributed by Erlend E. Aasland in :issue:`43908`) diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index a6d101bdb9f7a8..8ed435476c9cec 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -3,8 +3,7 @@ What's New In Python 3.12 **************************** -:Release: |release| -:Date: |today| +:Editor: TBD .. Rules for maintenance: @@ -70,6 +69,10 @@ New grammar features: * :pep:`701`: Syntactic formalization of f-strings +Interpreter improvements: + +* :ref:`whatsnew312-pep684` + New typing features: * :pep:`688`: Making the buffer protocol accessible in Python @@ -84,7 +87,9 @@ Important deprecations, removals or restrictions: * :pep:`623`: Remove wstr from Unicode -* :pep:`632`: Remove the ``distutils`` package +* :pep:`632`: Remove the ``distutils`` package. See + `the migration guide `_ + for advice on its replacement. Improved Error Messages ======================= @@ -275,6 +280,36 @@ The new :class:`inspect.BufferFlags` enum represents the flags that can be used to customize buffer creation. (Contributed by Jelle Zijlstra in :gh:`102500`.) +.. _whatsnew312-pep684: + +PEP 684: A Per-Interpreter GIL +------------------------------ + +Sub-interpreters may now be created with a unique GIL per interpreter. +This allows Python programs to take full advantage of multiple CPU +cores. 
+
+Use the new :c:func:`Py_NewInterpreterFromConfig` function to
+create an interpreter with its own GIL::
+
+    PyInterpreterConfig config = {
+        .check_multi_interp_extensions = 1,
+        .gil = PyInterpreterConfig_OWN_GIL,
+    };
+    PyThreadState *tstate = NULL;
+    PyStatus status = Py_NewInterpreterFromConfig(&tstate, &config);
+    if (PyStatus_Exception(status)) {
+        return -1;
+    }
+    /* The new interpreter is now active in the current thread. */
+
+For further examples of how to use the C-API for sub-interpreters with a
+per-interpreter GIL, see :source:`Modules/_xxsubinterpretersmodule.c`.
+
+A Python API is anticipated for 3.13.  (See :pep:`554`.)
+
+(Contributed by Eric Snow in :gh:`104210`, etc.)
+
 
 New Features Related to Type Hints
 ==================================
@@ -469,8 +504,8 @@ Other Language Changes
 
 * :class:`slice` objects are now hashable, allowing them to be used as dict keys and
   set items. (Contributed by Will Bradshaw, Furkan Onder, and Raymond Hettinger in :gh:`101264`.)
-* :func:`sum` now uses Neumaier summation to improve accuracy when summing
-  floats or mixed ints and floats.
+* :func:`sum` now uses Neumaier summation to improve accuracy and commutativity
+  when summing floats or mixed ints and floats.
   (Contributed by Raymond Hettinger in :gh:`100425`.)
 
 * Exceptions raised in a typeobject's ``__set_name__`` method are no longer
@@ -486,7 +521,7 @@ Other Language Changes
 New Modules
 ===========
 
-* None yet.
+* None.
 
 
 Improved Modules
@@ -555,7 +590,7 @@ calendar
 csv
 ---
 
-* Add :data:`~csv.QUOTE_NOTNULL` and :data:`~csv.QUOTE_STRINGS` flags to
+* Add :const:`~csv.QUOTE_NOTNULL` and :const:`~csv.QUOTE_STRINGS` flags to
   provide finer grained control of ``None`` and empty strings by
   :class:`~csv.writer` objects.
 
@@ -570,14 +605,6 @@ dis
   :data:`~dis.hasarg` collection instead.
   (Contributed by Irit Katriel in :gh:`94216`.)
 
-email
------
-
-* :func:`email.utils.getaddresses` and :func:`email.utils.parseaddr` now return
-  ``('', '')`` 2-tuples in more situations where invalid email addresses are
-  encountered instead of potentially inaccurate values.
-  (Contributed by Thomas Dwyer for :gh:`102988` to ameliorate CVE-2023-27043.)
-
 fractions
 ---------
 
@@ -620,7 +647,7 @@ math
 os
 --
 
-* Add :data:`os.PIDFD_NONBLOCK` to open a file descriptor
+* Add :const:`os.PIDFD_NONBLOCK` to open a file descriptor
   for a process with :func:`os.pidfd_open` in non-blocking mode.
   (Contributed by Kumar Aditya in :gh:`93312`.)
 
@@ -774,6 +801,11 @@ sys
   exception instance, rather than to a ``(typ, exc, tb)`` tuple.
   (Contributed by Irit Katriel in :gh:`103176`.)
 
+* :func:`sys.setrecursionlimit` and :func:`sys.getrecursionlimit`.
+  The recursion limit now applies only to Python code. Builtin functions do
+  not use the recursion limit, but are protected by a different mechanism
+  that prevents recursion from causing a virtual machine crash.
+
 tempfile
 --------
 
@@ -975,67 +1007,76 @@ Demos and Tools
 Deprecated
 ==========
 
-* :class:`typing.Hashable` and :class:`typing.Sized` aliases for :class:`collections.abc.Hashable`
-  and :class:`collections.abc.Sized`. (:gh:`94309`.)
-
-* The :mod:`sqlite3` :ref:`default adapters and converters
-  <sqlite3-default-converters>` are now deprecated.
-  Instead, use the :ref:`sqlite3-adapter-converter-recipes`
-  and tailor them to your needs.
-  (Contributed by Erlend E. Aasland in :gh:`90016`.)
-
-* In :meth:`~sqlite3.Cursor.execute`, :exc:`DeprecationWarning` is now emitted
-  when :ref:`named placeholders <sqlite3-placeholders>` are used together with
-  parameters supplied as a :term:`sequence` instead of as a :class:`dict`.
-  Starting from Python 3.14, using named placeholders with parameters supplied
-  as a sequence will raise a :exc:`~sqlite3.ProgrammingError`.
-  (Contributed by Erlend E. Aasland in :gh:`101698`.)
-
-* The 3-arg signatures (type, value, traceback) of :meth:`~coroutine.throw`,
-  :meth:`~generator.throw` and :meth:`~agen.athrow` are deprecated and
-  may be removed in a future version of Python. Use the single-arg versions
-  of these functions instead. (Contributed by Ofey Chan in :gh:`89874`.)
-
-* :exc:`DeprecationWarning` is now raised when ``__package__`` on a
-  module differs from ``__spec__.parent`` (previously it was
-  :exc:`ImportWarning`).
-  (Contributed by Brett Cannon in :gh:`65961`.)
-
-* The :meth:`~asyncio.get_event_loop` method of the
+* :mod:`asyncio`: The :meth:`~asyncio.get_event_loop` method of the
   default event loop policy now emits a :exc:`DeprecationWarning` if there
   is no current event loop set and it decides to create one.
   (Contributed by Serhiy Storchaka and Guido van Rossum in :gh:`100160`.)
 
-* The :mod:`xml.etree.ElementTree` module now emits :exc:`DeprecationWarning`
-  when testing the truth value of an :class:`xml.etree.ElementTree.Element`.
-  Before, the Python implementation emitted :exc:`FutureWarning`, and the C
-  implementation emitted nothing.
+* :mod:`calendar`: ``calendar.January`` and ``calendar.February`` constants are deprecated and
+  replaced by :data:`calendar.Month.JANUARY` and :data:`calendar.Month.FEBRUARY`.
+  (Contributed by Prince Roshan in :gh:`103636`.)
 
-* In accordance with :pep:`699`, the ``ma_version_tag`` field in :c:type:`PyDictObject`
-  is deprecated for extension modules. Accessing this field will generate a compiler
-  warning at compile time. This field will be removed in Python 3.14.
-  (Contributed by Ramvikrams and Kumar Aditya in :gh:`101193`. PEP by Ken Jin.)
+* :mod:`datetime`: :class:`datetime.datetime`'s :meth:`~datetime.datetime.utcnow` and
+  :meth:`~datetime.datetime.utcfromtimestamp` are deprecated and will be
+  removed in a future version. Instead, use timezone-aware objects to represent
+  datetimes in UTC: respectively, call :meth:`~datetime.datetime.now` and
+  :meth:`~datetime.datetime.fromtimestamp` with the *tz* parameter set to
+  :const:`datetime.UTC`.
+  (Contributed by Paul Ganssle in :gh:`103857`.)
 
-* The ``st_ctime`` fields return by :func:`os.stat` and :func:`os.lstat` on
+* :mod:`os`: The ``st_ctime`` fields returned by :func:`os.stat` and :func:`os.lstat` on
   Windows are deprecated. In a future release, they will contain the last
   metadata change time, consistent with other platforms. For now, they still
   contain the creation time, which is also available in the new ``st_birthtime``
   field. (Contributed by Steve Dower in :gh:`99726`.)
 
-* The :data:`sys.last_type`, :data:`sys.last_value` and :data:`sys.last_traceback`
+* :mod:`shutil`: The *onerror* argument of :func:`shutil.rmtree` is deprecated and will be removed
+  in Python 3.14. Use *onexc* instead. (Contributed by Irit Katriel in :gh:`102828`.)
+
+* :mod:`sqlite3`:
+  * :ref:`default adapters and converters
+    <sqlite3-default-converters>` are now deprecated.
+    Instead, use the :ref:`sqlite3-adapter-converter-recipes`
+    and tailor them to your needs.
+    (Contributed by Erlend E. Aasland in :gh:`90016`.)
+
+  * In :meth:`~sqlite3.Cursor.execute`, :exc:`DeprecationWarning` is now emitted
+    when :ref:`named placeholders <sqlite3-placeholders>` are used together with
+    parameters supplied as a :term:`sequence` instead of as a :class:`dict`.
+ Starting from Python 3.14, using named placeholders with parameters supplied + as a sequence will raise a :exc:`~sqlite3.ProgrammingError`. + (Contributed by Erlend E. Aasland in :gh:`101698`.) + +* :mod:`sys`: The :data:`sys.last_type`, :data:`sys.last_value` and :data:`sys.last_traceback` fields are deprecated. Use :data:`sys.last_exc` instead. (Contributed by Irit Katriel in :gh:`102778`.) -* The *onerror* argument of :func:`shutil.rmtree` is deprecated as will be removed - in Python 3.14. Use *onexc* instead. (Contributed by Irit Katriel in :gh:`102828`.) - -* Extracting tar archives without specifying *filter* is deprecated until +* :mod:`tarfile`: Extracting tar archives without specifying *filter* is deprecated until Python 3.14, when ``'data'`` filter will become the default. See :ref:`tarfile-extraction-filter` for details. -* ``calendar.January`` and ``calendar.February`` constants are deprecated and - replaced by :data:`calendar.Month.JANUARY` and :data:`calendar.Month.FEBRUARY`. - (Contributed by Prince Roshan in :gh:`103636`.) +* :mod:`typing`: :class:`typing.Hashable` and :class:`typing.Sized` aliases for :class:`collections.abc.Hashable` + and :class:`collections.abc.Sized`. (:gh:`94309`.) + +* :mod:`xml.etree.ElementTree`: The module now emits :exc:`DeprecationWarning` + when testing the truth value of an :class:`xml.etree.ElementTree.Element`. + Before, the Python implementation emitted :exc:`FutureWarning`, and the C + implementation emitted nothing. + +* The 3-arg signatures (type, value, traceback) of :meth:`~coroutine.throw`, + :meth:`~generator.throw` and :meth:`~agen.athrow` are deprecated and + may be removed in a future version of Python. Use the single-arg versions + of these functions instead. (Contributed by Ofey Chan in :gh:`89874`.) + +* :exc:`DeprecationWarning` is now raised when ``__package__`` on a + module differs from ``__spec__.parent`` (previously it was + :exc:`ImportWarning`). + (Contributed by Brett Cannon in :gh:`65961`.) + +* In accordance with :pep:`699`, the ``ma_version_tag`` field in :c:type:`PyDictObject` + is deprecated for extension modules. Accessing this field will generate a compiler + warning at compile time. This field will be removed in Python 3.14. + (Contributed by Ramvikrams and Kumar Aditya in :gh:`101193`. PEP by Ken Jin.) * The bitwise inversion operator (``~``) on bool is deprecated. It will throw an error in Python 3.14. Use ``not`` for logical negation of bools instead. @@ -1043,16 +1084,6 @@ Deprecated ``int``, convert to int explicitly with ``~int(x)``. (Contributed by Tim Hoffmann in :gh:`103487`.) -* :class:`datetime.datetime`'s - :meth:`~datetime.datetime.utcnow` and - :meth:`~datetime.datetime.utcfromtimestamp` are deprecated and will be - removed in a future version. Instead, use timezone-aware objects to represent - datetimes in UTC: respectively, call - :meth:`~datetime.datetime.now` and - :meth:`~datetime.datetime.fromtimestamp` with the *tz* parameter set to - :attr:`datetime.UTC`. - (Contributed by Paul Ganssle in :gh:`103857`.) - Pending Removal in Python 3.13 ------------------------------ @@ -1097,7 +1128,33 @@ APIs: Pending Removal in Python 3.14 ------------------------------ -* Deprecated the following :mod:`importlib.abc` classes, scheduled for removal in +* :mod:`argparse`: The *type*, *choices*, and *metavar* parameters + of :class:`!argparse.BooleanOptionalAction` are deprecated + and will be removed in 3.14. + (Contributed by Nikita Sobolev in :gh:`92248`.) 
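A minimal sketch of the remaining, non-deprecated way to use this action; the
``--cache`` option name and its default are arbitrary::

    import argparse

    parser = argparse.ArgumentParser()
    # Supply only the option name, the action and a default; *type*, *choices*
    # and *metavar* should no longer be passed for this action.
    parser.add_argument("--cache", action=argparse.BooleanOptionalAction,
                        default=True)

    print(parser.parse_args(["--no-cache"]))   # Namespace(cache=False)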
+ +* :mod:`ast`: The following :mod:`ast` features have been deprecated in documentation since + Python 3.8, now cause a :exc:`DeprecationWarning` to be emitted at runtime + when they are accessed or used, and will be removed in Python 3.14: + + * :class:`!ast.Num` + * :class:`!ast.Str` + * :class:`!ast.Bytes` + * :class:`!ast.NameConstant` + * :class:`!ast.Ellipsis` + + Use :class:`ast.Constant` instead. + (Contributed by Serhiy Storchaka in :gh:`90953`.) + +* :mod:`collections.abc`: Deprecated :class:`collections.abc.ByteString`. + Prefer :class:`Sequence` or :class:`collections.abc.Buffer`. + For use in typing, prefer a union, like ``bytes | bytearray``, or :class:`collections.abc.Buffer`. + (Contributed by Shantanu Jain in :gh:`91896`.) + +* :mod:`email`: Deprecated the *isdst* parameter in :func:`email.utils.localtime`. + (Contributed by Alan Williams in :gh:`72346`.) + +* :mod:`importlib.abc`: Deprecated the following classes, scheduled for removal in Python 3.14: * :class:`!importlib.abc.ResourceReader` @@ -1111,27 +1168,13 @@ Pending Removal in Python 3.14 (Contributed by Jason R. Coombs and Hugo van Kemenade in :gh:`93963`.) -* Deprecated :class:`collections.abc.ByteString`. - Prefer :class:`Sequence` or :class:`collections.abc.Buffer`. - For use in typing, prefer a union, like ``bytes | bytearray``, or :class:`collections.abc.Buffer`. - (Contributed by Shantanu Jain in :gh:`91896`.) - -* :class:`typing.ByteString`, deprecated since Python 3.9, now causes a - :exc:`DeprecationWarning` to be emitted when it is used. - -* Creating immutable types (:data:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable - bases using the C API. - -* Deprecated the *isdst* parameter in :func:`email.utils.localtime`. - (Contributed by Alan Williams in :gh:`72346`.) - -* ``__package__`` and ``__cached__`` will cease to be set or taken - into consideration by the import system (:gh:`97879`). - -* Testing the truth value of an :class:`xml.etree.ElementTree.Element` - is deprecated and will raise an exception in Python 3.14. +* :mod:`itertools`: The module had undocumented, inefficient, historically buggy, + and inconsistent support for copy, deepcopy, and pickle operations. + This will be removed in 3.14 for a significant reduction in code + volume and maintenance burden. + (Contributed by Raymond Hettinger in :gh:`101588`.) -* The default :mod:`multiprocessing` start method will change to a safer one on +* :mod:`multiprocessing`: The default :mod:`multiprocessing` start method will change to a safer one on Linux, BSDs, and other non-macOS POSIX platforms where ``'fork'`` is currently the default (:gh:`84559`). Adding a runtime warning about this was deemed too disruptive as the majority of code is not expected to care. Use the @@ -1139,47 +1182,35 @@ Pending Removal in Python 3.14 :func:`~multiprocessing.set_start_method` APIs to explicitly specify when your code *requires* ``'fork'``. See :ref:`multiprocessing-start-methods`. -* :mod:`pty` has two undocumented ``master_open()`` and ``slave_open()`` +* :mod:`pkgutil`: :func:`pkgutil.find_loader` and :func:`pkgutil.get_loader` + now raise :exc:`DeprecationWarning`; + use :func:`importlib.util.find_spec` instead. + (Contributed by Nikita Sobolev in :gh:`97850`.) + +* :mod:`pty`: The module has two undocumented ``master_open()`` and ``slave_open()`` functions that have been deprecated since Python 2 but only gained a proper :exc:`DeprecationWarning` in 3.12. Remove them in 3.14. 
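A minimal, Unix-only sketch of the usual replacement, :func:`pty.openpty`,
which returns an already connected master/slave pair of file descriptors (the
exact bytes read back depend on the terminal line discipline)::

    import os
    import pty

    master_fd, slave_fd = pty.openpty()   # instead of master_open()/slave_open()
    try:
        os.write(master_fd, b"ping\n")
        print(os.read(slave_fd, 1024))    # typically b'ping\n'
    finally:
        os.close(slave_fd)
        os.close(master_fd)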
-* :mod:`itertools` had undocumented, inefficient, historically buggy, - and inconsistent support for copy, deepcopy, and pickle operations. - This will be removed in 3.14 for a significant reduction in code - volume and maintenance burden. - (Contributed by Raymond Hettinger in :gh:`101588`.) - -* Accessing ``co_lnotab`` was deprecated in :pep:`626` since 3.10 - and was planned to be removed in 3.12 - but it only got a proper :exc:`DeprecationWarning` in 3.12. - May be removed in 3.14. - (Contributed by Nikita Sobolev in :gh:`101866`.) - -* The *onerror* argument of :func:`shutil.rmtree` is deprecated in 3.12, +* :mod:`shutil`: The *onerror* argument of :func:`shutil.rmtree` is deprecated in 3.12, and will be removed in 3.14. -* The *type*, *choices*, and *metavar* parameters - of :class:`!argparse.BooleanOptionalAction` are deprecated - and will be removed in 3.14. - (Contributed by Nikita Sobolev in :gh:`92248`.) +* :mod:`typing`: :class:`typing.ByteString`, deprecated since Python 3.9, now causes a + :exc:`DeprecationWarning` to be emitted when it is used. -* :func:`pkgutil.find_loader` and :func:`pkgutil.get_loader` - now raise :exc:`DeprecationWarning`; - use :func:`importlib.util.find_spec` instead. - (Contributed by Nikita Sobolev in :gh:`97850`.) +* :mod:`xml.etree.ElementTree`: Testing the truth value of an :class:`xml.etree.ElementTree.Element` + is deprecated and will raise an exception in Python 3.14. -* The following :mod:`ast` features have been deprecated in documentation since - Python 3.8, now cause a :exc:`DeprecationWarning` to be emitted at runtime - when they are accessed or used, and will be removed in Python 3.14: +* Creating immutable types (:c:macro:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable + bases using the C API. - * :class:`!ast.Num` - * :class:`!ast.Str` - * :class:`!ast.Bytes` - * :class:`!ast.NameConstant` - * :class:`!ast.Ellipsis` +* ``__package__`` and ``__cached__`` will cease to be set or taken + into consideration by the import system (:gh:`97879`). - Use :class:`ast.Constant` instead. - (Contributed by Serhiy Storchaka in :gh:`90953`.) +* Accessing ``co_lnotab`` was deprecated in :pep:`626` since 3.10 + and was planned to be removed in 3.12 + but it only got a proper :exc:`DeprecationWarning` in 3.12. + May be removed in 3.14. + (Contributed by Nikita Sobolev in :gh:`101866`.) Pending Removal in Future Versions ---------------------------------- @@ -1202,13 +1233,29 @@ although there is currently no date scheduled for their removal. Removed ======= -* Remove the ``distutils`` package. It was deprecated in Python 3.10 by +* ``asynchat`` and ``asyncore``: These two modules have been removed + according to the schedule in :pep:`594`, + having been deprecated in Python 3.6. + Use :mod:`asyncio` instead. + (Contributed by Nikita Sobolev in :gh:`96580`.) + +* :mod:`configparser`: Several names deprecated in the :mod:`configparser` way back in 3.2 have + been removed per :gh:`89336`: + + * :class:`configparser.ParsingError` no longer has a ``filename`` attribute + or argument. Use the ``source`` attribute and argument instead. + * :mod:`configparser` no longer has a ``SafeConfigParser`` class. Use the + shorter :class:`~configparser.ConfigParser` name instead. + * :class:`configparser.ConfigParser` no longer has a ``readfp`` method. + Use :meth:`~configparser.ConfigParser.read_file` instead. + +* ``distutils``: Remove the ``distutils`` package. It was deprecated in Python 3.10 by :pep:`632` "Deprecate distutils module". 
For projects still using ``distutils`` and cannot be updated to something else, the ``setuptools`` project can be installed: it still provides ``distutils``. (Contributed by Victor Stinner in :gh:`92584`.) -* Remove the bundled setuptools wheel from :mod:`ensurepip`, +* :mod:`ensurepip`: Remove the bundled setuptools wheel from :mod:`ensurepip`, and stop installing setuptools in environments created by :mod:`venv`. ``pip (>= 22.1)`` does not require setuptools to be installed in the @@ -1226,94 +1273,13 @@ Removed (Contributed by Pradyun Gedam in :gh:`95299`.) -* Removed many old deprecated :mod:`unittest` features: +* :mod:`enum`: Remove ``EnumMeta.__getattr__``, which is no longer needed for + enum attribute access. + (Contributed by Ethan Furman in :gh:`95083`.) - - A number of :class:`~unittest.TestCase` method aliases: - - ============================ =============================== =============== - Deprecated alias Method Name Deprecated in - ============================ =============================== =============== - ``failUnless`` :meth:`.assertTrue` 3.1 - ``failIf`` :meth:`.assertFalse` 3.1 - ``failUnlessEqual`` :meth:`.assertEqual` 3.1 - ``failIfEqual`` :meth:`.assertNotEqual` 3.1 - ``failUnlessAlmostEqual`` :meth:`.assertAlmostEqual` 3.1 - ``failIfAlmostEqual`` :meth:`.assertNotAlmostEqual` 3.1 - ``failUnlessRaises`` :meth:`.assertRaises` 3.1 - ``assert_`` :meth:`.assertTrue` 3.2 - ``assertEquals`` :meth:`.assertEqual` 3.2 - ``assertNotEquals`` :meth:`.assertNotEqual` 3.2 - ``assertAlmostEquals`` :meth:`.assertAlmostEqual` 3.2 - ``assertNotAlmostEquals`` :meth:`.assertNotAlmostEqual` 3.2 - ``assertRegexpMatches`` :meth:`.assertRegex` 3.2 - ``assertRaisesRegexp`` :meth:`.assertRaisesRegex` 3.2 - ``assertNotRegexpMatches`` :meth:`.assertNotRegex` 3.5 - ============================ =============================== =============== - - You can use https://github.com/isidentical/teyit to automatically modernise - your unit tests. - - - Undocumented and broken :class:`~unittest.TestCase` method - ``assertDictContainsSubset`` (deprecated in Python 3.2). - - - Undocumented :meth:`TestLoader.loadTestsFromModule - ` parameter *use_load_tests* - (deprecated and ignored since Python 3.2). - - - An alias of the :class:`~unittest.TextTestResult` class: - ``_TextTestResult`` (deprecated in Python 3.2). - - (Contributed by Serhiy Storchaka in :issue:`45162`.) - -* Several names deprecated in the :mod:`configparser` way back in 3.2 have - been removed per :gh:`89336`: - - * :class:`configparser.ParsingError` no longer has a ``filename`` attribute - or argument. Use the ``source`` attribute and argument instead. - * :mod:`configparser` no longer has a ``SafeConfigParser`` class. Use the - shorter :class:`~configparser.ConfigParser` name instead. - * :class:`configparser.ConfigParser` no longer has a ``readfp`` method. - Use :meth:`~configparser.ConfigParser.read_file` instead. - -* The following undocumented :mod:`sqlite3` features, deprecated in Python - 3.10, are now removed: - - * ``sqlite3.enable_shared_cache()`` - * ``sqlite3.OptimizedUnicode`` - - If a shared cache must be used, open the database in URI mode using the - ``cache=shared`` query parameter. - - The ``sqlite3.OptimizedUnicode`` text factory has been an alias for - :class:`str` since Python 3.3. Code that previously set the text factory to - ``OptimizedUnicode`` can either use ``str`` explicitly, or rely on the - default value which is also ``str``. - - (Contributed by Erlend E. Aasland in :gh:`92548`.) 
- -* ``smtpd`` has been removed according to the schedule in :pep:`594`, - having been deprecated in Python 3.4.7 and 3.5.4. - Use aiosmtpd_ PyPI module or any other - :mod:`asyncio`-based server instead. - (Contributed by Oleg Iarygin in :gh:`93243`.) - -.. _aiosmtpd: https://pypi.org/project/aiosmtpd/ - -* ``asynchat`` and ``asyncore`` have been removed - according to the schedule in :pep:`594`, - having been deprecated in Python 3.6. - Use :mod:`asyncio` instead. - (Contributed by Nikita Sobolev in :gh:`96580`.) - -* Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python - 3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) - function is a built-in function. Since Python 3.10, :func:`!_pyio.open` is - also a static method. - (Contributed by Victor Stinner in :gh:`94169`.) - -* Remove the :func:`!ssl.RAND_pseudo_bytes` function, deprecated in Python 3.6: - use :func:`os.urandom` or :func:`ssl.RAND_bytes` instead. - (Contributed by Victor Stinner in :gh:`94199`.) +* :mod:`ftplib`: Remove the ``FTP_TLS.ssl_version`` class attribute: use the + *context* parameter instead. + (Contributed by Victor Stinner in :gh:`94172`.) * :mod:`gzip`: Remove the ``filename`` attribute of :class:`gzip.GzipFile`, deprecated since Python 2.6, use the :attr:`~gzip.GzipFile.name` attribute @@ -1321,43 +1287,13 @@ Removed extension if it was not present. (Contributed by Victor Stinner in :gh:`94196`.) -* Remove the :func:`!ssl.match_hostname` function. - It was deprecated in Python 3.7. OpenSSL performs - hostname matching since Python 3.7, Python no longer uses the - :func:`!ssl.match_hostname` function. - (Contributed by Victor Stinner in :gh:`94199`.) - -* Remove the :func:`!locale.format` function, deprecated in Python 3.7: - use :func:`locale.format_string` instead. - (Contributed by Victor Stinner in :gh:`94226`.) - * :mod:`hashlib`: Remove the pure Python implementation of :func:`hashlib.pbkdf2_hmac()`, deprecated in Python 3.10. Python 3.10 and newer requires OpenSSL 1.1.1 (:pep:`644`): this OpenSSL version provides a C implementation of :func:`~hashlib.pbkdf2_hmac()` which is faster. (Contributed by Victor Stinner in :gh:`94199`.) -* :mod:`xml.etree.ElementTree`: Remove the ``ElementTree.Element.copy()`` method of the - pure Python implementation, deprecated in Python 3.10, use the - :func:`copy.copy` function instead. The C implementation of :mod:`xml.etree.ElementTree` - has no ``copy()`` method, only a ``__copy__()`` method. - (Contributed by Victor Stinner in :gh:`94383`.) - -* :mod:`zipimport`: Remove ``find_loader()`` and ``find_module()`` methods, - deprecated in Python 3.10: use the ``find_spec()`` method instead. See - :pep:`451` for the rationale. - (Contributed by Victor Stinner in :gh:`94379`.) - -* Remove the :func:`!ssl.wrap_socket` function, deprecated in Python 3.7: - instead, create a :class:`ssl.SSLContext` object and call its - :class:`ssl.SSLContext.wrap_socket` method. Any package that still uses - :func:`!ssl.wrap_socket` is broken and insecure. The function neither sends a - SNI TLS extension nor validates server hostname. Code is subject to `CWE-295 - `_: Improper Certificate - Validation. - (Contributed by Victor Stinner in :gh:`94199`.) - -* Many previously deprecated cleanups in :mod:`importlib` have now been +* :mod:`importlib`: Many previously deprecated cleanups in :mod:`importlib` have now been completed: * References to, and support for :meth:`!module_repr()` has been removed. 
@@ -1422,6 +1358,115 @@ Removed ``PY_COMPILED``, ``C_EXTENSION``, ``PY_RESOURCE``, ``PKG_DIRECTORY``, ``C_BUILTIN``, ``PY_FROZEN``, ``PY_CODERESOURCE``, ``IMP_HOOK``. +* :mod:`io`: Remove ``io.OpenWrapper`` and ``_pyio.OpenWrapper``, deprecated in Python + 3.10: just use :func:`open` instead. The :func:`open` (:func:`io.open`) + function is a built-in function. Since Python 3.10, :func:`!_pyio.open` is + also a static method. + (Contributed by Victor Stinner in :gh:`94169`.) + +* :mod:`locale`: Remove the :func:`!locale.format` function, deprecated in Python 3.7: + use :func:`locale.format_string` instead. + (Contributed by Victor Stinner in :gh:`94226`.) + +* ``smtpd``: The module has been removed according to the schedule in :pep:`594`, + having been deprecated in Python 3.4.7 and 3.5.4. + Use aiosmtpd_ PyPI module or any other + :mod:`asyncio`-based server instead. + (Contributed by Oleg Iarygin in :gh:`93243`.) + +.. _aiosmtpd: https://pypi.org/project/aiosmtpd/ + +* :mod:`sqlite3`: The following undocumented :mod:`sqlite3` features, deprecated in Python + 3.10, are now removed: + + * ``sqlite3.enable_shared_cache()`` + * ``sqlite3.OptimizedUnicode`` + + If a shared cache must be used, open the database in URI mode using the + ``cache=shared`` query parameter. + + The ``sqlite3.OptimizedUnicode`` text factory has been an alias for + :class:`str` since Python 3.3. Code that previously set the text factory to + ``OptimizedUnicode`` can either use ``str`` explicitly, or rely on the + default value which is also ``str``. + + (Contributed by Erlend E. Aasland in :gh:`92548`.) + +* :mod:`ssl`: + + * Remove the :func:`!ssl.RAND_pseudo_bytes` function, deprecated in Python 3.6: + use :func:`os.urandom` or :func:`ssl.RAND_bytes` instead. + (Contributed by Victor Stinner in :gh:`94199`.) + + * Remove the :func:`!ssl.match_hostname` function. + It was deprecated in Python 3.7. OpenSSL performs + hostname matching since Python 3.7, Python no longer uses the + :func:`!ssl.match_hostname` function. + (Contributed by Victor Stinner in :gh:`94199`.) + + * Remove the :func:`!ssl.wrap_socket` function, deprecated in Python 3.7: + instead, create a :class:`ssl.SSLContext` object and call its + :class:`ssl.SSLContext.wrap_socket` method. Any package that still uses + :func:`!ssl.wrap_socket` is broken and insecure. The function neither sends a + SNI TLS extension nor validates server hostname. Code is subject to `CWE-295 + `_: Improper Certificate + Validation. + (Contributed by Victor Stinner in :gh:`94199`.) 
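A minimal sketch of the replacement described above; ``example.org`` is a
placeholder host::

    import socket
    import ssl

    context = ssl.create_default_context()
    with socket.create_connection(("example.org", 443)) as sock:
        # server_hostname enables SNI and hostname verification, neither of
        # which the removed ssl.wrap_socket() ever performed.
        with context.wrap_socket(sock, server_hostname="example.org") as tls:
            print(tls.version())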
+ +* :mod:`unittest`: Removed many old deprecated :mod:`unittest` features: + + - A number of :class:`~unittest.TestCase` method aliases: + + ============================ =============================== =============== + Deprecated alias Method Name Deprecated in + ============================ =============================== =============== + ``failUnless`` :meth:`.assertTrue` 3.1 + ``failIf`` :meth:`.assertFalse` 3.1 + ``failUnlessEqual`` :meth:`.assertEqual` 3.1 + ``failIfEqual`` :meth:`.assertNotEqual` 3.1 + ``failUnlessAlmostEqual`` :meth:`.assertAlmostEqual` 3.1 + ``failIfAlmostEqual`` :meth:`.assertNotAlmostEqual` 3.1 + ``failUnlessRaises`` :meth:`.assertRaises` 3.1 + ``assert_`` :meth:`.assertTrue` 3.2 + ``assertEquals`` :meth:`.assertEqual` 3.2 + ``assertNotEquals`` :meth:`.assertNotEqual` 3.2 + ``assertAlmostEquals`` :meth:`.assertAlmostEqual` 3.2 + ``assertNotAlmostEquals`` :meth:`.assertNotAlmostEqual` 3.2 + ``assertRegexpMatches`` :meth:`.assertRegex` 3.2 + ``assertRaisesRegexp`` :meth:`.assertRaisesRegex` 3.2 + ``assertNotRegexpMatches`` :meth:`.assertNotRegex` 3.5 + ============================ =============================== =============== + + You can use https://github.com/isidentical/teyit to automatically modernise + your unit tests. + + - Undocumented and broken :class:`~unittest.TestCase` method + ``assertDictContainsSubset`` (deprecated in Python 3.2). + + - Undocumented :meth:`TestLoader.loadTestsFromModule + ` parameter *use_load_tests* + (deprecated and ignored since Python 3.2). + + - An alias of the :class:`~unittest.TextTestResult` class: + ``_TextTestResult`` (deprecated in Python 3.2). + + (Contributed by Serhiy Storchaka in :issue:`45162`.) + +* :mod:`webbrowser`: Remove support for obsolete browsers from :mod:`webbrowser`. + Removed browsers include: Grail, Mosaic, Netscape, Galeon, Skipstone, + Iceape, Firebird, and Firefox versions 35 and below (:gh:`102871`). + +* :mod:`xml.etree.ElementTree`: Remove the ``ElementTree.Element.copy()`` method of the + pure Python implementation, deprecated in Python 3.10, use the + :func:`copy.copy` function instead. The C implementation of :mod:`xml.etree.ElementTree` + has no ``copy()`` method, only a ``__copy__()`` method. + (Contributed by Victor Stinner in :gh:`94383`.) + +* :mod:`zipimport`: Remove ``find_loader()`` and ``find_module()`` methods, + deprecated in Python 3.10: use the ``find_spec()`` method instead. See + :pep:`451` for the rationale. + (Contributed by Victor Stinner in :gh:`94379`.) + * Removed the ``suspicious`` rule from the documentation Makefile, and removed ``Doc/tools/rstlint.py``, both in favor of `sphinx-lint `_. @@ -1435,15 +1480,6 @@ Removed (*ssl_context* in :mod:`imaplib`) instead. (Contributed by Victor Stinner in :gh:`94172`.) -* :mod:`ftplib`: Remove the ``FTP_TLS.ssl_version`` class attribute: use the - *context* parameter instead. - (Contributed by Victor Stinner in :gh:`94172`.) - -* Remove support for obsolete browsers from :mod:`webbrowser`. - Removed browsers include: Grail, Mosaic, Netscape, Galeon, Skipstone, - Iceape, Firebird, and Firefox versions 35 and below (:gh:`102871`). - - .. _whatsnew312-porting-to-python312: Porting to Python 3.12 @@ -1464,7 +1500,7 @@ Changes in the Python API * Removed ``randrange()`` functionality deprecated since Python 3.10. Formerly, ``randrange(10.0)`` losslessly converted to ``randrange(10)``. Now, it raises a - :exc:`TypeError`. Also, the exception raised for non-integral values such as + :exc:`TypeError`. 
Also, the exception raised for non-integer values such as ``randrange(10.5)`` or ``randrange('10')`` has been changed from :exc:`ValueError` to :exc:`TypeError`. This also prevents bugs where ``randrange(1e25)`` would silently select from a larger range than ``randrange(10**25)``. @@ -1630,7 +1666,7 @@ New Features inheriting or extending the base class size. - :c:func:`PyObject_GetTypeData` and :c:func:`PyType_GetTypeDataSize` added to allow access to subclass-specific instance data. - - :const:`Py_TPFLAGS_ITEMS_AT_END` and :c:func:`PyObject_GetItemData` + - :c:macro:`Py_TPFLAGS_ITEMS_AT_END` and :c:func:`PyObject_GetItemData` added to allow safely extending certain variable-sized types, including :c:var:`PyType_Type`. - :c:macro:`Py_RELATIVE_OFFSET` added to allow defining @@ -1647,20 +1683,20 @@ New Features :ref:`the vectorcall protocol ` was added to the :ref:`Limited API `: - * :const:`Py_TPFLAGS_HAVE_VECTORCALL` + * :c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` * :c:func:`PyVectorcall_NARGS` * :c:func:`PyVectorcall_Call` * :c:type:`vectorcallfunc` - The :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag is now removed from a class + The :c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` flag is now removed from a class when the class's :py:meth:`~object.__call__` method is reassigned. This makes vectorcall safe to use with mutable types (i.e. heap types - without the immutable flag, :const:`Py_TPFLAGS_IMMUTABLETYPE`). + without the immutable flag, :c:macro:`Py_TPFLAGS_IMMUTABLETYPE`). Mutable types that do not override :c:member:`~PyTypeObject.tp_call` now inherit the ``Py_TPFLAGS_HAVE_VECTORCALL`` flag. (Contributed by Petr Viktorin in :gh:`93274`.) - The :const:`Py_TPFLAGS_MANAGED_DICT` and :const:`Py_TPFLAGS_MANAGED_WEAKREF` + The :c:macro:`Py_TPFLAGS_MANAGED_DICT` and :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` flags have been added. This allows extensions classes to support object ``__dict__`` and weakrefs with less bookkeeping, using less memory and with faster access. @@ -1671,7 +1707,7 @@ New Features * :c:func:`PyObject_Vectorcall` * :c:func:`PyObject_VectorcallMethod` - * :const:`PY_VECTORCALL_ARGUMENTS_OFFSET` + * :c:macro:`PY_VECTORCALL_ARGUMENTS_OFFSET` This means that both the incoming and outgoing ends of the vector call protocol are now available in the :ref:`Limited API `. (Contributed @@ -1745,6 +1781,12 @@ New Features (Contributed by Eddie Elizondo in :gh:`84436`.) +* :pep:`684`: Added the new :c:func:`Py_NewInterpreterFromConfig` + function and :c:type:`PyInterpreterConfig`, which may be used + to create sub-interpreters with their own GILs. + (See :ref:`whatsnew312-pep684` for more info.) + (Contributed by Eric Snow in :gh:`104110`.) + * In the limited C API version 3.12, :c:func:`Py_INCREF` and :c:func:`Py_DECREF` functions are now implemented as opaque function calls to hide implementation details. @@ -1777,7 +1819,7 @@ Porting to Python 3.12 for example). * Add support of more formatting options (left aligning, octals, uppercase - hexadecimals, ``intmax_t``, ``ptrdiff_t``, ``wchar_t`` C + hexadecimals, :c:type:`intmax_t`, :c:type:`ptrdiff_t`, :c:type:`wchar_t` C strings, variable width and precision) in :c:func:`PyUnicode_FromFormat` and :c:func:`PyUnicode_FromFormatV`. (Contributed by Serhiy Storchaka in :gh:`98836`.) @@ -1793,13 +1835,13 @@ Porting to Python 3.12 (Contributed by Philip Georgi in :gh:`95504`.) 
* Extension classes wanting to add a ``__dict__`` or weak reference slot - should use :const:`Py_TPFLAGS_MANAGED_DICT` and - :const:`Py_TPFLAGS_MANAGED_WEAKREF` instead of ``tp_dictoffset`` and + should use :c:macro:`Py_TPFLAGS_MANAGED_DICT` and + :c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` instead of ``tp_dictoffset`` and ``tp_weaklistoffset``, respectively. The use of ``tp_dictoffset`` and ``tp_weaklistoffset`` is still supported, but does not fully support multiple inheritance (:gh:`95589`), and performance may be worse. - Classes declaring :const:`Py_TPFLAGS_MANAGED_DICT` should call + Classes declaring :c:macro:`Py_TPFLAGS_MANAGED_DICT` should call :c:func:`!_PyObject_VisitManagedDict` and :c:func:`!_PyObject_ClearManagedDict` to traverse and clear their instance's dictionaries. To clear weakrefs, call :c:func:`PyObject_ClearWeakRefs`, as before. @@ -1853,7 +1895,7 @@ Porting to Python 3.12 :c:member:`~PyTypeObject.tp_init` instead. - If the metaclass doesn't need to be instantiated from Python, set its ``tp_new`` to ``NULL`` using - the :const:`Py_TPFLAGS_DISALLOW_INSTANTIATION` flag. + the :c:macro:`Py_TPFLAGS_DISALLOW_INSTANTIATION` flag. This makes it acceptable for ``PyType_From*`` functions. - Avoid ``PyType_From*`` functions: if you don't need C-specific features @@ -1872,6 +1914,24 @@ Porting to Python 3.12 subinterpreter that they don't support (or haven't yet been loaded in). See :gh:`104668` for more info. +* :c:struct:`PyLongObject` has had its internals changed for better performance. + Although the internals of :c:struct:`PyLongObject` are private, they are used + by some extension modules. + The internal fields should no longer be accessed directly, instead the API + functions beginning ``PyLong_...`` should be used instead. + Two new *unstable* API functions are provided for efficient access to the + value of :c:struct:`PyLongObject`\s which fit into a single machine word: + + * :c:func:`PyUnstable_Long_IsCompact` + * :c:func:`PyUnstable_Long_CompactValue` + +* Custom allocators, set via :c:func:`PyMem_SetAllocator`, are now + required to be thread-safe, regardless of memory domain. Allocators + that don't have their own state, including "hooks", are not affected. + If your custom allocator is not already thread-safe and you need + guidance then please create a new GitHub issue + and CC ``@ericsnowcurrently``. + Deprecated ---------- @@ -1904,7 +1964,7 @@ Deprecated :c:type:`PyConfig` instead. (Contributed by Victor Stinner in :gh:`77782`.) -* Creating immutable types (:const:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable +* Creating immutable types (:c:macro:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable bases is deprecated and will be disabled in Python 3.14. * The ``structmember.h`` header is deprecated, though it continues to be diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index 479d08b24b112a..63cdee6cf1a4f3 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -3,8 +3,7 @@ What's New In Python 3.13 **************************** -:Release: |release| -:Date: |today| +:Editor: TBD .. Rules for maintenance: @@ -102,6 +101,13 @@ array It can be used instead of ``'u'`` type code, which is deprecated. (Contributed by Inada Naoki in :gh:`80480`.) +dbm +--- + +* Add :meth:`dbm.gnu.gdbm.clear` and :meth:`dbm.ndbm.ndbm.clear` methods that remove all items + from the database. + (Contributed by Dong-hee Na in :gh:`107122`.) + io -- @@ -111,6 +117,18 @@ and only logged in :ref:`Python Development Mode ` or on :ref:`Python built on debug mode `. 
(Contributed by Victor Stinner in :gh:`62948`.) +opcode +------ + +* Move ``opcode.ENABLE_SPECIALIZATION`` to ``_opcode.ENABLE_SPECIALIZATION``. + This field was added in 3.12, it was never documented and is not intended for + external usage. (Contributed by Irit Katriel in :gh:`105481`.) + +* Removed ``opcode.is_pseudo``, ``opcode.MIN_PSEUDO_OPCODE`` and + ``opcode.MAX_PSEUDO_OPCODE``, which were added in 3.12, were never + documented or exposed through ``dis``, and were not intended to be + used externally. + pathlib ------- @@ -144,36 +162,14 @@ typing Optimizations ============= - +* :func:`textwrap.indent` is now ~30% faster than before for large input. + (Contributed by Inada Naoki in :gh:`107369`.) Deprecated ========== -* :mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()`` - methods of the :class:`wave.Wave_read` and :class:`wave.Wave_write` classes. - They will be removed in Python 3.15. - (Contributed by Victor Stinner in :gh:`105096`.) -* :mod:`typing`: Creating a :class:`typing.NamedTuple` class using keyword arguments to denote - the fields (``NT = NamedTuple("NT", x=int, y=int)``) is deprecated, and will - be disallowed in Python 3.15. Use the class-based syntax or the functional - syntax instead. (Contributed by Alex Waygood in :gh:`105566`.) -* :mod:`typing`: When using the functional syntax to create a :class:`typing.NamedTuple` - class or a :class:`typing.TypedDict` class, failing to pass a value to the - 'fields' parameter (``NT = NamedTuple("NT")`` or ``TD = TypedDict("TD")``) is - deprecated. Passing ``None`` to the 'fields' parameter - (``NT = NamedTuple("NT", None)`` or ``TD = TypedDict("TD", None)``) is also - deprecated. Both will be disallowed in Python 3.15. To create a NamedTuple - class with 0 fields, use ``class NT(NamedTuple): pass`` or - ``NT = NamedTuple("NT", [])``. To create a TypedDict class with 0 fields, use - ``class TD(TypedDict): pass`` or ``TD = TypedDict("TD", {})``. - (Contributed by Alex Waygood in :gh:`105566` and :gh:`105570`.) -* :func:`typing.no_type_check_decorator` is deprecated, and scheduled for - removal in Python 3.15. After eight years in the :mod:`typing` module, it - has yet to be supported by any major type checkers. - (Contributed by Alex Waygood in :gh:`106309`.) - -* :mod:`array`'s ``'u'`` format code, deprecated in docs since Python 3.3, +* :mod:`array`: :mod:`array`'s ``'u'`` format code, deprecated in docs since Python 3.3, emits :exc:`DeprecationWarning` since 3.13 and will be removed in Python 3.16. Use the ``'w'`` format code instead. @@ -184,13 +180,45 @@ Deprecated Replace ``ctypes.ARRAY(item_type, size)`` with ``item_type * size``. (Contributed by Victor Stinner in :gh:`105733`.) -* The :mod:`getopt` and :mod:`optparse` modules are now +* :mod:`getopt` and :mod:`optparse` modules: They are now :term:`soft deprecated`: the :mod:`argparse` should be used for new projects. Previously, the :mod:`optparse` module was already deprecated, its removal was not scheduled, and no warnings was emitted: so there is no change in practice. (Contributed by Victor Stinner in :gh:`106535`.) +* :mod:`typing`: Creating a :class:`typing.NamedTuple` class using keyword arguments to denote + the fields (``NT = NamedTuple("NT", x=int, y=int)``) is deprecated, and will + be disallowed in Python 3.15. Use the class-based syntax or the functional + syntax instead. (Contributed by Alex Waygood in :gh:`105566`.) 
+ + * When using the functional syntax to create a :class:`typing.NamedTuple` + class or a :class:`typing.TypedDict` class, failing to pass a value to the + 'fields' parameter (``NT = NamedTuple("NT")`` or ``TD = TypedDict("TD")``) is + deprecated. Passing ``None`` to the 'fields' parameter + (``NT = NamedTuple("NT", None)`` or ``TD = TypedDict("TD", None)``) is also + deprecated. Both will be disallowed in Python 3.15. To create a NamedTuple + class with 0 fields, use ``class NT(NamedTuple): pass`` or + ``NT = NamedTuple("NT", [])``. To create a TypedDict class with 0 fields, use + ``class TD(TypedDict): pass`` or ``TD = TypedDict("TD", {})``. + (Contributed by Alex Waygood in :gh:`105566` and :gh:`105570`.) + + * :func:`typing.no_type_check_decorator` is deprecated, and scheduled for + removal in Python 3.15. After eight years in the :mod:`typing` module, it + has yet to be supported by any major type checkers. + (Contributed by Alex Waygood in :gh:`106309`.) + + * :data:`typing.AnyStr` is deprecated. In Python 3.16, it will be removed from + ``typing.__all__``, and a :exc:`DeprecationWarning` will be emitted when it + is imported or accessed. It will be removed entirely in Python 3.18. Use + the new :ref:`type parameter syntax ` instead. + (Contributed by Michael The in :gh:`107116`.) + +* :mod:`wave`: Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()`` + methods of the :class:`wave.Wave_read` and :class:`wave.Wave_write` classes. + They will be removed in Python 3.15. + (Contributed by Victor Stinner in :gh:`105096`.) + Pending Removal in Python 3.14 ------------------------------ @@ -327,7 +355,7 @@ Pending Removal in Python 3.15 Pending Removal in Python 3.16 ------------------------------ -* :class:`array.array` ``'u'`` type (``wchar_t``): +* :class:`array.array` ``'u'`` type (:c:type:`wchar_t`): use the ``'w'`` type instead (``Py_UCS4``). Pending Removal in Future Versions @@ -786,6 +814,13 @@ New Features always steals a reference to the value. (Contributed by Serhiy Storchaka in :gh:`86493`.) +* Added :c:func:`PyDict_GetItemRef` and :c:func:`PyDict_GetItemStringRef` + functions: similar to :c:func:`PyDict_GetItemWithError` but returning a + :term:`strong reference` instead of a :term:`borrowed reference`. Moreover, + these functions return -1 on error and so checking ``PyErr_Occurred()`` is + not needed. + (Contributed by Victor Stinner in :gh:`106004`.) + Porting to Python 3.13 ---------------------- @@ -793,8 +828,8 @@ Deprecated ---------- * Deprecate the old ``Py_UNICODE`` and ``PY_UNICODE_TYPE`` types: use directly - the ``wchar_t`` type instead. Since Python 3.3, ``Py_UNICODE`` and - ``PY_UNICODE_TYPE`` are just aliases to ``wchar_t``. + the :c:type:`wchar_t` type instead. Since Python 3.3, ``Py_UNICODE`` and + ``PY_UNICODE_TYPE`` are just aliases to :c:type:`wchar_t`. (Contributed by Victor Stinner in :gh:`105156`.) * Deprecate old Python initialization functions: @@ -828,6 +863,13 @@ Deprecated Removed ------- +* Remove many APIs (functions, macros, variables) with names prefixed by + ``_Py`` or ``_PY`` (considered as private API). If your project is affected + by one of these removals and you consider that the removed API should remain + available, please open a new issue to request a public C API and + add ``cc @vstinner`` to the issue to notify Victor Stinner. + (Contributed by Victor Stinner in :gh:`106320`.) + * Remove functions deprecated in Python 3.9. 
* ``PyEval_CallObject()``, ``PyEval_CallObjectWithKeywords()``: use @@ -917,13 +959,6 @@ Removed (Contributed by Victor Stinner in :gh:`105182`.) -* Remove the old private, undocumented and untested ``_PyGC_FINALIZED()`` macro - which was kept for backward compatibility with Python 3.8 and older: use - :c:func:`PyObject_GC_IsFinalized()` instead. The `pythoncapi-compat project - `__ can be used to get this - function on Python 3.8 and older. - (Contributed by Victor Stinner in :gh:`105268`.) - * Remove the old aliases to functions calling functions which were kept for backward compatibility with Python 3.8 provisional API: @@ -961,7 +996,7 @@ Removed Pending Removal in Python 3.14 ------------------------------ -* Creating immutable types (:data:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable +* Creating immutable types (:c:macro:`Py_TPFLAGS_IMMUTABLETYPE`) with mutable bases using the C API. * Global configuration variables: @@ -997,8 +1032,8 @@ Pending Removal in Python 3.15 * :c:func:`PyImport_ImportModuleNoBlock`: use :c:func:`PyImport_ImportModule`. * :c:func:`PyWeakref_GET_OBJECT`: use :c:func:`PyWeakref_GetRef` instead. * :c:func:`PyWeakref_GetObject`: use :c:func:`PyWeakref_GetRef` instead. -* :c:type:`!Py_UNICODE_WIDE` type: use ``wchar_t`` instead. -* :c:type:`Py_UNICODE` type: use ``wchar_t`` instead. +* :c:type:`!Py_UNICODE_WIDE` type: use :c:type:`wchar_t` instead. +* :c:type:`Py_UNICODE` type: use :c:type:`wchar_t` instead. * Python initialization functions: * :c:func:`PySys_ResetWarnOptions`: clear :data:`sys.warnoptions` and @@ -1017,7 +1052,7 @@ Pending Removal in Future Versions The following APIs were deprecated in earlier Python versions and will be removed, although there is currently no date scheduled for their removal. -* :const:`Py_TPFLAGS_HAVE_FINALIZE`: no needed since Python 3.8. +* :c:macro:`Py_TPFLAGS_HAVE_FINALIZE`: no needed since Python 3.8. * :c:func:`PyErr_Fetch`: use :c:func:`PyErr_GetRaisedException`. * :c:func:`PyErr_NormalizeException`: use :c:func:`PyErr_GetRaisedException`. * :c:func:`PyErr_Restore`: use :c:func:`PyErr_SetRaisedException`. diff --git a/Doc/whatsnew/3.2.rst b/Doc/whatsnew/3.2.rst index c3f7ef6e565995..ed1c1770fb0f51 100644 --- a/Doc/whatsnew/3.2.rst +++ b/Doc/whatsnew/3.2.rst @@ -424,7 +424,7 @@ protocols, the users must to be able access the environment using native strings even though the underlying platform may have a different convention. To bridge this gap, the :mod:`wsgiref` module has a new function, :func:`wsgiref.handlers.read_environ` for transcoding CGI variables from -:attr:`os.environ` into native strings and returning a new dictionary. +:data:`os.environ` into native strings and returning a new dictionary. .. seealso:: @@ -485,7 +485,7 @@ Some smaller changes made to the core Python language are: * The interpreter can now be started with a quiet option, ``-q``, to prevent the copyright and version information from being displayed in the interactive - mode. The option can be introspected using the :attr:`sys.flags` attribute: + mode. The option can be introspected using the :data:`sys.flags` attribute: .. code-block:: shell-session @@ -566,9 +566,9 @@ Some smaller changes made to the core Python language are: (See :issue:`4617`.) -* The internal :c:type:`structsequence` tool now creates subclasses of tuple. +* :ref:`Struct sequence types ` are now subclasses of tuple. 
This means that C structures like those returned by :func:`os.stat`, - :func:`time.gmtime`, and :attr:`sys.version_info` now work like a + :func:`time.gmtime`, and :data:`sys.version_info` now work like a :term:`named tuple` and now work with functions and methods that expect a tuple as an argument. This is a big step forward in making the C structures as flexible as their pure Python counterparts: @@ -598,7 +598,7 @@ Some smaller changes made to the core Python language are: module, or on the command line. A :exc:`ResourceWarning` is issued at interpreter shutdown if the - :data:`gc.garbage` list isn't empty, and if :attr:`gc.DEBUG_UNCOLLECTABLE` is + :data:`gc.garbage` list isn't empty, and if :const:`gc.DEBUG_UNCOLLECTABLE` is set, all uncollectable objects are printed. This is meant to make the programmer aware that their code contains object finalization issues. @@ -623,7 +623,7 @@ Some smaller changes made to the core Python language are: :class:`collections.Sequence` :term:`abstract base class`. As a result, the language will have a more uniform API. In addition, :class:`range` objects now support slicing and negative indices, even with values larger than - :attr:`sys.maxsize`. This makes *range* more interoperable with lists:: + :data:`sys.maxsize`. This makes *range* more interoperable with lists:: >>> range(0, 100, 2).count(10) 1 @@ -1007,13 +1007,13 @@ datetime and time after 1900. The new supported year range is from 1000 to 9999 inclusive. * Whenever a two-digit year is used in a time tuple, the interpretation has been - governed by :attr:`time.accept2dyear`. The default is ``True`` which means that + governed by :data:`time.accept2dyear`. The default is ``True`` which means that for a two-digit year, the century is guessed according to the POSIX rules governing the ``%y`` strptime format. Starting with Py3.2, use of the century guessing heuristic will emit a :exc:`DeprecationWarning`. Instead, it is recommended that - :attr:`time.accept2dyear` be set to ``False`` so that large date ranges + :data:`time.accept2dyear` be set to ``False`` so that large date ranges can be used without guesswork:: >>> import time, warnings @@ -1031,7 +1031,7 @@ datetime and time 'Fri Jan 1 12:34:56 11' Several functions now have significantly expanded date ranges. When - :attr:`time.accept2dyear` is false, the :func:`time.asctime` function will + :data:`time.accept2dyear` is false, the :func:`time.asctime` function will accept any year that fits in a C int, while the :func:`time.mktime` and :func:`time.strftime` functions will accept the full range supported by the corresponding operating system functions. @@ -1194,11 +1194,11 @@ can be set to "$" for the shell-style formatting provided by If no configuration is set-up before a logging event occurs, there is now a default configuration using a :class:`~logging.StreamHandler` directed to -:attr:`sys.stderr` for events of ``WARNING`` level or higher. Formerly, an +:data:`sys.stderr` for events of ``WARNING`` level or higher. Formerly, an event occurring before a configuration was set-up would either raise an exception or silently drop the event depending on the value of -:attr:`logging.raiseExceptions`. The new default handler is stored in -:attr:`logging.lastResort`. +:data:`logging.raiseExceptions`. The new default handler is stored in +:data:`logging.lastResort`. The use of filters has been simplified. 
Instead of creating a :class:`~logging.Filter` object, the predicate can be any Python callable that @@ -1300,7 +1300,7 @@ values are equal (:issue:`8188`):: hash(Decimal("1.5")) == hash(complex(1.5, 0)) Some of the hashing details are exposed through a new attribute, -:attr:`sys.hash_info`, which describes the bit width of the hash value, the +:data:`sys.hash_info`, which describes the bit width of the hash value, the prime modulus, the hash values for *infinity* and *nan*, and the multiplier used for the imaginary part of a number: @@ -1388,7 +1388,7 @@ select ------ The :mod:`select` module now exposes a new, constant attribute, -:attr:`~select.PIPE_BUF`, which gives the minimum number of bytes which are +:const:`~select.PIPE_BUF`, which gives the minimum number of bytes which are guaranteed not to block when :func:`select.select` says a pipe is ready for writing. @@ -1529,7 +1529,7 @@ filenames: b'Sehensw\xc3\xbcrdigkeiten' Some operating systems allow direct access to encoded bytes in the -environment. If so, the :attr:`os.supports_bytes_environ` constant will be +environment. If so, the :const:`os.supports_bytes_environ` constant will be true. For direct access to encoded environment variables (if available), @@ -1666,9 +1666,9 @@ for secure (encrypted, authenticated) internet connections: algorithm" error. * The version of OpenSSL being used is now accessible using the module - attributes :data:`ssl.OPENSSL_VERSION` (a string), - :data:`ssl.OPENSSL_VERSION_INFO` (a 5-tuple), and - :data:`ssl.OPENSSL_VERSION_NUMBER` (an integer). + attributes :const:`ssl.OPENSSL_VERSION` (a string), + :const:`ssl.OPENSSL_VERSION_INFO` (a 5-tuple), and + :const:`ssl.OPENSSL_VERSION_NUMBER` (an integer). (Contributed by Antoine Pitrou in :issue:`8850`, :issue:`1589`, :issue:`8322`, :issue:`5639`, :issue:`4870`, :issue:`8484`, and :issue:`8321`.) @@ -2302,7 +2302,7 @@ turtledemo The demonstration code for the :mod:`turtle` module was moved from the *Demo* directory to main library. It includes over a dozen sample scripts with -lively displays. Being on :attr:`sys.path`, it can now be run directly +lively displays. Being on :data:`sys.path`, it can now be run directly from the command-line: .. code-block:: shell-session @@ -2566,7 +2566,7 @@ Changes to Python's build process and to the C API include: (:issue:`2443`). * A new C API function :c:func:`PySys_SetArgvEx` allows an embedded interpreter - to set :attr:`sys.argv` without also modifying :attr:`sys.path` + to set :data:`sys.argv` without also modifying :data:`sys.path` (:issue:`5753`). * :c:macro:`PyEval_CallObject` is now only available in macro form. The diff --git a/Doc/whatsnew/3.3.rst b/Doc/whatsnew/3.3.rst index 3dca7227a91c38..bcdc0222be6dea 100644 --- a/Doc/whatsnew/3.3.rst +++ b/Doc/whatsnew/3.3.rst @@ -648,7 +648,7 @@ PEP 421: Adding sys.implementation A new attribute on the :mod:`sys` module exposes details specific to the implementation of the currently running interpreter. The initial set of -attributes on :attr:`sys.implementation` are ``name``, ``version``, +attributes on :data:`sys.implementation` are ``name``, ``version``, ``hexversion``, and ``cache_tag``. The intention of ``sys.implementation`` is to consolidate into one namespace @@ -719,7 +719,7 @@ and does not enforce any method requirements. In terms of finders, :class:`importlib.machinery.FileFinder` exposes the mechanism used to search for source and bytecode files of a module. Previously -this class was an implicit member of :attr:`sys.path_hooks`. 
+this class was an implicit member of :data:`sys.path_hooks`. For loaders, the new abstract base class :class:`importlib.abc.FileLoader` helps write a loader that uses the file system as the storage mechanism for a module's @@ -735,7 +735,7 @@ provide the full name of the module now instead of just the tail end of the module's name. The :func:`importlib.invalidate_caches` function will now call the method with -the same name on all finders cached in :attr:`sys.path_importer_cache` to help +the same name on all finders cached in :data:`sys.path_importer_cache` to help clean up any stored state as necessary. Visible Changes @@ -745,8 +745,8 @@ For potential required changes to code, see the `Porting Python code`_ section. Beyond the expanse of what :mod:`importlib` now exposes, there are other -visible changes to import. The biggest is that :attr:`sys.meta_path` and -:attr:`sys.path_hooks` now store all of the meta path finders and path entry +visible changes to import. The biggest is that :data:`sys.meta_path` and +:data:`sys.path_hooks` now store all of the meta path finders and path entry hooks used by import. Previously the finders were implicit and hidden within the C code of import instead of being directly exposed. This means that one can now easily remove or change the order of the various finders to fit one's needs. @@ -761,9 +761,9 @@ Loaders are also now expected to set the ``__package__`` attribute from :pep:`366`. Once again, import itself is already setting this on all loaders from :mod:`importlib` and import itself is setting the attribute post-load. -``None`` is now inserted into :attr:`sys.path_importer_cache` when no finder -can be found on :attr:`sys.path_hooks`. Since :class:`!imp.NullImporter` is not -directly exposed on :attr:`sys.path_hooks` it could no longer be relied upon to +``None`` is now inserted into :data:`sys.path_importer_cache` when no finder +can be found on :data:`sys.path_hooks`. Since :class:`!imp.NullImporter` is not +directly exposed on :data:`sys.path_hooks` it could no longer be relied upon to always be available to use as a value representing no finder found. All other changes relate to semantic changes which should be taken into @@ -842,7 +842,7 @@ Builtin functions and types * :func:`open` gets a new *opener* parameter: the underlying file descriptor for the file object is then obtained by calling *opener* with (*file*, - *flags*). It can be used to use custom flags like :data:`os.O_CLOEXEC` for + *flags*). It can be used to use custom flags like :const:`os.O_CLOEXEC` for example. The ``'x'`` mode was added: open for exclusive creation, failing if the file already exists. * :func:`print`: added the *flush* keyword argument. If the *flush* keyword @@ -1127,7 +1127,7 @@ Features * If Python is compiled without threads, the C version automatically disables the expensive thread local context machinery. In this case, - the variable :data:`~decimal.HAVE_THREADS` is set to ``False``. + the variable :const:`~decimal.HAVE_THREADS` is set to ``False``. 
API changes ~~~~~~~~~~~ @@ -1135,20 +1135,20 @@ API changes * The C module has the following context limits, depending on the machine architecture: - +-------------------+---------------------+------------------------------+ - | | 32-bit | 64-bit | - +===================+=====================+==============================+ - | :const:`MAX_PREC` | :const:`425000000` | :const:`999999999999999999` | - +-------------------+---------------------+------------------------------+ - | :const:`MAX_EMAX` | :const:`425000000` | :const:`999999999999999999` | - +-------------------+---------------------+------------------------------+ - | :const:`MIN_EMIN` | :const:`-425000000` | :const:`-999999999999999999` | - +-------------------+---------------------+------------------------------+ + +-------------------+----------------+-------------------------+ + | | 32-bit | 64-bit | + +===================+================+=========================+ + | :const:`MAX_PREC` | ``425000000`` | ``999999999999999999`` | + +-------------------+----------------+-------------------------+ + | :const:`MAX_EMAX` | ``425000000`` | ``999999999999999999`` | + +-------------------+----------------+-------------------------+ + | :const:`MIN_EMIN` | ``-425000000`` | ``-999999999999999999`` | + +-------------------+----------------+-------------------------+ * In the context templates (:class:`~decimal.DefaultContext`, :class:`~decimal.BasicContext` and :class:`~decimal.ExtendedContext`) the magnitude of :attr:`~decimal.Context.Emax` and - :attr:`~decimal.Context.Emin` has changed to :const:`999999`. + :attr:`~decimal.Context.Emin` has changed to ``999999``. * The :class:`~decimal.Decimal` constructor in decimal.py does not observe the context limits and converts values with arbitrary exponents or precision @@ -1576,8 +1576,8 @@ os -- * The :mod:`os` module has a new :func:`~os.pipe2` function that makes it - possible to create a pipe with :data:`~os.O_CLOEXEC` or - :data:`~os.O_NONBLOCK` flags set atomically. This is especially useful to + possible to create a pipe with :const:`~os.O_CLOEXEC` or + :const:`~os.O_NONBLOCK` flags set atomically. This is especially useful to avoid race conditions in multi-threaded programs. * The :mod:`os` module has a new :func:`~os.sendfile` function which provides @@ -1691,9 +1691,9 @@ os * Some platforms now support additional constants for the :func:`~os.lseek` function, such as ``os.SEEK_HOLE`` and ``os.SEEK_DATA``. -* New constants :data:`~os.RTLD_LAZY`, :data:`~os.RTLD_NOW`, - :data:`~os.RTLD_GLOBAL`, :data:`~os.RTLD_LOCAL`, :data:`~os.RTLD_NODELETE`, - :data:`~os.RTLD_NOLOAD`, and :data:`~os.RTLD_DEEPBIND` are available on +* New constants :const:`~os.RTLD_LAZY`, :const:`~os.RTLD_NOW`, + :const:`~os.RTLD_GLOBAL`, :const:`~os.RTLD_LOCAL`, :const:`~os.RTLD_NODELETE`, + :const:`~os.RTLD_NOLOAD`, and :const:`~os.RTLD_DEEPBIND` are available on platforms that support them. These are for use with the :func:`sys.setdlopenflags` function, and supersede the similar constants defined in :mod:`ctypes` and :mod:`DLFCN`. (Contributed by Victor Stinner @@ -1952,7 +1952,7 @@ ssl * You can query the SSL compression algorithm used by an SSL socket, thanks to its new :meth:`~ssl.SSLSocket.compression` method. The new attribute - :attr:`~ssl.OP_NO_COMPRESSION` can be used to disable compression. + :const:`~ssl.OP_NO_COMPRESSION` can be used to disable compression. (Contributed by Antoine Pitrou in :issue:`13634`.) 
* Support has been added for the Next Protocol Negotiation extension using @@ -1966,7 +1966,7 @@ ssl * The :func:`~ssl.get_server_certificate` function now supports IPv6. (Contributed by Charles-François Natali in :issue:`11811`.) -* New attribute :attr:`~ssl.OP_CIPHER_SERVER_PREFERENCE` allows setting +* New attribute :const:`~ssl.OP_CIPHER_SERVER_PREFERENCE` allows setting SSLv3 server sockets to use the server's cipher ordering preference rather than the client's (:issue:`13635`). @@ -1984,7 +1984,7 @@ the form '-rwxrwxrwx'. struct ------ -The :mod:`struct` module now supports ``ssize_t`` and ``size_t`` via the +The :mod:`struct` module now supports :c:type:`ssize_t` and :c:type:`size_t` via the new codes ``n`` and ``N``, respectively. (Contributed by Antoine Pitrou in :issue:`3163`.) @@ -1995,7 +1995,7 @@ subprocess Command strings can now be bytes objects on posix platforms. (Contributed by Victor Stinner in :issue:`8513`.) -A new constant :data:`~subprocess.DEVNULL` allows suppressing output in a +A new constant :const:`~subprocess.DEVNULL` allows suppressing output in a platform-independent fashion. (Contributed by Ross Lagerwall in :issue:`5870`.) @@ -2141,7 +2141,7 @@ New attribute :attr:`zlib.Decompress.eof` makes it possible to distinguish between a properly formed compressed stream and an incomplete or truncated one. (Contributed by Nadeem Vawda in :issue:`12646`.) -New attribute :attr:`zlib.ZLIB_RUNTIME_VERSION` reports the version string of +New attribute :const:`zlib.ZLIB_RUNTIME_VERSION` reports the version string of the underlying ``zlib`` library that is loaded at runtime. (Contributed by Torsten Landschoff in :issue:`12306`.) @@ -2195,7 +2195,7 @@ Changes to Python's build process and to the C API include: * :c:func:`PyUnicode_AsUCS4`, :c:func:`PyUnicode_AsUCS4Copy` * :c:macro:`PyUnicode_DATA`, :c:macro:`PyUnicode_1BYTE_DATA`, :c:macro:`PyUnicode_2BYTE_DATA`, :c:macro:`PyUnicode_4BYTE_DATA` - * :c:macro:`PyUnicode_KIND` with :c:type:`PyUnicode_Kind` enum: + * :c:macro:`PyUnicode_KIND` with :c:enum:`PyUnicode_Kind` enum: :c:data:`PyUnicode_WCHAR_KIND`, :c:data:`PyUnicode_1BYTE_KIND`, :c:data:`PyUnicode_2BYTE_KIND`, :c:data:`PyUnicode_4BYTE_KIND` * :c:macro:`PyUnicode_READ`, :c:macro:`PyUnicode_READ_CHAR`, :c:macro:`PyUnicode_WRITE` @@ -2378,16 +2378,16 @@ Porting Python code * :func:`__import__` no longer allows one to use an index value other than 0 for top-level modules. E.g. ``__import__('sys', level=1)`` is now an error. -* Because :attr:`sys.meta_path` and :attr:`sys.path_hooks` now have finders on +* Because :data:`sys.meta_path` and :data:`sys.path_hooks` now have finders on them by default, you will most likely want to use :meth:`list.insert` instead of :meth:`list.append` to add to those lists. -* Because ``None`` is now inserted into :attr:`sys.path_importer_cache`, if you +* Because ``None`` is now inserted into :data:`sys.path_importer_cache`, if you are clearing out entries in the dictionary of paths that do not have a finder, you will need to remove keys paired with values of ``None`` **and** :class:`!imp.NullImporter` to be backwards-compatible. This will lead to extra overhead on older versions of Python that re-insert ``None`` into - :attr:`sys.path_importer_cache` where it represents the use of implicit + :data:`sys.path_importer_cache` where it represents the use of implicit finders, but semantically it should not change anything. 
* :class:`!importlib.abc.Finder` no longer specifies a ``find_module()`` abstract @@ -2445,7 +2445,7 @@ Porting Python code error instead of sleeping forever. It has always raised an error on posix. * The ``ast.__version__`` constant has been removed. If you need to - make decisions affected by the AST version, use :attr:`sys.version_info` + make decisions affected by the AST version, use :data:`sys.version_info` to make the decision. * Code that used to work around the fact that the :mod:`threading` module used diff --git a/Doc/whatsnew/3.4.rst b/Doc/whatsnew/3.4.rst index f3a8873747a3ed..794271f3c32b89 100644 --- a/Doc/whatsnew/3.4.rst +++ b/Doc/whatsnew/3.4.rst @@ -775,7 +775,7 @@ of a given opcode and argument, information that is not otherwise available. doctest ------- -A new :ref:`option flag `, :data:`~doctest.FAIL_FAST`, halts +A new :ref:`option flag `, :const:`~doctest.FAIL_FAST`, halts test running as soon as the first failure is detected. (Contributed by R. David Murray and Daniel Urban in :issue:`16522`.) @@ -841,7 +841,7 @@ for example, if the file might have been changed and re-checked in less time than the resolution of a particular filesystem's file modification time field. (Contributed by Mark Levitt in :issue:`18149`.) -New module attribute :data:`~filecmp.DEFAULT_IGNORES` provides the list of +New module attribute :const:`~filecmp.DEFAULT_IGNORES` provides the list of directories that are used as the default value for the *ignore* parameter of the :func:`~filecmp.dircmp` function. (Contributed by Eli Bendersky in :issue:`15442`.) @@ -1189,7 +1189,7 @@ Windows). (Contributed by Brian Curtin in :issue:`11939`.) root on Windows. (Contributed by Tim Golden in :issue:`9035`.) :func:`os.open` supports two new flags on platforms that provide them, -:data:`~os.O_PATH` (un-opened file descriptor), and :data:`~os.O_TMPFILE` +:const:`~os.O_PATH` (un-opened file descriptor), and :const:`~os.O_TMPFILE` (unnamed temporary file; as of 3.4.0 release available only on Linux systems with a kernel version of 3.11 or newer that have uapi headers). (Contributed by Christian Heimes in :issue:`18673` and Benjamin Peterson, respectively.) @@ -1238,8 +1238,8 @@ plistlib stdlib serialization protocols, with new :func:`~plistlib.load`, :func:`~plistlib.dump`, :func:`~plistlib.loads`, and :func:`~plistlib.dumps` functions. (The older API is now deprecated.) In addition to the already -supported XML plist format (:data:`~plistlib.FMT_XML`), it also now supports -the binary plist format (:data:`~plistlib.FMT_BINARY`). (Contributed by Ronald +supported XML plist format (:const:`~plistlib.FMT_XML`), it also now supports +the binary plist format (:const:`~plistlib.FMT_BINARY`). (Contributed by Ronald Oussoren and others in :issue:`14455`.) @@ -1323,14 +1323,14 @@ ability to query or set the resource limits for processes other than the one making the call. (Contributed by Christian Heimes in :issue:`16595`.) On Linux kernel version 2.6.36 or later, there are also some new -Linux specific constants: :attr:`~resource.RLIMIT_MSGQUEUE`, -:attr:`~resource.RLIMIT_NICE`, :attr:`~resource.RLIMIT_RTPRIO`, -:attr:`~resource.RLIMIT_RTTIME`, and :attr:`~resource.RLIMIT_SIGPENDING`. +Linux specific constants: :const:`~resource.RLIMIT_MSGQUEUE`, +:const:`~resource.RLIMIT_NICE`, :const:`~resource.RLIMIT_RTPRIO`, +:const:`~resource.RLIMIT_RTTIME`, and :const:`~resource.RLIMIT_SIGPENDING`. (Contributed by Christian Heimes in :issue:`19324`.) 
On FreeBSD version 9 and later, there some new FreeBSD specific constants: -:attr:`~resource.RLIMIT_SBSIZE`, :attr:`~resource.RLIMIT_SWAP`, and -:attr:`~resource.RLIMIT_NPTS`. (Contributed by Claudiu Popa in +:const:`~resource.RLIMIT_SBSIZE`, :const:`~resource.RLIMIT_SWAP`, and +:const:`~resource.RLIMIT_NPTS`. (Contributed by Claudiu Popa in :issue:`19343`.) @@ -1388,7 +1388,7 @@ try/except statement by code that only cares whether or not an error occurred. socket ------ -The socket module now supports the :data:`~socket.CAN_BCM` protocol on +The socket module now supports the :const:`~socket.CAN_BCM` protocol on platforms that support it. (Contributed by Brian Thorne in :issue:`15359`.) Socket objects have new methods to get or set their :ref:`inheritable flag @@ -1399,7 +1399,7 @@ The ``socket.AF_*`` and ``socket.SOCK_*`` constants are now enumeration values using the new :mod:`enum` module. This allows meaningful names to be printed during debugging, instead of integer "magic numbers". -The :data:`~socket.AF_LINK` constant is now available on BSD and OSX. +The :const:`~socket.AF_LINK` constant is now available on BSD and OSX. :func:`~socket.inet_pton` and :func:`~socket.inet_ntop` are now supported on Windows. (Contributed by Atsuo Ishimoto in :issue:`7171`.) @@ -1460,8 +1460,8 @@ Heimes in :issue:`18147`.) If OpenSSL 0.9.8 or later is available, :class:`~ssl.SSLContext` has a new attribute :attr:`~ssl.SSLContext.verify_flags` that can be used to control the certificate verification process by setting it to some combination of the new -constants :data:`~ssl.VERIFY_DEFAULT`, :data:`~ssl.VERIFY_CRL_CHECK_LEAF`, -:data:`~ssl.VERIFY_CRL_CHECK_CHAIN`, or :data:`~ssl.VERIFY_X509_STRICT`. +constants :const:`~ssl.VERIFY_DEFAULT`, :const:`~ssl.VERIFY_CRL_CHECK_LEAF`, +:const:`~ssl.VERIFY_CRL_CHECK_CHAIN`, or :const:`~ssl.VERIFY_X509_STRICT`. OpenSSL does not do any CRL verification by default. (Contributed by Christien Heimes in :issue:`8813`.) @@ -1500,7 +1500,7 @@ implementation is required as most of the values aren't standardized and are platform-dependent. (Contributed by Christian Heimes in :issue:`11016`.) The module supports new :mod:`~stat.ST_MODE` flags, :mod:`~stat.S_IFDOOR`, -:attr:`~stat.S_IFPORT`, and :attr:`~stat.S_IFWHT`. (Contributed by +:const:`~stat.S_IFPORT`, and :const:`~stat.S_IFWHT`. (Contributed by Christian Hiemes in :issue:`11016`.) @@ -1849,7 +1849,7 @@ Python's default implementation to a SipHash implementation on platforms that have a 64 bit data type. Any performance differences in comparison with the older FNV algorithm are trivial. -The PEP adds additional fields to the :attr:`sys.hash_info` named tuple to +The PEP adds additional fields to the :data:`sys.hash_info` named tuple to describe the hash algorithm in use by the currently executing binary. Otherwise, the PEP does not alter any existing CPython APIs. diff --git a/Doc/whatsnew/3.5.rst b/Doc/whatsnew/3.5.rst index ccf71bf08e8608..86bfdc4d478a02 100644 --- a/Doc/whatsnew/3.5.rst +++ b/Doc/whatsnew/3.5.rst @@ -478,7 +478,7 @@ not make an additional system call:: PEP 475: Retry system calls failing with EINTR ---------------------------------------------- -An :py:data:`errno.EINTR` error code is returned whenever a system call, that +An :py:const:`errno.EINTR` error code is returned whenever a system call, that is waiting for I/O, is interrupted by a signal. Previously, Python would raise :exc:`InterruptedError` in such cases. 
This meant that, when writing a Python application, the developer had two choices: @@ -527,7 +527,7 @@ by a signal: :func:`~os.writev`; * special cases: :func:`os.close` and :func:`os.dup2` now ignore - :py:data:`~errno.EINTR` errors; the syscall is not retried (see the PEP + :py:const:`~errno.EINTR` errors; the syscall is not retried (see the PEP for the rationale); * :mod:`select` functions: :func:`devpoll.poll() `, @@ -1045,8 +1045,8 @@ not just sequences. (Contributed by Serhiy Storchaka in :issue:`23171`.) curses ------ -The new :func:`~curses.update_lines_cols` function updates the :envvar:`LINES` -and :envvar:`COLS` environment variables. This is useful for detecting +The new :func:`~curses.update_lines_cols` function updates the :data:`LINES` +and :data:`COLS` module variables. This is useful for detecting manual screen resizing. (Contributed by Arnon Yaari in :issue:`4254`.) @@ -1498,7 +1498,7 @@ use ``/dev/urandom`` and avoiding failures due to potential file descriptor exhaustion. (Contributed by Victor Stinner in :issue:`22181`.) New :func:`~os.get_blocking` and :func:`~os.set_blocking` functions allow -getting and setting a file descriptor's blocking mode (:data:`~os.O_NONBLOCK`.) +getting and setting a file descriptor's blocking mode (:const:`~os.O_NONBLOCK`.) (Contributed by Victor Stinner in :issue:`22054`.) The :func:`~os.truncate` and :func:`~os.ftruncate` functions are now supported @@ -1783,7 +1783,7 @@ the TLS handshake. The new :meth:`SSLSocket.selected_alpn_protocol() ` returns the protocol that was selected during the TLS handshake. -The :data:`~ssl.HAS_ALPN` flag indicates whether ALPN support is present. +The :const:`~ssl.HAS_ALPN` flag indicates whether ALPN support is present. Other Changes @@ -2192,7 +2192,7 @@ encode error with ``\N{...}`` escapes. (Contributed by Serhiy Storchaka in :issue:`19676`.) A new :c:func:`PyErr_FormatV` function similar to :c:func:`PyErr_Format`, -but accepts a ``va_list`` argument. +but accepts a :c:type:`va_list` argument. (Contributed by Antoine Pitrou in :issue:`18711`.) A new :c:data:`PyExc_RecursionError` exception. @@ -2476,7 +2476,7 @@ Changes in the Python API in Python 3.5, all old ``.pyo`` files from previous versions of Python are invalid regardless of this PEP. -* The :mod:`socket` module now exports the :data:`~socket.CAN_RAW_FD_FRAMES` +* The :mod:`socket` module now exports the :const:`~socket.CAN_RAW_FD_FRAMES` constant on linux 3.6 and greater. * The :func:`ssl.cert_time_to_seconds` function now interprets the input time @@ -2533,7 +2533,7 @@ Changes in the C API * As part of the :pep:`492` implementation, the ``tp_reserved`` slot of :c:type:`PyTypeObject` was replaced with a - :c:member:`tp_as_async` slot. Refer to :ref:`coro-objects` for + :c:member:`~PyTypeObject.tp_as_async` slot. Refer to :ref:`coro-objects` for new types, structures and functions. diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst index 7d293c634f237b..4359a9012dd53c 100644 --- a/Doc/whatsnew/3.6.rst +++ b/Doc/whatsnew/3.6.rst @@ -650,8 +650,8 @@ compiled in release mode using ``PYTHONMALLOC=debug``. Effects of debug hooks: * Detect writes before the start of a buffer (buffer underflows) * Detect writes after the end of a buffer (buffer overflows) * Check that the :term:`GIL ` is held when allocator - functions of :c:data:`PYMEM_DOMAIN_OBJ` (ex: :c:func:`PyObject_Malloc`) and - :c:data:`PYMEM_DOMAIN_MEM` (ex: :c:func:`PyMem_Malloc`) domains are called. 
+ functions of :c:macro:`PYMEM_DOMAIN_OBJ` (ex: :c:func:`PyObject_Malloc`) and + :c:macro:`PYMEM_DOMAIN_MEM` (ex: :c:func:`PyMem_Malloc`) domains are called. Checking if the GIL is held is also a new feature of Python 3.6. @@ -1388,7 +1388,7 @@ are treated as punctuation. site ---- -When specifying paths to add to :attr:`sys.path` in a ``.pth`` file, +When specifying paths to add to :data:`sys.path` in a ``.pth`` file, you may now specify file paths on top of directories (e.g. zip files). (Contributed by Wolfgang Langner in :issue:`26587`). @@ -1404,7 +1404,7 @@ socket ------ The :func:`~socket.socket.ioctl` function now supports the -:data:`~socket.SIO_LOOPBACK_FAST_PATH` control code. +:const:`~socket.SIO_LOOPBACK_FAST_PATH` control code. (Contributed by Daniel Stokes in :issue:`26536`.) The :meth:`~socket.socket.getsockopt` constants ``SO_DOMAIN``, @@ -1416,7 +1416,7 @@ The :meth:`~socket.socket.setsockopt` now supports the (Contributed by Christian Heimes in :issue:`27744`.) The socket module now supports the address family -:data:`~socket.AF_ALG` to interface with Linux Kernel crypto API. ``ALG_*``, +:const:`~socket.AF_ALG` to interface with Linux Kernel crypto API. ``ALG_*``, ``SOL_ALG`` and :meth:`~socket.socket.sendmsg_afalg` were added. (Contributed by Christian Heimes in :issue:`27744` with support from Victor Stinner.) @@ -1822,7 +1822,7 @@ Optimizations up to 80% faster. (Contributed by Josh Snider in :issue:`26574`). * Allocator functions of the :c:func:`PyMem_Malloc` domain - (:c:data:`PYMEM_DOMAIN_MEM`) now use the :ref:`pymalloc memory allocator + (:c:macro:`PYMEM_DOMAIN_MEM`) now use the :ref:`pymalloc memory allocator ` instead of :c:func:`malloc` function of the C library. The pymalloc allocator is optimized for objects smaller or equal to 512 bytes with a short lifetime, and use :c:func:`malloc` for larger memory blocks. @@ -1874,8 +1874,8 @@ Build and C API Changes (Original patch by Alecsandru Patrascu of Intel in :issue:`26359`.) * The :term:`GIL ` must now be held when allocator - functions of :c:data:`PYMEM_DOMAIN_OBJ` (ex: :c:func:`PyObject_Malloc`) and - :c:data:`PYMEM_DOMAIN_MEM` (ex: :c:func:`PyMem_Malloc`) domains are called. + functions of :c:macro:`PYMEM_DOMAIN_OBJ` (ex: :c:func:`PyObject_Malloc`) and + :c:macro:`PYMEM_DOMAIN_MEM` (ex: :c:func:`PyMem_Malloc`) domains are called. * New :c:func:`Py_FinalizeEx` API which indicates if flushing buffered data failed. @@ -2010,7 +2010,7 @@ been deprecated in previous versions of Python in favour of :meth:`importlib.abc.Loader.exec_module`. The :class:`importlib.machinery.WindowsRegistryFinder` class is now -deprecated. As of 3.6.0, it is still added to :attr:`sys.meta_path` by +deprecated. As of 3.6.0, it is still added to :data:`sys.meta_path` by default (on Windows), but this may change in future releases. os diff --git a/Doc/whatsnew/3.7.rst b/Doc/whatsnew/3.7.rst index 24244ff17b1ea0..218a37cd264c7c 100644 --- a/Doc/whatsnew/3.7.rst +++ b/Doc/whatsnew/3.7.rst @@ -1280,13 +1280,13 @@ This function should be used instead of :func:`os.close` for better compatibility across platforms. (Contributed by Christian Heimes in :issue:`32454`.) -The :mod:`socket` module now exposes the :data:`socket.TCP_CONGESTION` -(Linux 2.6.13), :data:`socket.TCP_USER_TIMEOUT` (Linux 2.6.37), and -:data:`socket.TCP_NOTSENT_LOWAT` (Linux 3.12) constants. 
+The :mod:`socket` module now exposes the :const:`socket.TCP_CONGESTION` +(Linux 2.6.13), :const:`socket.TCP_USER_TIMEOUT` (Linux 2.6.37), and +:const:`socket.TCP_NOTSENT_LOWAT` (Linux 3.12) constants. (Contributed by Omar Sandoval in :issue:`26273` and Nathaniel J. Smith in :issue:`29728`.) -Support for :data:`socket.AF_VSOCK` sockets has been added to allow +Support for :const:`socket.AF_VSOCK` sockets has been added to allow communication between virtual machines and their hosts. (Contributed by Cathy Avery in :issue:`27584`.) @@ -1394,7 +1394,7 @@ subprocess The :func:`subprocess.run` function accepts the new *capture_output* keyword argument. When true, stdout and stderr will be captured. -This is equivalent to passing :data:`subprocess.PIPE` as *stdout* and +This is equivalent to passing :const:`subprocess.PIPE` as *stdout* and *stderr* arguments. (Contributed by Bo Bayles in :issue:`32102`.) @@ -1453,12 +1453,12 @@ time New clock identifiers have been added: -* :data:`time.CLOCK_BOOTTIME` (Linux): Identical to - :data:`time.CLOCK_MONOTONIC`, except it also includes any time that the +* :const:`time.CLOCK_BOOTTIME` (Linux): Identical to + :const:`time.CLOCK_MONOTONIC`, except it also includes any time that the system is suspended. -* :data:`time.CLOCK_PROF` (FreeBSD, NetBSD and OpenBSD): High-resolution +* :const:`time.CLOCK_PROF` (FreeBSD, NetBSD and OpenBSD): High-resolution per-process CPU timer. -* :data:`time.CLOCK_UPTIME` (FreeBSD, OpenBSD): Time whose absolute value is +* :const:`time.CLOCK_UPTIME` (FreeBSD, OpenBSD): Time whose absolute value is the time the system has been running and not suspended, providing accurate uptime measurement. @@ -1674,10 +1674,10 @@ The new :c:func:`import__find__load__start` and module imports. (Contributed by Christian Heimes in :issue:`31574`.) -The fields :c:member:`name` and :c:member:`doc` of structures +The fields :c:member:`!name` and :c:member:`!doc` of structures :c:type:`PyMemberDef`, :c:type:`PyGetSetDef`, :c:type:`PyStructSequence_Field`, :c:type:`PyStructSequence_Desc`, -and :c:type:`wrapperbase` are now of type ``const char *`` rather of +and :c:struct:`wrapperbase` are now of type ``const char *`` rather of ``char *``. (Contributed by Serhiy Storchaka in :issue:`28761`.) The result of :c:func:`PyUnicode_AsUTF8AndSize` and :c:func:`PyUnicode_AsUTF8` diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 16762817ab8250..c5fb5c53dfe350 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -1305,7 +1305,7 @@ Zackery Spytz in :issue:`25451`.) time ---- -Added new clock :data:`~time.CLOCK_UPTIME_RAW` for macOS 10.12. +Added new clock :const:`~time.CLOCK_UPTIME_RAW` for macOS 10.12. (Contributed by Joannah Nanjekye in :issue:`35702`.) @@ -1839,7 +1839,7 @@ Changes in Python behavior classes will affect their string representation. (Contributed by Serhiy Storchaka in :issue:`36793`.) -* On AIX, :attr:`sys.platform` doesn't contain the major version anymore. +* On AIX, :data:`sys.platform` doesn't contain the major version anymore. It is always ``'aix'``, instead of ``'aix3'`` .. ``'aix7'``. Since older Python versions include the version number, so it is recommended to always use ``sys.platform.startswith('aix')``. @@ -1850,7 +1850,7 @@ Changes in Python behavior finalizing, making them consistent with :c:func:`PyEval_RestoreThread`, :c:func:`Py_END_ALLOW_THREADS`, and :c:func:`PyGILState_Ensure`. If this behavior is not desired, guard the call by checking :c:func:`_Py_IsFinalizing` - or :c:func:`sys.is_finalizing`. 
+ or :func:`sys.is_finalizing`. (Contributed by Joannah Nanjekye in :issue:`36475`.) @@ -2061,8 +2061,8 @@ Changes in the C API * Remove :c:macro:`Py_INCREF` on the type object after allocating an instance - if any. - This may happen after calling :c:func:`PyObject_New`, - :c:func:`PyObject_NewVar`, :c:func:`PyObject_GC_New`, + This may happen after calling :c:macro:`PyObject_New`, + :c:macro:`PyObject_NewVar`, :c:func:`PyObject_GC_New`, :c:func:`PyObject_GC_NewVar`, or any other custom allocator that uses :c:func:`PyObject_Init` or :c:func:`PyObject_INIT`. @@ -2116,7 +2116,7 @@ Changes in the C API extension types across feature releases, anymore. A :c:type:`PyTypeObject` exported by a third-party extension module is supposed to have all the slots expected in the current Python version, including - :c:member:`~PyTypeObject.tp_finalize` (:const:`Py_TPFLAGS_HAVE_FINALIZE` + :c:member:`~PyTypeObject.tp_finalize` (:c:macro:`Py_TPFLAGS_HAVE_FINALIZE` is not checked anymore before reading :c:member:`~PyTypeObject.tp_finalize`). (Contributed by Antoine Pitrou in :issue:`32388`.) diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 976d72a0342510..3e3e0ff5c41f4f 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -427,8 +427,8 @@ digests. It skips MD5 on platforms that block MD5 digest. fcntl ----- -Added constants :data:`~fcntl.F_OFD_GETLK`, :data:`~fcntl.F_OFD_SETLK` -and :data:`~fcntl.F_OFD_SETLKW`. +Added constants :const:`~fcntl.F_OFD_GETLK`, :const:`~fcntl.F_OFD_SETLK` +and :const:`~fcntl.F_OFD_SETLKW`. (Contributed by Dong-hee Na in :issue:`38602`.) ftplib @@ -593,11 +593,11 @@ a non-blocking socket. (Contributed by Dong-hee Na in :issue:`39259`.) os -- -Added :data:`~os.CLD_KILLED` and :data:`~os.CLD_STOPPED` for :attr:`si_code`. +Added :const:`~os.CLD_KILLED` and :const:`~os.CLD_STOPPED` for :attr:`si_code`. (Contributed by Dong-hee Na in :issue:`38493`.) Exposed the Linux-specific :func:`os.pidfd_open` (:issue:`38692`) and -:data:`os.P_PIDFD` (:issue:`38713`) for process management with file +:const:`os.P_PIDFD` (:issue:`38713`) for process management with file descriptors. The :func:`os.unsetenv` function is now also available on Windows. @@ -669,11 +669,11 @@ a non-blocking socket. (Contributed by Dong-hee Na in :issue:`39259`.) socket ------ -The :mod:`socket` module now exports the :data:`~socket.CAN_RAW_JOIN_FILTERS` +The :mod:`socket` module now exports the :const:`~socket.CAN_RAW_JOIN_FILTERS` constant on Linux 4.1 and greater. (Contributed by Stefan Tatschner and Zackery Spytz in :issue:`25780`.) -The socket module now supports the :data:`~socket.CAN_J1939` protocol on +The socket module now supports the :const:`~socket.CAN_J1939` protocol on platforms that support it. (Contributed by Karl Ding in :issue:`40291`.) The socket module now has the :func:`socket.send_fds` and @@ -692,13 +692,13 @@ which has nanosecond resolution, rather than sys --- -Added a new :attr:`sys.platlibdir` attribute: name of the platform-specific +Added a new :data:`sys.platlibdir` attribute: name of the platform-specific library directory. It is used to build the path of standard library and the paths of installed extension modules. It is equal to ``"lib"`` on most platforms. On Fedora and SuSE, it is equal to ``"lib64"`` on 64-bit platforms. (Contributed by Jan Matějek, Matěj Cepl, Charalampos Stratakis and Victor Stinner in :issue:`1294959`.) -Previously, :attr:`sys.stderr` was block-buffered when non-interactive. 
Now +Previously, :data:`sys.stderr` was block-buffered when non-interactive. Now ``stderr`` defaults to always being line-buffered. (Contributed by Jendrik Seipp in :issue:`13601`.) @@ -1084,7 +1084,7 @@ Changes in the Python API ``__VENV_PROMPT__`` is set to ``""``. * The :meth:`select.epoll.unregister` method no longer ignores the - :data:`~errno.EBADF` error. + :const:`~errno.EBADF` error. (Contributed by Victor Stinner in :issue:`39239`.) * The *compresslevel* parameter of :class:`bz2.BZ2File` became keyword-only, @@ -1115,9 +1115,9 @@ Changes in the Python API ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. (Contributed by Batuhan Taskaya in :issue:`39562`) -* ``array('u')`` now uses ``wchar_t`` as C type instead of ``Py_UNICODE``. +* ``array('u')`` now uses :c:type:`wchar_t` as C type instead of ``Py_UNICODE``. This change doesn't affect to its behavior because ``Py_UNICODE`` is alias - of ``wchar_t`` since Python 3.3. + of :c:type:`wchar_t` since Python 3.3. (Contributed by Inada Naoki in :issue:`34538`.) * The :func:`logging.getLogger` API now returns the root logger when passed @@ -1226,8 +1226,8 @@ Build Changes ============= * Added ``--with-platlibdir`` option to the ``configure`` script: name of the - platform-specific library directory, stored in the new :attr:`sys.platlibdir` - attribute. See :attr:`sys.platlibdir` attribute for more information. + platform-specific library directory, stored in the new :data:`sys.platlibdir` + attribute. See :data:`sys.platlibdir` attribute for more information. (Contributed by Jan Matějek, Matěj Cepl, Charalampos Stratakis and Victor Stinner in :issue:`1294959`.) @@ -1276,7 +1276,7 @@ New Features * :pep:`573`: Added :c:func:`PyType_FromModuleAndSpec` to associate a module with a class; :c:func:`PyType_GetModule` and :c:func:`PyType_GetModuleState` to retrieve the module and its state; and - :c:data:`PyCMethod` and :c:data:`METH_METHOD` to allow a method to + :c:type:`PyCMethod` and :c:macro:`METH_METHOD` to allow a method to access the class it was defined in. (Contributed by Marcel Plch and Petr Viktorin in :issue:`38787`.) @@ -1389,8 +1389,8 @@ Porting to Python 3.9 * :c:func:`PyObject_IS_GC` macro was converted to a function. * The :c:func:`PyObject_NEW` macro becomes an alias to the - :c:func:`PyObject_New` macro, and the :c:func:`PyObject_NEW_VAR` macro - becomes an alias to the :c:func:`PyObject_NewVar` macro. They no longer + :c:macro:`PyObject_New` macro, and the :c:func:`PyObject_NEW_VAR` macro + becomes an alias to the :c:macro:`PyObject_NewVar` macro. They no longer access directly the :c:member:`PyTypeObject.tp_basicsize` member. * :c:func:`PyObject_GET_WEAKREFS_LISTPTR` macro was converted to a function: diff --git a/Grammar/Tokens b/Grammar/Tokens index 618ae811d824b0..20bb803b7d58a6 100644 --- a/Grammar/Tokens +++ b/Grammar/Tokens @@ -56,8 +56,6 @@ COLONEQUAL ':=' EXCLAMATION '!' 
OP -AWAIT -ASYNC TYPE_IGNORE TYPE_COMMENT SOFT_KEYWORD diff --git a/Grammar/python.gram b/Grammar/python.gram index c1863aec67cc2b..e7c817856d514b 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -127,11 +127,11 @@ simple_stmt[stmt_ty] (memo): | &'nonlocal' nonlocal_stmt compound_stmt[stmt_ty]: - | &('def' | '@' | ASYNC) function_def + | &('def' | '@' | 'async') function_def | &'if' if_stmt | &('class' | '@') class_def - | &('with' | ASYNC) with_stmt - | &('for' | ASYNC) for_stmt + | &('with' | 'async') with_stmt + | &('for' | 'async') for_stmt | &'try' try_stmt | &'while' while_stmt | match_stmt @@ -272,7 +272,7 @@ function_def_raw[stmt_ty]: _PyAST_FunctionDef(n->v.Name.id, (params) ? params : CHECK(arguments_ty, _PyPegen_empty_arguments(p)), b, NULL, a, NEW_TYPE_COMMENT(p, tc), t, EXTRA) } - | ASYNC 'def' n=NAME t=[type_params] &&'(' params=[params] ')' a=['->' z=expression { z }] &&':' tc=[func_type_comment] b=block { + | 'async' 'def' n=NAME t=[type_params] &&'(' params=[params] ')' a=['->' z=expression { z }] &&':' tc=[func_type_comment] b=block { CHECK_VERSION( stmt_ty, 5, @@ -385,7 +385,7 @@ for_stmt[stmt_ty]: | invalid_for_stmt | 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] { _PyAST_For(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | ASYNC 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] { + | 'async' 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] { CHECK_VERSION(stmt_ty, 5, "Async for loops are", _PyAST_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) } | invalid_for_target @@ -398,9 +398,9 @@ with_stmt[stmt_ty]: CHECK_VERSION(stmt_ty, 9, "Parenthesized context managers are", _PyAST_With(a, b, NULL, EXTRA)) } | 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { _PyAST_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | ASYNC 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' b=block { + | 'async' 'with' '(' a[asdl_withitem_seq*]=','.with_item+ ','? ')' ':' b=block { CHECK_VERSION(stmt_ty, 5, "Async with statements are", _PyAST_AsyncWith(a, b, NULL, EXTRA)) } - | ASYNC 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { + | 'async' 'with' a[asdl_withitem_seq*]=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { CHECK_VERSION(stmt_ty, 5, "Async with statements are", _PyAST_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) } | invalid_with_stmt @@ -814,7 +814,7 @@ power[expr_ty]: # Primary elements are things like "obj.something.something", "obj[something]", "obj(something)", "obj" ... 
await_primary[expr_ty] (memo): - | AWAIT a=primary { CHECK_VERSION(expr_ty, 5, "Await expressions are", _PyAST_Await(a, EXTRA)) } + | 'await' a=primary { CHECK_VERSION(expr_ty, 5, "Await expressions are", _PyAST_Await(a, EXTRA)) } | primary primary[expr_ty]: @@ -966,7 +966,7 @@ for_if_clauses[asdl_comprehension_seq*]: | a[asdl_comprehension_seq*]=for_if_clause+ { a } for_if_clause[comprehension_ty]: - | ASYNC 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* { + | 'async' 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* { CHECK_VERSION(comprehension_ty, 6, "Async comprehensions are", _PyAST_comprehension(a, b, c, 1, p->arena)) } | 'for' a=star_targets 'in' ~ b=disjunction c[asdl_expr_seq*]=('if' z=disjunction { z })* { _PyAST_comprehension(a, b, c, 0, p->arena) } @@ -1284,7 +1284,7 @@ invalid_with_item: RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) } invalid_for_target: - | ASYNC? 'for' a=star_expressions { + | 'async'? 'for' a=star_expressions { RAISE_SYNTAX_ERROR_INVALID_TARGET(FOR_TARGETS, a) } invalid_group: @@ -1301,12 +1301,12 @@ invalid_import_from_targets: RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") } invalid_with_stmt: - | [ASYNC] 'with' ','.(expression ['as' star_target])+ NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } - | [ASYNC] 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } + | ['async'] 'with' ','.(expression ['as' star_target])+ NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } + | ['async'] 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } invalid_with_stmt_indent: - | [ASYNC] a='with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT { + | ['async'] a='with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block after 'with' statement on line %d", a->lineno) } - | [ASYNC] a='with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT { + | ['async'] a='with' '(' ','.(expressions ['as' star_target])+ ','? 
')' ':' NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block after 'with' statement on line %d", a->lineno) } invalid_try_stmt: @@ -1367,11 +1367,11 @@ invalid_while_stmt: | a='while' named_expression ':' NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block after 'while' statement on line %d", a->lineno) } invalid_for_stmt: - | [ASYNC] 'for' star_targets 'in' star_expressions NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } - | [ASYNC] a='for' star_targets 'in' star_expressions ':' NEWLINE !INDENT { + | ['async'] 'for' star_targets 'in' star_expressions NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } + | ['async'] a='for' star_targets 'in' star_expressions ':' NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block after 'for' statement on line %d", a->lineno) } invalid_def_raw: - | [ASYNC] a='def' NAME '(' [params] ')' ['->' expression] ':' NEWLINE !INDENT { + | ['async'] a='def' NAME '(' [params] ')' ['->' expression] ':' NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block after function definition on line %d", a->lineno) } invalid_class_def_raw: | 'class' NAME ['(' [arguments] ')'] NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } diff --git a/Include/cpython/bytesobject.h b/Include/cpython/bytesobject.h index 0af4c83b1e5bc7..816823716e9a6f 100644 --- a/Include/cpython/bytesobject.h +++ b/Include/cpython/bytesobject.h @@ -15,18 +15,6 @@ typedef struct { } PyBytesObject; PyAPI_FUNC(int) _PyBytes_Resize(PyObject **, Py_ssize_t); -PyAPI_FUNC(PyObject*) _PyBytes_FormatEx( - const char *format, - Py_ssize_t format_len, - PyObject *args, - int use_bytearray); -PyAPI_FUNC(PyObject*) _PyBytes_FromHex( - PyObject *string, - int use_bytearray); - -/* Helper for PyBytes_DecodeEscape that detects invalid escape chars. */ -PyAPI_FUNC(PyObject *) _PyBytes_DecodeEscape(const char *, Py_ssize_t, - const char *, const char **); /* Macros and static inline functions, trading safety for speed */ #define _PyBytes_CAST(op) \ @@ -43,7 +31,3 @@ static inline Py_ssize_t PyBytes_GET_SIZE(PyObject *op) { return Py_SIZE(self); } #define PyBytes_GET_SIZE(self) PyBytes_GET_SIZE(_PyObject_CAST(self)) - -/* _PyBytes_Join(sep, x) is like sep.join(x). sep must be PyBytesObject*, - x must be an iterable object. */ -PyAPI_FUNC(PyObject *) _PyBytes_Join(PyObject *sep, PyObject *x); diff --git a/Include/cpython/code.h b/Include/cpython/code.h index 1b65b0d01d89f8..24c5ec23590c94 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -10,13 +10,13 @@ extern "C" { /* Count of all "real" monitoring events (not derived from other events) */ -#define PY_MONITORING_UNGROUPED_EVENTS 14 +#define _PY_MONITORING_UNGROUPED_EVENTS 15 /* Count of all monitoring events */ -#define PY_MONITORING_EVENTS 16 +#define _PY_MONITORING_EVENTS 17 /* Table of which tools are active for each monitored event. 
*/ typedef struct _Py_Monitors { - uint8_t tools[PY_MONITORING_UNGROUPED_EVENTS]; + uint8_t tools[_PY_MONITORING_UNGROUPED_EVENTS]; } _Py_Monitors; /* Each instruction in a code object is a fixed-width value, diff --git a/Include/cpython/compile.h b/Include/cpython/compile.h index fd52697840203a..e6cd39af2ba739 100644 --- a/Include/cpython/compile.h +++ b/Include/cpython/compile.h @@ -77,3 +77,5 @@ PyAPI_FUNC(int) PyUnstable_OpcodeHasFree(int opcode); PyAPI_FUNC(int) PyUnstable_OpcodeHasLocal(int opcode); PyAPI_FUNC(int) PyUnstable_OpcodeHasExc(int opcode); +PyAPI_FUNC(PyObject*) _PyUnstable_GetUnaryIntrinsicName(int index); +PyAPI_FUNC(PyObject*) _PyUnstable_GetBinaryIntrinsicName(int index); diff --git a/Include/cpython/descrobject.h b/Include/cpython/descrobject.h index e2ea1b9a2d3058..bbad8b59c225ab 100644 --- a/Include/cpython/descrobject.h +++ b/Include/cpython/descrobject.h @@ -57,8 +57,6 @@ typedef struct { void *d_wrapped; /* This can be any function pointer */ } PyWrapperDescrObject; -PyAPI_DATA(PyTypeObject) _PyMethodWrapper_Type; - PyAPI_FUNC(PyObject *) PyDescr_NewWrapper(PyTypeObject *, struct wrapperbase *, void *); PyAPI_FUNC(int) PyDescr_IsData(PyObject *); diff --git a/Include/cpython/dictobject.h b/Include/cpython/dictobject.h index ddada922020aa4..2a42794fdf0c85 100644 --- a/Include/cpython/dictobject.h +++ b/Include/cpython/dictobject.h @@ -34,7 +34,6 @@ typedef struct { PyAPI_FUNC(PyObject *) _PyDict_GetItem_KnownHash(PyObject *mp, PyObject *key, Py_hash_t hash); -PyAPI_FUNC(PyObject *) _PyDict_GetItemWithError(PyObject *dp, PyObject *key); PyAPI_FUNC(PyObject *) _PyDict_GetItemIdWithError(PyObject *dp, _Py_Identifier *key); PyAPI_FUNC(PyObject *) _PyDict_GetItemStringWithError(PyObject *, const char *); @@ -44,8 +43,7 @@ PyAPI_FUNC(int) _PyDict_SetItem_KnownHash(PyObject *mp, PyObject *key, PyObject *item, Py_hash_t hash); PyAPI_FUNC(int) _PyDict_DelItem_KnownHash(PyObject *mp, PyObject *key, Py_hash_t hash); -PyAPI_FUNC(int) _PyDict_DelItemIf(PyObject *mp, PyObject *key, - int (*predicate)(PyObject *value)); + PyAPI_FUNC(int) _PyDict_Next( PyObject *mp, Py_ssize_t *pos, PyObject **key, PyObject **value, Py_hash_t *hash); @@ -58,25 +56,16 @@ static inline Py_ssize_t PyDict_GET_SIZE(PyObject *op) { } #define PyDict_GET_SIZE(op) PyDict_GET_SIZE(_PyObject_CAST(op)) -PyAPI_FUNC(int) _PyDict_Contains_KnownHash(PyObject *, PyObject *, Py_hash_t); PyAPI_FUNC(int) _PyDict_ContainsId(PyObject *, _Py_Identifier *); + PyAPI_FUNC(PyObject *) _PyDict_NewPresized(Py_ssize_t minused); -PyAPI_FUNC(void) _PyDict_MaybeUntrack(PyObject *mp); -PyAPI_FUNC(int) _PyDict_HasOnlyStringKeys(PyObject *mp); PyAPI_FUNC(Py_ssize_t) _PyDict_SizeOf(PyDictObject *); PyAPI_FUNC(PyObject *) _PyDict_Pop(PyObject *, PyObject *, PyObject *); #define _PyDict_HasSplitTable(d) ((d)->ma_values != NULL) -/* Like PyDict_Merge, but override can be 0, 1 or 2. If override is 0, - the first occurrence of a key wins, if override is 1, the last occurrence - of a key wins, if override is 2, a KeyError with conflicting key as - argument is raised. 
-*/ -PyAPI_FUNC(int) _PyDict_MergeEx(PyObject *mp, PyObject *other, int override); PyAPI_FUNC(int) _PyDict_SetItemId(PyObject *dp, _Py_Identifier *key, PyObject *item); PyAPI_FUNC(int) _PyDict_DelItemId(PyObject *mp, _Py_Identifier *key); -PyAPI_FUNC(void) _PyDict_DebugMallocStats(FILE *out); /* _PyDictView */ diff --git a/Include/cpython/frameobject.h b/Include/cpython/frameobject.h index a3dc6661786451..4e19535c656f2c 100644 --- a/Include/cpython/frameobject.h +++ b/Include/cpython/frameobject.h @@ -4,8 +4,6 @@ # error "this header file must not be included directly" #endif -struct _PyInterpreterFrame; - /* Standard object interface */ PyAPI_FUNC(PyFrameObject *) PyFrame_New(PyThreadState *, PyCodeObject *, @@ -29,18 +27,3 @@ PyAPI_FUNC(int) _PyFrame_IsEntryFrame(PyFrameObject *frame); PyAPI_FUNC(int) PyFrame_FastToLocalsWithError(PyFrameObject *f); PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *); - -/* The following functions are for use by debuggers and other tools - * implementing custom frame evaluators with PEP 523. */ - -/* Returns the code object of the frame (strong reference). - * Does not raise an exception. */ -PyAPI_FUNC(PyObject *) PyUnstable_InterpreterFrame_GetCode(struct _PyInterpreterFrame *frame); - -/* Returns a byte ofsset into the last executed instruction. - * Does not raise an exception. */ -PyAPI_FUNC(int) PyUnstable_InterpreterFrame_GetLasti(struct _PyInterpreterFrame *frame); - -/* Returns the currently executing line number, or -1 if there is no line number. - * Does not raise an exception. */ -PyAPI_FUNC(int) PyUnstable_InterpreterFrame_GetLine(struct _PyInterpreterFrame *frame); diff --git a/Include/cpython/funcobject.h b/Include/cpython/funcobject.h index 6f78f5868d0166..de2013323d2c72 100644 --- a/Include/cpython/funcobject.h +++ b/Include/cpython/funcobject.h @@ -79,12 +79,6 @@ PyAPI_FUNC(int) PyFunction_SetClosure(PyObject *, PyObject *); PyAPI_FUNC(PyObject *) PyFunction_GetAnnotations(PyObject *); PyAPI_FUNC(int) PyFunction_SetAnnotations(PyObject *, PyObject *); -PyAPI_FUNC(PyObject *) _PyFunction_Vectorcall( - PyObject *func, - PyObject *const *stack, - size_t nargsf, - PyObject *kwnames); - #define _PyFunction_CAST(func) \ (assert(PyFunction_Check(func)), _Py_CAST(PyFunctionObject*, func)) diff --git a/Include/cpython/genobject.h b/Include/cpython/genobject.h index 7856481b5db300..49e46c277d75ae 100644 --- a/Include/cpython/genobject.h +++ b/Include/cpython/genobject.h @@ -41,9 +41,6 @@ PyAPI_DATA(PyTypeObject) PyGen_Type; PyAPI_FUNC(PyObject *) PyGen_New(PyFrameObject *); PyAPI_FUNC(PyObject *) PyGen_NewWithQualName(PyFrameObject *, PyObject *name, PyObject *qualname); -PyAPI_FUNC(int) _PyGen_SetStopIterationValue(PyObject *); -PyAPI_FUNC(int) _PyGen_FetchStopIterationValue(PyObject **); -PyAPI_FUNC(void) _PyGen_Finalize(PyObject *self); PyAPI_FUNC(PyCodeObject *) PyGen_GetCode(PyGenObject *gen); @@ -54,7 +51,6 @@ typedef struct { } PyCoroObject; PyAPI_DATA(PyTypeObject) PyCoro_Type; -PyAPI_DATA(PyTypeObject) _PyCoroWrapper_Type; #define PyCoro_CheckExact(op) Py_IS_TYPE((op), &PyCoro_Type) PyAPI_FUNC(PyObject *) PyCoro_New(PyFrameObject *, @@ -69,8 +65,6 @@ typedef struct { PyAPI_DATA(PyTypeObject) PyAsyncGen_Type; PyAPI_DATA(PyTypeObject) _PyAsyncGenASend_Type; -PyAPI_DATA(PyTypeObject) _PyAsyncGenWrappedValue_Type; -PyAPI_DATA(PyTypeObject) _PyAsyncGenAThrow_Type; PyAPI_FUNC(PyObject *) PyAsyncGen_New(PyFrameObject *, PyObject *name, PyObject *qualname); diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index 
c103c2026e40e9..cbae97f12f5377 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -242,45 +242,6 @@ PyAPI_FUNC(PyStatus) PyConfig_SetWideStringList(PyConfig *config, Py_ssize_t length, wchar_t **items); -/* --- PyInterpreterConfig ------------------------------------ */ - -#define PyInterpreterConfig_DEFAULT_GIL (0) -#define PyInterpreterConfig_SHARED_GIL (1) -#define PyInterpreterConfig_OWN_GIL (2) - -typedef struct { - // XXX "allow_object_sharing"? "own_objects"? - int use_main_obmalloc; - int allow_fork; - int allow_exec; - int allow_threads; - int allow_daemon_threads; - int check_multi_interp_extensions; - int gil; -} PyInterpreterConfig; - -#define _PyInterpreterConfig_INIT \ - { \ - .use_main_obmalloc = 0, \ - .allow_fork = 0, \ - .allow_exec = 0, \ - .allow_threads = 1, \ - .allow_daemon_threads = 0, \ - .check_multi_interp_extensions = 1, \ - .gil = PyInterpreterConfig_OWN_GIL, \ - } - -#define _PyInterpreterConfig_LEGACY_INIT \ - { \ - .use_main_obmalloc = 1, \ - .allow_fork = 1, \ - .allow_exec = 1, \ - .allow_threads = 1, \ - .allow_daemon_threads = 1, \ - .check_multi_interp_extensions = 0, \ - .gil = PyInterpreterConfig_SHARED_GIL, \ - } - /* --- Helper functions --------------------------------------- */ /* Get the original command line arguments, before Python modified them. diff --git a/Include/cpython/interpreteridobject.h b/Include/cpython/interpreteridobject.h index 5076584209b90b..4ab9ad5d315f80 100644 --- a/Include/cpython/interpreteridobject.h +++ b/Include/cpython/interpreteridobject.h @@ -4,8 +4,8 @@ /* Interpreter ID Object */ -PyAPI_DATA(PyTypeObject) _PyInterpreterID_Type; +PyAPI_DATA(PyTypeObject) PyInterpreterID_Type; -PyAPI_FUNC(PyObject *) _PyInterpreterID_New(int64_t); -PyAPI_FUNC(PyObject *) _PyInterpreterState_GetIDObject(PyInterpreterState *); -PyAPI_FUNC(PyInterpreterState *) _PyInterpreterID_LookUp(PyObject *); +PyAPI_FUNC(PyObject *) PyInterpreterID_New(int64_t); +PyAPI_FUNC(PyObject *) PyInterpreterState_GetIDObject(PyInterpreterState *); +PyAPI_FUNC(PyInterpreterState *) PyInterpreterID_LookUp(PyObject *); diff --git a/Include/cpython/modsupport.h b/Include/cpython/modsupport.h index 376336b13dcf8a..cfc2c2cdb5a7f4 100644 --- a/Include/cpython/modsupport.h +++ b/Include/cpython/modsupport.h @@ -52,8 +52,6 @@ PyAPI_FUNC(int) _PyArg_ParseStackAndKeywords( PyObject *kwnames, struct _PyArg_Parser *, ...); -PyAPI_FUNC(int) _PyArg_VaParseTupleAndKeywordsFast(PyObject *, PyObject *, - struct _PyArg_Parser *, va_list); PyAPI_FUNC(PyObject * const *) _PyArg_UnpackKeywords( PyObject *const *args, Py_ssize_t nargs, PyObject *kwargs, PyObject *kwnames, diff --git a/Include/cpython/object.h b/Include/cpython/object.h index cd421b4f7e0d49..5f8b1f7c195501 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -227,7 +227,7 @@ struct _typeobject { vectorcallfunc tp_vectorcall; /* bitset of which type-watchers care about this type */ - char tp_watched; + unsigned char tp_watched; }; /* This struct is used by the specializer @@ -275,30 +275,17 @@ PyAPI_FUNC(const char *) _PyType_Name(PyTypeObject *); PyAPI_FUNC(PyObject *) _PyType_Lookup(PyTypeObject *, PyObject *); PyAPI_FUNC(PyObject *) _PyType_LookupId(PyTypeObject *, _Py_Identifier *); PyAPI_FUNC(PyObject *) _PyObject_LookupSpecialId(PyObject *, _Py_Identifier *); -#ifndef Py_BUILD_CORE -// Backward compatibility for 3rd-party extensions -// that may be using the old name. 
-#define _PyObject_LookupSpecial _PyObject_LookupSpecialId -#endif -PyAPI_FUNC(PyTypeObject *) _PyType_CalculateMetaclass(PyTypeObject *, PyObject *); -PyAPI_FUNC(PyObject *) _PyType_GetDocFromInternalDoc(const char *, const char *); -PyAPI_FUNC(PyObject *) _PyType_GetTextSignatureFromInternalDoc(const char *, const char *); PyAPI_FUNC(PyObject *) PyType_GetModuleByDef(PyTypeObject *, PyModuleDef *); PyAPI_FUNC(PyObject *) PyType_GetDict(PyTypeObject *); PyAPI_FUNC(int) PyObject_Print(PyObject *, FILE *, int); PyAPI_FUNC(void) _Py_BreakPoint(void); PyAPI_FUNC(void) _PyObject_Dump(PyObject *); -PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *); -PyAPI_FUNC(int) _PyObject_IsAbstract(PyObject *); PyAPI_FUNC(PyObject *) _PyObject_GetAttrId(PyObject *, _Py_Identifier *); PyAPI_FUNC(int) _PyObject_SetAttrId(PyObject *, _Py_Identifier *, PyObject *); -PyAPI_FUNC(int) _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); - PyAPI_FUNC(PyObject **) _PyObject_GetDictPtr(PyObject *); -PyAPI_FUNC(PyObject *) _PyObject_NextNotImplemented(PyObject *); PyAPI_FUNC(void) PyObject_CallFinalizer(PyObject *); PyAPI_FUNC(int) PyObject_CallFinalizerFromDealloc(PyObject *); @@ -377,20 +364,6 @@ PyAPI_FUNC(PyObject *) _PyObject_FunctionStr(PyObject *); #endif -PyAPI_DATA(PyTypeObject) _PyNone_Type; -PyAPI_DATA(PyTypeObject) _PyNotImplemented_Type; - -/* Maps Py_LT to Py_GT, ..., Py_GE to Py_LE. - * Defined in object.c. - */ -PyAPI_DATA(int) _Py_SwappedOp[]; - -PyAPI_FUNC(void) -_PyDebugAllocatorStats(FILE *out, const char *block_name, int num_blocks, - size_t sizeof_block); -PyAPI_FUNC(void) -_PyObject_DebugTypeStats(FILE *out); - /* Define a pair of assertion macros: _PyObject_ASSERT_FROM(), _PyObject_ASSERT_WITH_MSG() and _PyObject_ASSERT(). @@ -439,21 +412,6 @@ PyAPI_FUNC(void) _Py_NO_RETURN _PyObject_AssertFailed( int line, const char *function); -/* Check if an object is consistent. For example, ensure that the reference - counter is greater than or equal to 1, and ensure that ob_type is not NULL. - - Call _PyObject_AssertFailed() if the object is inconsistent. - - If check_content is zero, only check header fields: reduce the overhead. - - The function always return 1. The return value is just here to be able to - write: - - assert(_PyObject_CheckConsistency(obj, 1)); */ -PyAPI_FUNC(int) _PyObject_CheckConsistency( - PyObject *op, - int check_content); - /* Trashcan mechanism, thanks to Christian Tismer. diff --git a/Include/cpython/optimizer.h b/Include/cpython/optimizer.h index 2260501bfd608e..da34ec1882a539 100644 --- a/Include/cpython/optimizer.h +++ b/Include/cpython/optimizer.h @@ -12,7 +12,7 @@ typedef struct { } _PyVMData; typedef struct _PyExecutorObject { - PyObject_HEAD + PyObject_VAR_HEAD /* WARNING: execute consumes a reference to self. This is necessary to allow executors to tail call into each other. 
*/ struct _PyInterpreterFrame *(*execute)(struct _PyExecutorObject *self, struct _PyInterpreterFrame *frame, PyObject **stack_pointer); _PyVMData vm_data; /* Used by the VM, but opaque to the optimizer */ diff --git a/Include/cpython/pyframe.h b/Include/cpython/pyframe.h index 6ec292718aff1a..0e2afff925e31f 100644 --- a/Include/cpython/pyframe.h +++ b/Include/cpython/pyframe.h @@ -16,3 +16,20 @@ PyAPI_FUNC(PyObject *) PyFrame_GetGenerator(PyFrameObject *frame); PyAPI_FUNC(int) PyFrame_GetLasti(PyFrameObject *frame); PyAPI_FUNC(PyObject*) PyFrame_GetVar(PyFrameObject *frame, PyObject *name); PyAPI_FUNC(PyObject*) PyFrame_GetVarString(PyFrameObject *frame, const char *name); + +/* The following functions are for use by debuggers and other tools + * implementing custom frame evaluators with PEP 523. */ + +struct _PyInterpreterFrame; + +/* Returns the code object of the frame (strong reference). + * Does not raise an exception. */ +PyAPI_FUNC(PyObject *) PyUnstable_InterpreterFrame_GetCode(struct _PyInterpreterFrame *frame); + +/* Returns a byte offset into the last executed instruction. + * Does not raise an exception. */ +PyAPI_FUNC(int) PyUnstable_InterpreterFrame_GetLasti(struct _PyInterpreterFrame *frame); + +/* Returns the currently executing line number, or -1 if there is no line number. + * Does not raise an exception. */ +PyAPI_FUNC(int) PyUnstable_InterpreterFrame_GetLine(struct _PyInterpreterFrame *frame); diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h index 8af34b05642512..d425a233f71000 100644 --- a/Include/cpython/pylifecycle.h +++ b/Include/cpython/pylifecycle.h @@ -35,6 +35,49 @@ PyAPI_FUNC(void) _Py_NO_RETURN Py_ExitStatusException(PyStatus err); PyAPI_FUNC(int) Py_FdIsInteractive(FILE *, const char *); +/* --- PyInterpreterConfig ------------------------------------ */ + +#define PyInterpreterConfig_DEFAULT_GIL (0) +#define PyInterpreterConfig_SHARED_GIL (1) +#define PyInterpreterConfig_OWN_GIL (2) + +typedef struct { + // XXX "allow_object_sharing"? "own_objects"? + int use_main_obmalloc; + int allow_fork; + int allow_exec; + int allow_threads; + int allow_daemon_threads; + int check_multi_interp_extensions; + int gil; +} PyInterpreterConfig; + +#define _PyInterpreterConfig_INIT \ + { \ + .use_main_obmalloc = 0, \ + .allow_fork = 0, \ + .allow_exec = 0, \ + .allow_threads = 1, \ + .allow_daemon_threads = 0, \ + .check_multi_interp_extensions = 1, \ + .gil = PyInterpreterConfig_OWN_GIL, \ + } + +#define _PyInterpreterConfig_LEGACY_INIT \ + { \ + .use_main_obmalloc = 1, \ + .allow_fork = 1, \ + .allow_exec = 1, \ + .allow_threads = 1, \ + .allow_daemon_threads = 1, \ + .check_multi_interp_extensions = 0, \ + .gil = PyInterpreterConfig_SHARED_GIL, \ + } + PyAPI_FUNC(PyStatus) Py_NewInterpreterFromConfig( PyThreadState **tstate_p, const PyInterpreterConfig *config); + +typedef void (*atexit_datacallbackfunc)(void *); +PyAPI_FUNC(int) PyUnstable_AtExit( PyInterpreterState *, atexit_datacallbackfunc, void *); diff --git a/Include/cpython/pymem.h b/Include/cpython/pymem.h index d1054d76520b9a..b75f1c4d2425dd 100644 --- a/Include/cpython/pymem.h +++ b/Include/cpython/pymem.h @@ -7,18 +7,6 @@ PyAPI_FUNC(void *) PyMem_RawCalloc(size_t nelem, size_t elsize); PyAPI_FUNC(void *) PyMem_RawRealloc(void *ptr, size_t new_size); PyAPI_FUNC(void) PyMem_RawFree(void *ptr); -/* Try to get the allocators name set by _PyMem_SetupAllocators().
*/ -PyAPI_FUNC(const char*) _PyMem_GetCurrentAllocatorName(void); - -/* strdup() using PyMem_RawMalloc() */ -PyAPI_FUNC(char *) _PyMem_RawStrdup(const char *str); - -/* strdup() using PyMem_Malloc() */ -PyAPI_FUNC(char *) _PyMem_Strdup(const char *str); - -/* wcsdup() using PyMem_RawMalloc() */ -PyAPI_FUNC(wchar_t*) _PyMem_RawWcsdup(const wchar_t *str); - typedef enum { /* PyMem_RawMalloc(), PyMem_RawRealloc() and PyMem_RawFree() */ diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 4254110889fc6c..56e473cc5e42d5 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -8,6 +8,7 @@ PyAPI_FUNC(int) _PyInterpreterState_RequiresIDRef(PyInterpreterState *); PyAPI_FUNC(void) _PyInterpreterState_RequireIDRef(PyInterpreterState *, int); +PyAPI_FUNC(PyObject *) PyUnstable_InterpreterState_GetMainModule(PyInterpreterState *); /* State unique per thread */ @@ -221,7 +222,8 @@ struct _ts { # ifdef __wasi__ # define C_RECURSION_LIMIT 500 # else -# define C_RECURSION_LIMIT 800 + // This value is duplicated in Lib/test/support/__init__.py +# define C_RECURSION_LIMIT 1500 # endif #endif diff --git a/Include/cpython/pythonrun.h b/Include/cpython/pythonrun.h index fb617655374026..3b2537e01b83b1 100644 --- a/Include/cpython/pythonrun.h +++ b/Include/cpython/pythonrun.h @@ -117,5 +117,4 @@ PyAPI_FUNC(PyObject *) PyRun_FileFlags(FILE *fp, const char *p, int s, PyObject /* Stuff with no proper home (yet) */ PyAPI_FUNC(char *) PyOS_Readline(FILE *, FILE *, const char *); -PyAPI_DATA(PyThreadState*) _PyOS_ReadlineTState; PyAPI_DATA(char) *(*PyOS_ReadlineFunctionPointer)(FILE *, FILE *, const char *); diff --git a/Include/cpython/setobject.h b/Include/cpython/setobject.h index 20fd63eaae56e2..1778c778a05324 100644 --- a/Include/cpython/setobject.h +++ b/Include/cpython/setobject.h @@ -65,8 +65,3 @@ static inline Py_ssize_t PySet_GET_SIZE(PyObject *so) { return _PySet_CAST(so)->used; } #define PySet_GET_SIZE(so) PySet_GET_SIZE(_PyObject_CAST(so)) - -PyAPI_DATA(PyObject *) _PySet_Dummy; - -PyAPI_FUNC(int) _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key, Py_hash_t *hash); -PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable); diff --git a/Include/cpython/tupleobject.h b/Include/cpython/tupleobject.h index 370da1612a61ed..e530c8beda44ab 100644 --- a/Include/cpython/tupleobject.h +++ b/Include/cpython/tupleobject.h @@ -11,7 +11,6 @@ typedef struct { } PyTupleObject; PyAPI_FUNC(int) _PyTuple_Resize(PyObject **, Py_ssize_t); -PyAPI_FUNC(void) _PyTuple_MaybeUntrack(PyObject *); /* Cast argument to PyTupleObject* type. */ #define _PyTuple_CAST(op) \ @@ -37,5 +36,3 @@ PyTuple_SET_ITEM(PyObject *op, Py_ssize_t index, PyObject *value) { } #define PyTuple_SET_ITEM(op, index, value) \ PyTuple_SET_ITEM(_PyObject_CAST(op), (index), _PyObject_CAST(value)) - -PyAPI_FUNC(void) _PyTuple_DebugMallocStats(FILE *out); diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index e75b5e154943dc..859ab7178e920a 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -140,9 +140,11 @@ typedef struct { and the kind is PyUnicode_1BYTE_KIND. If ascii is set and compact is set, use the PyASCIIObject structure. */ unsigned int ascii:1; + /* The object is statically allocated. */ + unsigned int statically_allocated:1; /* Padding to ensure that PyUnicode_DATA() is always aligned to 4 bytes (see issue #19537 on m68k). 
*/ - unsigned int :25; + unsigned int :24; } state; } PyASCIIObject; @@ -446,17 +448,12 @@ PyAPI_FUNC(PyObject*) PyUnicode_FromKindAndData( Like PyUnicode_AsUTF8AndSize(), this also caches the UTF-8 representation in the unicodeobject. - _PyUnicode_AsString is a #define for PyUnicode_AsUTF8 to - support the previous internal function with the same behaviour. - Use of this API is DEPRECATED since no size information can be extracted from the returned data. */ PyAPI_FUNC(const char *) PyUnicode_AsUTF8(PyObject *unicode); -#define _PyUnicode_AsString PyUnicode_AsUTF8 - /* === Characters Type APIs =============================================== */ /* These should not be used directly. Use the Py_UNICODE_IS* and @@ -478,14 +475,6 @@ PyAPI_FUNC(int) _PyUnicode_IsTitlecase( Py_UCS4 ch /* Unicode character */ ); -PyAPI_FUNC(int) _PyUnicode_IsXidStart( - Py_UCS4 ch /* Unicode character */ - ); - -PyAPI_FUNC(int) _PyUnicode_IsXidContinue( - Py_UCS4 ch /* Unicode character */ - ); - PyAPI_FUNC(int) _PyUnicode_IsWhitespace( const Py_UCS4 ch /* Unicode character */ ); @@ -506,34 +495,6 @@ PyAPI_FUNC(Py_UCS4) _PyUnicode_ToTitlecase( Py_UCS4 ch /* Unicode character */ ); -PyAPI_FUNC(int) _PyUnicode_ToLowerFull( - Py_UCS4 ch, /* Unicode character */ - Py_UCS4 *res - ); - -PyAPI_FUNC(int) _PyUnicode_ToTitleFull( - Py_UCS4 ch, /* Unicode character */ - Py_UCS4 *res - ); - -PyAPI_FUNC(int) _PyUnicode_ToUpperFull( - Py_UCS4 ch, /* Unicode character */ - Py_UCS4 *res - ); - -PyAPI_FUNC(int) _PyUnicode_ToFoldedFull( - Py_UCS4 ch, /* Unicode character */ - Py_UCS4 *res - ); - -PyAPI_FUNC(int) _PyUnicode_IsCaseIgnorable( - Py_UCS4 ch /* Unicode character */ - ); - -PyAPI_FUNC(int) _PyUnicode_IsCased( - Py_UCS4 ch /* Unicode character */ - ); - PyAPI_FUNC(int) _PyUnicode_ToDecimalDigit( Py_UCS4 ch /* Unicode character */ ); diff --git a/Include/dictobject.h b/Include/dictobject.h index e7fcb44d0cf9a9..1bbeec1ab699e7 100644 --- a/Include/dictobject.h +++ b/Include/dictobject.h @@ -57,6 +57,17 @@ PyAPI_FUNC(int) PyDict_MergeFromSeq2(PyObject *d, PyAPI_FUNC(PyObject *) PyDict_GetItemString(PyObject *dp, const char *key); PyAPI_FUNC(int) PyDict_SetItemString(PyObject *dp, const char *key, PyObject *item); PyAPI_FUNC(int) PyDict_DelItemString(PyObject *dp, const char *key); + +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030D0000 +// Return the object from dictionary *op* which has a key *key*. +// - If the key is present, set *result to a new strong reference to the value +// and return 1. +// - If the key is missing, set *result to NULL and return 0 . +// - On error, raise an exception and return -1. +PyAPI_FUNC(int) PyDict_GetItemRef(PyObject *mp, PyObject *key, PyObject **result); +PyAPI_FUNC(int) PyDict_GetItemStringRef(PyObject *mp, const char *key, PyObject **result); +#endif + #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030A0000 PyAPI_FUNC(PyObject *) PyObject_GenericGetDict(PyObject *, void *); #endif diff --git a/Include/errcode.h b/Include/errcode.h index 54ae929bf25870..8d44e9ae559193 100644 --- a/Include/errcode.h +++ b/Include/errcode.h @@ -1,18 +1,24 @@ +// Error codes passed around between file input, tokenizer, parser and +// interpreter. This is necessary so we can turn them into Python +// exceptions at a higher level. Note that some errors have a +// slightly different meaning when passed from the tokenizer to the +// parser than when passed from the parser to the interpreter; e.g. +// the parser only returns E_EOF when it hits EOF immediately, and it +// never returns E_OK. 
+// +// The public PyRun_InteractiveOneObjectEx() function can return E_EOF, +// same as its variants: +// +// * PyRun_InteractiveOneObject() +// * PyRun_InteractiveOneFlags() +// * PyRun_InteractiveOne() + #ifndef Py_ERRCODE_H #define Py_ERRCODE_H #ifdef __cplusplus extern "C" { #endif - -/* Error codes passed around between file input, tokenizer, parser and - interpreter. This is necessary so we can turn them into Python - exceptions at a higher level. Note that some errors have a - slightly different meaning when passed from the tokenizer to the - parser than when passed from the parser to the interpreter; e.g. - the parser only returns E_EOF when it hits EOF immediately, and it - never returns E_OK. */ - #define E_OK 10 /* No error */ #define E_EOF 11 /* End Of File */ #define E_INTR 12 /* Interrupted */ diff --git a/Include/fileobject.h b/Include/fileobject.h index 2deef544d667a5..6a6d11409497fa 100644 --- a/Include/fileobject.h +++ b/Include/fileobject.h @@ -29,14 +29,6 @@ Py_DEPRECATED(3.12) PyAPI_DATA(int) Py_HasFileSystemDefaultEncoding; Py_DEPRECATED(3.12) PyAPI_DATA(int) Py_UTF8Mode; #endif -/* A routine to check if a file descriptor can be select()-ed. */ -#ifdef _MSC_VER - /* On Windows, any socket fd can be select()-ed, no matter how high */ - #define _PyIsSelectable_fd(FD) (1) -#else - #define _PyIsSelectable_fd(FD) ((unsigned int)(FD) < (unsigned int)FD_SETSIZE) -#endif - #ifndef Py_LIMITED_API # define Py_CPYTHON_FILEOBJECT_H # include "cpython/fileobject.h" diff --git a/Include/internal/pycore_atexit.h b/Include/internal/pycore_atexit.h index fc5cb6d8826435..3966df70e2616f 100644 --- a/Include/internal/pycore_atexit.h +++ b/Include/internal/pycore_atexit.h @@ -51,6 +51,7 @@ struct atexit_state { int callback_len; }; +// Export for '_xxinterpchannels' shared extension PyAPI_FUNC(int) _Py_AtExit( PyInterpreterState *interp, atexit_datacallbackfunc func, diff --git a/Include/internal/pycore_bytesobject.h b/Include/internal/pycore_bytesobject.h index 115c0c52c8f9a9..980065a65f399e 100644 --- a/Include/internal/pycore_bytesobject.h +++ b/Include/internal/pycore_bytesobject.h @@ -8,6 +8,25 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +extern PyObject* _PyBytes_FormatEx( + const char *format, + Py_ssize_t format_len, + PyObject *args, + int use_bytearray); + +extern PyObject* _PyBytes_FromHex( + PyObject *string, + int use_bytearray); + +// Helper for PyBytes_DecodeEscape that detects invalid escape chars. +// Export for test_peg_generator. +PyAPI_FUNC(PyObject*) _PyBytes_DecodeEscape(const char *, Py_ssize_t, + const char *, const char **); + +/* _PyBytes_Join(sep, x) is like sep.join(x). sep must be PyBytesObject*, + x must be an iterable object. */ +extern PyObject* _PyBytes_Join(PyObject *sep, PyObject *x); + /* Substring Search. diff --git a/Include/internal/pycore_call.h b/Include/internal/pycore_call.h index 9c32035d474b3c..c0d61785802f64 100644 --- a/Include/internal/pycore_call.h +++ b/Include/internal/pycore_call.h @@ -22,8 +22,8 @@ extern "C" { #define _PY_FASTCALL_SMALL_STACK 5 -// Export for shared stdlib extensions like the math extension, -// function used via inlined _PyObject_VectorcallTstate() function. +// Export for 'math' shared extension, function used +// via inlined _PyObject_VectorcallTstate() function. 
PyAPI_FUNC(PyObject*) _Py_CheckFunctionResult( PyThreadState *tstate, PyObject *callable, @@ -68,7 +68,7 @@ extern PyObject * _PyObject_CallMethodFormat( const char *format, ...); -// Export for shared stdlib extensions like the array extension +// Export for 'array' shared extension PyAPI_FUNC(PyObject*) _PyObject_CallMethod( PyObject *obj, PyObject *name, @@ -120,8 +120,8 @@ _PyObject_CallMethodIdOneArg(PyObject *self, _Py_Identifier *name, PyObject *arg // Call callable using tp_call. Arguments are like PyObject_Vectorcall(), // except that nargs is plainly the number of arguments without flags. // -// Export for shared stdlib extensions like the math extension, -// function used via inlined _PyObject_VectorcallTstate() function. +// Export for 'math' shared extension, function used +// via inlined _PyObject_VectorcallTstate() function. PyAPI_FUNC(PyObject*) _PyObject_MakeTpCall( PyThreadState *tstate, PyObject *callable, diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index e729904ff2c4cc..05b7380597812b 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -23,13 +23,14 @@ struct _ceval_runtime_state; extern void _Py_FinishPendingCalls(PyThreadState *tstate); extern void _PyEval_InitState(PyInterpreterState *, PyThread_type_lock); extern void _PyEval_FiniState(struct _ceval_state *ceval); -PyAPI_FUNC(void) _PyEval_SignalReceived(PyInterpreterState *interp); +extern void _PyEval_SignalReceived(PyInterpreterState *interp); +// Export for '_testinternalcapi' shared extension PyAPI_FUNC(int) _PyEval_AddPendingCall( PyInterpreterState *interp, int (*func)(void *), void *arg, int mainthreadonly); -PyAPI_FUNC(void) _PyEval_SignalAsyncExc(PyInterpreterState *interp); +extern void _PyEval_SignalAsyncExc(PyInterpreterState *interp); #ifdef HAVE_FORK extern PyStatus _PyEval_ReInitThreads(PyThreadState *tstate); #endif @@ -122,6 +123,7 @@ static inline int _Py_MakeRecCheck(PyThreadState *tstate) { } #endif +// Export for _Py_EnterRecursiveCall() PyAPI_FUNC(int) _Py_CheckRecursiveCall( PyThreadState *tstate, const char *where); @@ -158,6 +160,18 @@ extern int _Py_HandlePending(PyThreadState *tstate); extern PyObject * _PyEval_GetFrameLocals(void); +extern const binaryfunc _PyEval_BinaryOps[]; +int _PyEval_CheckExceptStarTypeValid(PyThreadState *tstate, PyObject* right); +int _PyEval_CheckExceptTypeValid(PyThreadState *tstate, PyObject* right); +int _PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type, PyObject **match, PyObject **rest); +void _PyEval_FormatAwaitableError(PyThreadState *tstate, PyTypeObject *type, int oparg); +void _PyEval_FormatExcCheckArg(PyThreadState *tstate, PyObject *exc, const char *format_str, PyObject *obj); +void _PyEval_FormatExcUnbound(PyThreadState *tstate, PyCodeObject *co, int oparg); +void _PyEval_FormatKwargsError(PyThreadState *tstate, PyObject *func, PyObject *kwargs); +PyObject *_PyEval_MatchClass(PyThreadState *tstate, PyObject *subject, PyObject *type, Py_ssize_t nargs, PyObject *kwargs); +PyObject *_PyEval_MatchKeys(PyThreadState *tstate, PyObject *map, PyObject *keys); +int _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v, int argcnt, int argcntafter, PyObject **sp); + #ifdef __cplusplus } diff --git a/Include/internal/pycore_ceval_state.h b/Include/internal/pycore_ceval_state.h index e56e43c6e0c6a7..1ebfcc9bebd0ab 100644 --- a/Include/internal/pycore_ceval_state.h +++ b/Include/internal/pycore_ceval_state.h @@ -84,7 +84,9 @@ struct _ceval_runtime_state { struct 
_ceval_state { /* This single variable consolidates all requests to break out of - the fast path in the eval loop. */ + * the fast path in the eval loop. + * It is by far the hottest field in this struct and + * should be placed at the beginning. */ _Py_atomic_int eval_breaker; /* Request for dropping the GIL */ _Py_atomic_int gil_drop_request; diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index b6b1aeca6e5c5f..00099376635e9b 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -203,8 +203,8 @@ struct _PyCodeConstructor { // back to a regular function signature. Regardless, this approach // wouldn't be appropriate if this weren't a strictly internal API. // (See the comments in https://github.com/python/cpython/pull/26258.) -PyAPI_FUNC(int) _PyCode_Validate(struct _PyCodeConstructor *); -PyAPI_FUNC(PyCodeObject *) _PyCode_New(struct _PyCodeConstructor *); +extern int _PyCode_Validate(struct _PyCodeConstructor *); +extern PyCodeObject* _PyCode_New(struct _PyCodeConstructor *); /* Private API */ @@ -229,6 +229,8 @@ extern void _PyLineTable_InitAddressRange( extern int _PyLineTable_NextAddressRange(PyCodeAddressRange *range); extern int _PyLineTable_PreviousAddressRange(PyCodeAddressRange *range); +#define ENABLE_SPECIALIZATION 1 + /* Specialization functions */ extern void _Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls, @@ -262,7 +264,6 @@ extern int _PyStaticCode_Init(PyCodeObject *co); #ifdef Py_STATS - #define STAT_INC(opname, name) do { if (_py_stats) _py_stats->opcode_stats[opname].specialization.name++; } while (0) #define STAT_DEC(opname, name) do { if (_py_stats) _py_stats->opcode_stats[opname].specialization.name--; } while (0) #define OPCODE_EXE_INC(opname) do { if (_py_stats) _py_stats->opcode_stats[opname].execution_count++; } while (0) @@ -273,8 +274,9 @@ extern int _PyStaticCode_Init(PyCodeObject *co); #define EVAL_CALL_STAT_INC(name) do { if (_py_stats) _py_stats->call_stats.eval_calls[name]++; } while (0) #define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) \ do { if (_py_stats && PyFunction_Check(callable)) _py_stats->call_stats.eval_calls[name]++; } while (0) +#define GC_STAT_ADD(gen, name, n) do { if (_py_stats) _py_stats->gc_stats[(gen)].name += (n); } while (0) -// Used by the _opcode extension which is built as a shared library +// Export for '_opcode' shared extension PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void); #else @@ -286,6 +288,7 @@ PyAPI_FUNC(PyObject*) _Py_GetSpecializationStats(void); #define OBJECT_STAT_INC_COND(name, cond) ((void)0) #define EVAL_CALL_STAT_INC(name) ((void)0) #define EVAL_CALL_STAT_INC_IF_FUNCTION(name, callable) ((void)0) +#define GC_STAT_ADD(gen, name, n) ((void)0) #endif // !Py_STATS // Utility functions for reading/writing 32/64-bit values in the inline caches. 
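/* Illustrative sketch (not part of the patch above): every Py_STATS macro in
 * pycore_code.h follows the same pattern -- a runtime-guarded update when
 * stats are compiled in, and ((void)0) otherwise -- so call sites need no
 * #ifdef of their own.  A hypothetical call site for the new GC_STAT_ADD()
 * macro; the function name and parameters are invented here, and it assumes
 * the GCStats struct provides an objects_collected counter. */
#ifdef Py_STATS
static void
gc_stats_sketch(int generation, Py_ssize_t n_collected)
{
    /* Expands roughly to:
     *   if (_py_stats) _py_stats->gc_stats[generation].objects_collected += n_collected; */
    GC_STAT_ADD(generation, objects_collected, n_collected);
}
#endif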
diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h index beb37cced06dba..ad657c0f0fcedc 100644 --- a/Include/internal/pycore_compile.h +++ b/Include/internal/pycore_compile.h @@ -11,7 +11,7 @@ extern "C" { struct _arena; // Type defined in pycore_pyarena.h struct _mod; // Type defined in pycore_ast.h -// Export the symbol for test_peg_generator (built as a library) +// Export for 'test_peg_generator' shared extension PyAPI_FUNC(PyCodeObject*) _PyAST_Compile( struct _mod *mod, PyObject *filename, @@ -54,6 +54,11 @@ typedef struct { int s_next_free_label; /* next free label id */ } _PyCompile_InstructionSequence; +int _PyCompile_InstructionSequence_UseLabel(_PyCompile_InstructionSequence *seq, int lbl); +int _PyCompile_InstructionSequence_Addop(_PyCompile_InstructionSequence *seq, + int opcode, int oparg, + _PyCompilerSrcLocation loc); + typedef struct { PyObject *u_name; PyObject *u_qualname; /* dot-separated qualified name (lazy) */ @@ -91,6 +96,7 @@ int _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj); /* Access compiler internals for unit testing */ +// Export for '_testinternalcapi' shared extension PyAPI_FUNC(PyObject*) _PyCompile_CleanDoc(PyObject *doc); PyAPI_FUNC(PyObject*) _PyCompile_CodeGen( diff --git a/Include/internal/pycore_descrobject.h b/Include/internal/pycore_descrobject.h index 76378569df90e3..3cec59a68a3d2b 100644 --- a/Include/internal/pycore_descrobject.h +++ b/Include/internal/pycore_descrobject.h @@ -20,6 +20,8 @@ typedef struct { typedef propertyobject _PyPropertyObject; +extern PyTypeObject _PyMethodWrapper_Type; + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index 6253e0841ad349..2ad6ef0f7c04d5 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -10,8 +10,29 @@ extern "C" { #endif #include "pycore_dict_state.h" +#include "pycore_object.h" #include "pycore_runtime.h" // _PyRuntime +// Unsafe flavor of PyDict_GetItemWithError(): no error checking +extern PyObject* _PyDict_GetItemWithError(PyObject *dp, PyObject *key); + +extern int _PyDict_Contains_KnownHash(PyObject *, PyObject *, Py_hash_t); + +extern int _PyDict_DelItemIf(PyObject *mp, PyObject *key, + int (*predicate)(PyObject *value)); + +extern int _PyDict_HasOnlyStringKeys(PyObject *mp); + +extern void _PyDict_MaybeUntrack(PyObject *mp); + +/* Like PyDict_Merge, but override can be 0, 1 or 2. If override is 0, + the first occurrence of a key wins, if override is 1, the last occurrence + of a key wins, if override is 2, a KeyError with conflicting key as + argument is raised. +*/ +extern int _PyDict_MergeEx(PyObject *mp, PyObject *other, int override); + +extern void _PyDict_DebugMallocStats(FILE *out); /* runtime lifecycle */ @@ -42,6 +63,8 @@ extern uint32_t _PyDictKeys_GetVersionForCurrentState( extern size_t _PyDict_KeysSize(PyDictKeysObject *keys); +extern void _PyDictKeys_DecRef(PyDictKeysObject *keys); + /* _Py_dict_lookup() returns index of entry which can be used like DK_ENTRIES(dk)[index]. * -1 when no entry found, -3 when compare raises error. 
*/ @@ -176,6 +199,7 @@ _PyDict_NotifyEvent(PyInterpreterState *interp, } extern PyObject *_PyObject_MakeDictFromInstanceAttributes(PyObject *obj, PyDictValues *values); +extern bool _PyObject_MakeInstanceAttributesFromDict(PyObject *obj, PyDictOrValues *dorv); extern PyObject *_PyDict_FromItems( PyObject *const *keys, Py_ssize_t keys_offset, PyObject *const *values, Py_ssize_t values_offset, diff --git a/Include/internal/pycore_dtoa.h b/Include/internal/pycore_dtoa.h index 4d9681d59a64f7..ac62a4d300720a 100644 --- a/Include/internal/pycore_dtoa.h +++ b/Include/internal/pycore_dtoa.h @@ -60,10 +60,10 @@ struct _dtoa_state { /* These functions are used by modules compiled as C extension like math: they must be exported. */ -PyAPI_FUNC(double) _Py_dg_strtod(const char *str, char **ptr); -PyAPI_FUNC(char *) _Py_dg_dtoa(double d, int mode, int ndigits, - int *decpt, int *sign, char **rve); -PyAPI_FUNC(void) _Py_dg_freedtoa(char *s); +extern double _Py_dg_strtod(const char *str, char **ptr); +extern char* _Py_dg_dtoa(double d, int mode, int ndigits, + int *decpt, int *sign, char **rve); +extern void _Py_dg_freedtoa(char *s); #endif // _PY_SHORT_FLOAT_REPR == 1 diff --git a/Include/internal/pycore_fileutils.h b/Include/internal/pycore_fileutils.h index ef6642d00f1b54..daa32c0dff6097 100644 --- a/Include/internal/pycore_fileutils.h +++ b/Include/internal/pycore_fileutils.h @@ -10,6 +10,13 @@ extern "C" { #include <locale.h> /* struct lconv */ +/* A routine to check if a file descriptor can be select()-ed. */ +#ifdef _MSC_VER + /* On Windows, any socket fd can be select()-ed, no matter how high */ + #define _PyIsSelectable_fd(FD) (1) +#else + #define _PyIsSelectable_fd(FD) ((unsigned int)(FD) < (unsigned int)FD_SETSIZE) +#endif struct _fileutils_state { int force_ascii; @@ -45,11 +52,11 @@ PyAPI_FUNC(int) _Py_EncodeLocaleEx( int current_locale, _Py_error_handler errors); -PyAPI_FUNC(char*) _Py_EncodeLocaleRaw( +extern char* _Py_EncodeLocaleRaw( const wchar_t *text, size_t *error_pos); -PyAPI_FUNC(PyObject *) _Py_device_encoding(int); +extern PyObject* _Py_device_encoding(int); #if defined(MS_WINDOWS) || defined(__APPLE__) /* On Windows, the count parameter of read() is an int (bpo-9015, bpo-9611).
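/* Illustrative sketch (not part of the patch above): _PyIsSelectable_fd() is
 * meant to guard select() calls -- on POSIX an fd must be below FD_SETSIZE to
 * fit in an fd_set, while on Windows the macro is always true.  The helper
 * name and error handling are invented for this sketch, which assumes a POSIX
 * build with <sys/select.h> available. */
#include <sys/select.h>

static int
wait_readable_sketch(int fd, struct timeval *timeout)
{
    if (!_PyIsSelectable_fd(fd)) {
        /* caller would raise, e.g. "filedescriptor out of range in select()" */
        return -1;
    }
    fd_set readfds;
    FD_ZERO(&readfds);
    FD_SET(fd, &readfds);
    return select(fd + 1, &readfds, NULL, NULL, timeout);
}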
@@ -102,7 +109,7 @@ PyAPI_FUNC(int) _Py_stat( PyObject *path, struct stat *status); -PyAPI_FUNC(int) _Py_open( +extern int _Py_open( const char *pathname, int flags); @@ -110,16 +117,16 @@ PyAPI_FUNC(int) _Py_open_noraise( const char *pathname, int flags); -PyAPI_FUNC(FILE *) _Py_wfopen( +extern FILE* _Py_wfopen( const wchar_t *path, const wchar_t *mode); -PyAPI_FUNC(Py_ssize_t) _Py_read( +extern Py_ssize_t _Py_read( int fd, void *buf, size_t count); -PyAPI_FUNC(Py_ssize_t) _Py_write( +extern Py_ssize_t _Py_write( int fd, const void *buf, size_t count); @@ -130,7 +137,7 @@ PyAPI_FUNC(Py_ssize_t) _Py_write_noraise( size_t count); #ifdef HAVE_READLINK -PyAPI_FUNC(int) _Py_wreadlink( +extern int _Py_wreadlink( const wchar_t *path, wchar_t *buf, /* Number of characters of 'buf' buffer @@ -139,7 +146,7 @@ PyAPI_FUNC(int) _Py_wreadlink( #endif #ifdef HAVE_REALPATH -PyAPI_FUNC(wchar_t*) _Py_wrealpath( +extern wchar_t* _Py_wrealpath( const wchar_t *path, wchar_t *resolved_path, /* Number of characters of 'resolved_path' buffer @@ -147,13 +154,13 @@ PyAPI_FUNC(wchar_t*) _Py_wrealpath( size_t resolved_path_len); #endif -PyAPI_FUNC(wchar_t*) _Py_wgetcwd( +extern wchar_t* _Py_wgetcwd( wchar_t *buf, /* Number of characters of 'buf' buffer including the trailing NUL character */ size_t buflen); -PyAPI_FUNC(int) _Py_get_inheritable(int fd); +extern int _Py_get_inheritable(int fd); PyAPI_FUNC(int) _Py_set_inheritable(int fd, int inheritable, int *atomic_flag_works); @@ -163,18 +170,18 @@ PyAPI_FUNC(int) _Py_set_inheritable_async_safe(int fd, int inheritable, PyAPI_FUNC(int) _Py_dup(int fd); -PyAPI_FUNC(int) _Py_get_blocking(int fd); +extern int _Py_get_blocking(int fd); -PyAPI_FUNC(int) _Py_set_blocking(int fd, int blocking); +extern int _Py_set_blocking(int fd, int blocking); #ifdef MS_WINDOWS -PyAPI_FUNC(void*) _Py_get_osfhandle_noraise(int fd); +extern void* _Py_get_osfhandle_noraise(int fd); PyAPI_FUNC(void*) _Py_get_osfhandle(int fd); -PyAPI_FUNC(int) _Py_open_osfhandle_noraise(void *handle, int flags); +extern int _Py_open_osfhandle_noraise(void *handle, int flags); -PyAPI_FUNC(int) _Py_open_osfhandle(void *handle, int flags); +extern int _Py_open_osfhandle(void *handle, int flags); #endif /* MS_WINDOWS */ // This is used after getting NULL back from Py_DecodeLocale(). @@ -183,9 +190,9 @@ PyAPI_FUNC(int) _Py_open_osfhandle(void *handle, int flags); ? _PyStatus_ERR("cannot decode " NAME) \ : _PyStatus_NO_MEMORY() -PyAPI_DATA(int) _Py_HasFileSystemDefaultEncodeErrors; +extern int _Py_HasFileSystemDefaultEncodeErrors; -PyAPI_FUNC(int) _Py_DecodeUTF8Ex( +extern int _Py_DecodeUTF8Ex( const char *arg, Py_ssize_t arglen, wchar_t **wstr, @@ -193,7 +200,7 @@ PyAPI_FUNC(int) _Py_DecodeUTF8Ex( const char **reason, _Py_error_handler errors); -PyAPI_FUNC(int) _Py_EncodeUTF8Ex( +extern int _Py_EncodeUTF8Ex( const wchar_t *text, char **str, size_t *error_pos, @@ -201,7 +208,7 @@ PyAPI_FUNC(int) _Py_EncodeUTF8Ex( int raw_malloc, _Py_error_handler errors); -PyAPI_FUNC(wchar_t*) _Py_DecodeUTF8_surrogateescape( +extern wchar_t* _Py_DecodeUTF8_surrogateescape( const char *arg, Py_ssize_t arglen, size_t *wlen); @@ -209,25 +216,25 @@ PyAPI_FUNC(wchar_t*) _Py_DecodeUTF8_surrogateescape( extern int _Py_wstat(const wchar_t *, struct stat *); -PyAPI_FUNC(int) _Py_GetForceASCII(void); +extern int _Py_GetForceASCII(void); /* Reset "force ASCII" mode (if it was initialized). This function should be called when Python changes the LC_CTYPE locale, so the "force ASCII" mode can be detected again on the new locale encoding. 
*/ -PyAPI_FUNC(void) _Py_ResetForceASCII(void); +extern void _Py_ResetForceASCII(void); -PyAPI_FUNC(int) _Py_GetLocaleconvNumeric( +extern int _Py_GetLocaleconvNumeric( struct lconv *lc, PyObject **decimal_point, PyObject **thousands_sep); PyAPI_FUNC(void) _Py_closerange(int first, int last); -PyAPI_FUNC(wchar_t*) _Py_GetLocaleEncoding(void); -PyAPI_FUNC(PyObject*) _Py_GetLocaleEncodingObject(void); +extern wchar_t* _Py_GetLocaleEncoding(void); +extern PyObject* _Py_GetLocaleEncodingObject(void); #ifdef HAVE_NON_UNICODE_WCHAR_T_REPRESENTATION extern int _Py_LocaleUsesNonUnicodeWchar(void); @@ -246,13 +253,13 @@ extern int _Py_abspath(const wchar_t *path, wchar_t **abspath_p); #ifdef MS_WINDOWS extern int _PyOS_getfullpathname(const wchar_t *path, wchar_t **abspath_p); #endif -extern wchar_t * _Py_join_relfile(const wchar_t *dirname, - const wchar_t *relfile); +extern wchar_t* _Py_join_relfile(const wchar_t *dirname, + const wchar_t *relfile); extern int _Py_add_relfile(wchar_t *dirname, const wchar_t *relfile, size_t bufsize); extern size_t _Py_find_basename(const wchar_t *filename); -PyAPI_FUNC(wchar_t *) _Py_normpath(wchar_t *path, Py_ssize_t size); +PyAPI_FUNC(wchar_t*) _Py_normpath(wchar_t *path, Py_ssize_t size); // The Windows Games API family does not provide these functions // so provide our own implementations. Remove them in case they get added diff --git a/Include/internal/pycore_floatobject.h b/Include/internal/pycore_floatobject.h index 6abba04033d281..6b9af03c25ec36 100644 --- a/Include/internal/pycore_floatobject.h +++ b/Include/internal/pycore_floatobject.h @@ -55,12 +55,12 @@ struct _Py_float_state { void _PyFloat_ExactDealloc(PyObject *op); -PyAPI_FUNC(void) _PyFloat_DebugMallocStats(FILE* out); +extern void _PyFloat_DebugMallocStats(FILE* out); /* Format the object based on the format_spec, as defined in PEP 3101 (Advanced String Formatting). */ -PyAPI_FUNC(int) _PyFloat_FormatAdvancedWriter( +extern int _PyFloat_FormatAdvancedWriter( _PyUnicodeWriter *writer, PyObject *obj, PyObject *format_spec, diff --git a/Include/internal/pycore_flowgraph.h b/Include/internal/pycore_flowgraph.h index 4a01574809fff5..58fed46886ea45 100644 --- a/Include/internal/pycore_flowgraph.h +++ b/Include/internal/pycore_flowgraph.h @@ -11,101 +11,26 @@ extern "C" { #include "pycore_opcode_utils.h" #include "pycore_compile.h" - -typedef struct { - int i_opcode; - int i_oparg; - _PyCompilerSrcLocation i_loc; - struct _PyCfgBasicblock_ *i_target; /* target block (if jump instruction) */ - struct _PyCfgBasicblock_ *i_except; /* target block when exception is raised */ -} _PyCfgInstruction; - typedef struct { int id; } _PyCfgJumpTargetLabel; +struct _PyCfgBuilder; -typedef struct { - struct _PyCfgBasicblock_ *handlers[CO_MAXBLOCKS+1]; - int depth; -} _PyCfgExceptStack; - -typedef struct _PyCfgBasicblock_ { - /* Each basicblock in a compilation unit is linked via b_list in the - reverse order that the block are allocated. b_list points to the next - block in this list, not to be confused with b_next, which is next by - control flow. */ - struct _PyCfgBasicblock_ *b_list; - /* The label of this block if it is a jump target, -1 otherwise */ - _PyCfgJumpTargetLabel b_label; - /* Exception stack at start of block, used by assembler to create the exception handling table */ - _PyCfgExceptStack *b_exceptstack; - /* pointer to an array of instructions, initially NULL */ - _PyCfgInstruction *b_instr; - /* If b_next is non-NULL, it is a pointer to the next - block reached by normal control flow. 
*/ - struct _PyCfgBasicblock_ *b_next; - /* number of instructions used */ - int b_iused; - /* length of instruction array (b_instr) */ - int b_ialloc; - /* Used by add_checks_for_loads_of_unknown_variables */ - uint64_t b_unsafe_locals_mask; - /* Number of predecessors that a block has. */ - int b_predecessors; - /* depth of stack upon entry of block, computed by stackdepth() */ - int b_startdepth; - /* Basic block is an exception handler that preserves lasti */ - unsigned b_preserve_lasti : 1; - /* Used by compiler passes to mark whether they have visited a basic block. */ - unsigned b_visited : 1; - /* b_except_handler is used by the cold-detection algorithm to mark exception targets */ - unsigned b_except_handler : 1; - /* b_cold is true if this block is not perf critical (like an exception handler) */ - unsigned b_cold : 1; - /* b_warm is used by the cold-detection algorithm to mark blocks which are definitely not cold */ - unsigned b_warm : 1; -} _PyCfgBasicblock; - -int _PyBasicblock_InsertInstruction(_PyCfgBasicblock *block, int pos, _PyCfgInstruction *instr); +int _PyCfgBuilder_UseLabel(struct _PyCfgBuilder *g, _PyCfgJumpTargetLabel lbl); +int _PyCfgBuilder_Addop(struct _PyCfgBuilder *g, int opcode, int oparg, _PyCompilerSrcLocation loc); -typedef struct cfg_builder_ { - /* The entryblock, at which control flow begins. All blocks of the - CFG are reachable through the b_next links */ - _PyCfgBasicblock *g_entryblock; - /* Pointer to the most recently allocated block. By following - b_list links, you can reach all allocated blocks. */ - _PyCfgBasicblock *g_block_list; - /* pointer to the block currently being constructed */ - _PyCfgBasicblock *g_curblock; - /* label for the next instruction to be placed */ - _PyCfgJumpTargetLabel g_current_label; -} _PyCfgBuilder; +struct _PyCfgBuilder* _PyCfgBuilder_New(void); +void _PyCfgBuilder_Free(struct _PyCfgBuilder *g); +int _PyCfgBuilder_CheckSize(struct _PyCfgBuilder* g); -int _PyCfgBuilder_UseLabel(_PyCfgBuilder *g, _PyCfgJumpTargetLabel lbl); -int _PyCfgBuilder_Addop(_PyCfgBuilder *g, int opcode, int oparg, _PyCompilerSrcLocation loc); - -int _PyCfgBuilder_Init(_PyCfgBuilder *g); -void _PyCfgBuilder_Fini(_PyCfgBuilder *g); - -_PyCfgInstruction* _PyCfg_BasicblockLastInstr(const _PyCfgBasicblock *b); -int _PyCfg_OptimizeCodeUnit(_PyCfgBuilder *g, PyObject *consts, PyObject *const_cache, - int code_flags, int nlocals, int nparams, int firstlineno); -int _PyCfg_Stackdepth(_PyCfgBasicblock *entryblock, int code_flags); -void _PyCfg_ConvertPseudoOps(_PyCfgBasicblock *entryblock); -int _PyCfg_ResolveJumps(_PyCfgBuilder *g); - - -static inline int -basicblock_nofallthrough(const _PyCfgBasicblock *b) { - _PyCfgInstruction *last = _PyCfg_BasicblockLastInstr(b); - return (last && - (IS_SCOPE_EXIT_OPCODE(last->i_opcode) || - IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode))); -} +int _PyCfg_OptimizeCodeUnit(struct _PyCfgBuilder *g, PyObject *consts, PyObject *const_cache, + int nlocals, int nparams, int firstlineno); -#define BB_NO_FALLTHROUGH(B) (basicblock_nofallthrough(B)) -#define BB_HAS_FALLTHROUGH(B) (!basicblock_nofallthrough(B)) +int _PyCfg_ToInstructionSequence(struct _PyCfgBuilder *g, _PyCompile_InstructionSequence *seq); +int _PyCfg_OptimizedCfgToInstructionSequence(struct _PyCfgBuilder *g, _PyCompile_CodeUnitMetadata *umd, + int code_flags, int *stackdepth, int *nlocalsplus, + _PyCompile_InstructionSequence *seq); PyCodeObject * _PyAssemble_MakeCodeObject(_PyCompile_CodeUnitMetadata *u, PyObject *const_cache, diff --git 
a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index efc19e33ec5dc5..5ff20ef845ab10 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -5,8 +5,8 @@ extern "C" { #endif #include -#include -#include "pycore_code.h" // STATS +#include // offsetof() +#include "pycore_code.h" // STATS /* See Objects/frame_layout.md for an explanation of the frame stack * including explanation of the PyFrameObject and _PyInterpreterFrame diff --git a/Include/internal/pycore_function.h b/Include/internal/pycore_function.h index ecbb7001e7d840..e844d323ec7927 100644 --- a/Include/internal/pycore_function.h +++ b/Include/internal/pycore_function.h @@ -8,6 +8,12 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +extern PyObject* _PyFunction_Vectorcall( + PyObject *func, + PyObject *const *stack, + size_t nargsf, + PyObject *kwnames); + #define FUNC_MAX_WATCHERS 8 struct _py_func_state { diff --git a/Include/internal/pycore_genobject.h b/Include/internal/pycore_genobject.h index dc60b4ca705112..96a6ec43d7a08a 100644 --- a/Include/internal/pycore_genobject.h +++ b/Include/internal/pycore_genobject.h @@ -9,9 +9,20 @@ extern "C" { #endif extern PyObject *_PyGen_yf(PyGenObject *); +extern void _PyGen_Finalize(PyObject *self); + +// Export for '_asyncio' shared extension +PyAPI_FUNC(int) _PyGen_SetStopIterationValue(PyObject *); +// Export for '_asyncio' shared extension +PyAPI_FUNC(int) _PyGen_FetchStopIterationValue(PyObject **); + extern PyObject *_PyCoro_GetAwaitableIter(PyObject *o); extern PyObject *_PyAsyncGenValueWrapperNew(PyThreadState *state, PyObject *); +extern PyTypeObject _PyCoroWrapper_Type; +extern PyTypeObject _PyAsyncGenWrappedValue_Type; +extern PyTypeObject _PyAsyncGenAThrow_Type; + /* runtime lifecycle */ extern void _PyAsyncGen_Fini(PyInterpreterState *); diff --git a/Include/internal/pycore_global_objects.h b/Include/internal/pycore_global_objects.h index 5a3fb132c745ab..442f8516278b02 100644 --- a/Include/internal/pycore_global_objects.h +++ b/Include/internal/pycore_global_objects.h @@ -8,6 +8,7 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +#include "pycore_hashtable.h" // _Py_hashtable_t #include "pycore_gc.h" // PyGC_Head #include "pycore_global_strings.h" // struct _Py_global_strings #include "pycore_hamt.h" // PyHamtNode_Bitmap @@ -28,6 +29,11 @@ extern "C" { #define _Py_SINGLETON(NAME) \ _Py_GLOBAL_OBJECT(singletons.NAME) +struct _Py_cached_objects { + // XXX We could statically allocate the hashtable. 
+ _Py_hashtable_t *interned_strings; +}; + struct _Py_static_objects { struct { /* Small integers are preallocated in this array so that they diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h index 6d50ffd0a02f1f..ee9010583ff8b5 100644 --- a/Include/internal/pycore_global_objects_fini_generated.h +++ b/Include/internal/pycore_global_objects_fini_generated.h @@ -547,6 +547,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) { _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_lambda)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_listcomp)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_module)); + _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_null)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_setcomp)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_string)); _PyStaticObject_CheckRefcnt((PyObject *)&_Py_STR(anon_unknown)); diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h index bb1fb13f342fc6..b081c0e023fa4a 100644 --- a/Include/internal/pycore_global_strings.h +++ b/Include/internal/pycore_global_strings.h @@ -33,6 +33,7 @@ struct _Py_global_strings { STRUCT_FOR_STR(anon_lambda, "") STRUCT_FOR_STR(anon_listcomp, "") STRUCT_FOR_STR(anon_module, "") + STRUCT_FOR_STR(anon_null, "") STRUCT_FOR_STR(anon_setcomp, "") STRUCT_FOR_STR(anon_string, "") STRUCT_FOR_STR(anon_unknown, "") diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h index 6501ab14d27684..f57978a8d614fb 100644 --- a/Include/internal/pycore_hashtable.h +++ b/Include/internal/pycore_hashtable.h @@ -106,6 +106,7 @@ PyAPI_FUNC(int) _Py_hashtable_foreach( void *user_data); PyAPI_FUNC(size_t) _Py_hashtable_size(const _Py_hashtable_t *ht); +PyAPI_FUNC(size_t) _Py_hashtable_len(const _Py_hashtable_t *ht); /* Add a new entry to the hash. The key must not be present in the hash table. Return 0 on success, -1 on memory error. */ diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index c048ae88d9000c..077508e6c58f6c 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -5,13 +5,13 @@ extern "C" { #endif +#include "pycore_hashtable.h" // _Py_hashtable_t #include "pycore_time.h" // _PyTime_t extern int _PyImport_IsInitialized(PyInterpreterState *); -PyAPI_FUNC(PyObject *) _PyImport_GetModuleId(_Py_Identifier *name); PyAPI_FUNC(int) _PyImport_SetModule(PyObject *name, PyObject *module); -PyAPI_FUNC(int) _PyImport_SetModuleString(const char *name, PyObject* module); +extern int _PyImport_SetModuleString(const char *name, PyObject* module); extern void _PyImport_AcquireLock(PyInterpreterState *interp); extern int _PyImport_ReleaseLock(PyInterpreterState *interp); @@ -24,8 +24,8 @@ extern int _PyImport_FixupBuiltin( extern int _PyImport_FixupExtensionObject(PyObject*, PyObject *, PyObject *, PyObject *); -PyAPI_FUNC(PyObject *) _PyImport_GetModuleAttr(PyObject *, PyObject *); -PyAPI_FUNC(PyObject *) _PyImport_GetModuleAttrString(const char *, const char *); +PyAPI_FUNC(PyObject*) _PyImport_GetModuleAttr(PyObject *, PyObject *); +PyAPI_FUNC(PyObject*) _PyImport_GetModuleAttrString(const char *, const char *); struct _import_runtime_state { @@ -37,19 +37,15 @@ struct _import_runtime_state { See PyInterpreterState.modules_by_index for more info. 
*/ Py_ssize_t last_module_index; struct { - /* A thread state tied to the main interpreter, - used exclusively for when the extensions dict is access/modified - from an arbitrary thread. */ - PyThreadState main_tstate; - /* A lock to guard the dict. */ + /* A lock to guard the cache. */ PyThread_type_lock mutex; - /* A dict mapping (filename, name) to PyModuleDef for modules. + /* The actual cache of (filename, name, PyModuleDef) for modules. Only legacy (single-phase init) extension modules are added and only if they support multiple initialization (m_size >- 0) or are imported in the main interpreter. This is initialized lazily in _PyImport_FixupExtensionObject(). Modules are added there and looked up in _imp.find_extension(). */ - PyObject *dict; + _Py_hashtable_t *hashtable; } extensions; /* Package context -- the full module name for package imports */ const char * pkgcontext; @@ -187,16 +183,20 @@ struct _module_alias { const char *orig; /* ASCII encoded string */ }; -PyAPI_DATA(const struct _frozen *) _PyImport_FrozenBootstrap; -PyAPI_DATA(const struct _frozen *) _PyImport_FrozenStdlib; -PyAPI_DATA(const struct _frozen *) _PyImport_FrozenTest; +// Export for test_ctypes +PyAPI_DATA(const struct _frozen*) _PyImport_FrozenBootstrap; +// Export for test_ctypes +PyAPI_DATA(const struct _frozen*) _PyImport_FrozenStdlib; +// Export for test_ctypes +PyAPI_DATA(const struct _frozen*) _PyImport_FrozenTest; + extern const struct _module_alias * _PyImport_FrozenAliases; -PyAPI_FUNC(int) _PyImport_CheckSubinterpIncompatibleExtensionAllowed( +extern int _PyImport_CheckSubinterpIncompatibleExtensionAllowed( const char *name); -// for testing +// Export for '_testinternalcapi' shared extension PyAPI_FUNC(int) _PyImport_ClearExtension(PyObject *name, PyObject *filename); #ifdef __cplusplus diff --git a/Include/internal/pycore_initconfig.h b/Include/internal/pycore_initconfig.h index 4cbd14a61d4545..0945bb0936f039 100644 --- a/Include/internal/pycore_initconfig.h +++ b/Include/internal/pycore_initconfig.h @@ -49,14 +49,14 @@ struct pyruntimestate; #define _PyWideStringList_INIT (PyWideStringList){.length = 0, .items = NULL} #ifndef NDEBUG -PyAPI_FUNC(int) _PyWideStringList_CheckConsistency(const PyWideStringList *list); +extern int _PyWideStringList_CheckConsistency(const PyWideStringList *list); #endif -PyAPI_FUNC(void) _PyWideStringList_Clear(PyWideStringList *list); -PyAPI_FUNC(int) _PyWideStringList_Copy(PyWideStringList *list, +extern void _PyWideStringList_Clear(PyWideStringList *list); +extern int _PyWideStringList_Copy(PyWideStringList *list, const PyWideStringList *list2); -PyAPI_FUNC(PyStatus) _PyWideStringList_Extend(PyWideStringList *list, +extern PyStatus _PyWideStringList_Extend(PyWideStringList *list, const PyWideStringList *list2); -PyAPI_FUNC(PyObject*) _PyWideStringList_AsList(const PyWideStringList *list); +extern PyObject* _PyWideStringList_AsList(const PyWideStringList *list); /* --- _PyArgv ---------------------------------------------------- */ @@ -68,28 +68,28 @@ typedef struct _PyArgv { wchar_t * const *wchar_argv; } _PyArgv; -PyAPI_FUNC(PyStatus) _PyArgv_AsWstrList(const _PyArgv *args, +extern PyStatus _PyArgv_AsWstrList(const _PyArgv *args, PyWideStringList *list); /* --- Helper functions ------------------------------------------- */ -PyAPI_FUNC(int) _Py_str_to_int( +extern int _Py_str_to_int( const char *str, int *result); -PyAPI_FUNC(const wchar_t*) _Py_get_xoption( +extern const wchar_t* _Py_get_xoption( const PyWideStringList *xoptions, const wchar_t *name); 
-PyAPI_FUNC(const char*) _Py_GetEnv( +extern const char* _Py_GetEnv( int use_environment, const char *name); -PyAPI_FUNC(void) _Py_get_env_flag( +extern void _Py_get_env_flag( int use_environment, int *flag, const char *name); /* Py_GetArgcArgv() helper */ -PyAPI_FUNC(void) _Py_ClearArgcArgv(void); +extern void _Py_ClearArgcArgv(void); /* --- _PyPreCmdline ------------------------------------------------- */ @@ -122,6 +122,7 @@ extern PyStatus _PyPreCmdline_Read(_PyPreCmdline *cmdline, /* --- PyPreConfig ----------------------------------------------- */ +// Export for '_testembed' program PyAPI_FUNC(void) _PyPreConfig_InitCompatConfig(PyPreConfig *preconfig); extern void _PyPreConfig_InitFromConfig( PyPreConfig *preconfig, @@ -146,6 +147,7 @@ typedef enum { _PyConfig_INIT_ISOLATED = 3 } _PyConfigInitEnum; +// Export for '_testembed' program PyAPI_FUNC(void) _PyConfig_InitCompatConfig(PyConfig *config); extern PyStatus _PyConfig_Copy( PyConfig *config, diff --git a/Include/internal/pycore_instruments.h b/Include/internal/pycore_instruments.h index 9fb3952227af18..56de9f87171484 100644 --- a/Include/internal/pycore_instruments.h +++ b/Include/internal/pycore_instruments.h @@ -28,7 +28,8 @@ extern "C" { #define PY_MONITORING_EVENT_BRANCH 8 #define PY_MONITORING_EVENT_STOP_ITERATION 9 -#define PY_MONITORING_INSTRUMENTED_EVENTS 10 +#define PY_MONITORING_IS_INSTRUMENTED_EVENT(ev) \ + ((ev) <= PY_MONITORING_EVENT_STOP_ITERATION) /* Other events, mainly exceptions */ @@ -36,12 +37,13 @@ extern "C" { #define PY_MONITORING_EVENT_EXCEPTION_HANDLED 11 #define PY_MONITORING_EVENT_PY_UNWIND 12 #define PY_MONITORING_EVENT_PY_THROW 13 +#define PY_MONITORING_EVENT_RERAISE 14 /* Ancilliary events */ -#define PY_MONITORING_EVENT_C_RETURN 14 -#define PY_MONITORING_EVENT_C_RAISE 15 +#define PY_MONITORING_EVENT_C_RETURN 15 +#define PY_MONITORING_EVENT_C_RAISE 16 typedef uint32_t _PyMonitoringEventSet; @@ -88,10 +90,6 @@ extern int _Py_call_instrumentation_2args(PyThreadState *tstate, int event, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg0, PyObject *arg1); -extern void -_Py_call_instrumentation_exc0(PyThreadState *tstate, int event, - _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); - extern void _Py_call_instrumentation_exc2(PyThreadState *tstate, int event, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *arg0, PyObject *arg1); diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index bb37cafe6286a9..91c473e58eaba2 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -25,7 +25,7 @@ extern "C" { #include "pycore_gc.h" // struct _gc_runtime_state #include "pycore_global_objects.h" // struct _Py_interp_static_objects #include "pycore_import.h" // struct _import_state -#include "pycore_instruments.h" // PY_MONITORING_EVENTS +#include "pycore_instruments.h" // _PY_MONITORING_EVENTS #include "pycore_list.h" // struct _Py_list_state #include "pycore_object_state.h" // struct _py_object_state #include "pycore_obmalloc.h" // struct obmalloc_state @@ -48,6 +48,11 @@ struct _Py_long_state { */ struct _is { + /* This struct countains the eval_breaker, + * which is by far the hottest field in this struct + * and should be placed at the beginning. */ + struct _ceval_state ceval; + PyInterpreterState *next; int64_t id; @@ -109,8 +114,6 @@ struct _is { // Dictionary of the builtins module PyObject *builtins; - struct _ceval_state ceval; - struct _import_state imports; /* The per-interpreter GIL, which might not be used. 
*/ @@ -190,7 +193,7 @@ struct _is { bool sys_trace_initialized; Py_ssize_t sys_profiling_threads; /* Count of threads with c_profilefunc set */ Py_ssize_t sys_tracing_threads; /* Count of threads with c_tracefunc set */ - PyObject *monitoring_callables[PY_MONITORING_TOOL_IDS][PY_MONITORING_EVENTS]; + PyObject *monitoring_callables[PY_MONITORING_TOOL_IDS][_PY_MONITORING_EVENTS]; PyObject *monitoring_tool_names[PY_MONITORING_TOOL_IDS]; struct _Py_interp_cached_objects cached_objects; @@ -232,13 +235,11 @@ struct _xidregitem { crossinterpdatafunc getdata; }; -PyAPI_FUNC(PyInterpreterState*) _PyInterpreterState_LookUpID(int64_t); +extern PyInterpreterState* _PyInterpreterState_LookUpID(int64_t); -PyAPI_FUNC(int) _PyInterpreterState_IDInitref(PyInterpreterState *); -PyAPI_FUNC(int) _PyInterpreterState_IDIncref(PyInterpreterState *); -PyAPI_FUNC(void) _PyInterpreterState_IDDecref(PyInterpreterState *); - -PyAPI_FUNC(PyObject*) _PyInterpreterState_GetMainModule(PyInterpreterState *); +extern int _PyInterpreterState_IDInitref(PyInterpreterState *); +extern int _PyInterpreterState_IDIncref(PyInterpreterState *); +extern void _PyInterpreterState_IDDecref(PyInterpreterState *); extern const PyConfig* _PyInterpreterState_GetConfig(PyInterpreterState *interp); @@ -253,7 +254,9 @@ extern const PyConfig* _PyInterpreterState_GetConfig(PyInterpreterState *interp) The caller must hold the GIL. Once done with the configuration, PyConfig_Clear() must be called to clear - it. */ + it. + + Export for '_testinternalcapi' shared extension. */ PyAPI_FUNC(int) _PyInterpreterState_GetConfigCopy( struct PyConfig *config); @@ -271,7 +274,9 @@ PyAPI_FUNC(int) _PyInterpreterState_GetConfigCopy( Return 0 on success. Raise an exception and return -1 on error. - The configuration should come from _PyInterpreterState_GetConfigCopy(). */ + The configuration should come from _PyInterpreterState_GetConfigCopy(). + + Export for '_testinternalcapi' shared extension. 
*/ PyAPI_FUNC(int) _PyInterpreterState_SetConfig( const struct PyConfig *config); diff --git a/Include/internal/pycore_intrinsics.h b/Include/internal/pycore_intrinsics.h index 39f15681b7b24b..37d4efc12bb771 100644 --- a/Include/internal/pycore_intrinsics.h +++ b/Include/internal/pycore_intrinsics.h @@ -1,4 +1,3 @@ -// Auto-generated by Tools/build/generate_opcode_h.py from Lib/opcode.py /* Unary Functions: */ #define INTRINSIC_1_INVALID 0 @@ -26,7 +25,18 @@ #define MAX_INTRINSIC_2 4 -typedef PyObject *(*instrinsic_func1)(PyThreadState* tstate, PyObject *value); -typedef PyObject *(*instrinsic_func2)(PyThreadState* tstate, PyObject *value1, PyObject *value2); -extern const instrinsic_func1 _PyIntrinsics_UnaryFunctions[]; -extern const instrinsic_func2 _PyIntrinsics_BinaryFunctions[]; +typedef PyObject *(*intrinsic_func1)(PyThreadState* tstate, PyObject *value); +typedef PyObject *(*intrinsic_func2)(PyThreadState* tstate, PyObject *value1, PyObject *value2); + +typedef struct { + intrinsic_func1 func; + const char *name; +} intrinsic_func1_info; + +typedef struct { + intrinsic_func2 func; + const char *name; +} intrinsic_func2_info; + +extern const intrinsic_func1_info _PyIntrinsics_UnaryFunctions[]; +extern const intrinsic_func2_info _PyIntrinsics_BinaryFunctions[]; diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h index 3f01694e5f5ac4..3dc00ec7e04c6f 100644 --- a/Include/internal/pycore_long.h +++ b/Include/internal/pycore_long.h @@ -64,45 +64,45 @@ extern void _PyLong_FiniTypes(PyInterpreterState *interp); # error "_PY_NSMALLPOSINTS must be greater than or equal to 257" #endif -// Return a borrowed reference to the zero singleton. +// Return a reference to the immortal zero singleton. // The function cannot return NULL. static inline PyObject* _PyLong_GetZero(void) { return (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS]; } -// Return a borrowed reference to the one singleton. +// Return a reference to the immortal one singleton. // The function cannot return NULL. static inline PyObject* _PyLong_GetOne(void) { return (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS+1]; } static inline PyObject* _PyLong_FromUnsignedChar(unsigned char i) { - return Py_NewRef((PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS+i]); + return (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS+i]; } -PyObject *_PyLong_Add(PyLongObject *left, PyLongObject *right); -PyObject *_PyLong_Multiply(PyLongObject *left, PyLongObject *right); -PyObject *_PyLong_Subtract(PyLongObject *left, PyLongObject *right); +extern PyObject *_PyLong_Add(PyLongObject *left, PyLongObject *right); +extern PyObject *_PyLong_Multiply(PyLongObject *left, PyLongObject *right); +extern PyObject *_PyLong_Subtract(PyLongObject *left, PyLongObject *right); -/* Used by Python/mystrtoul.c, _PyBytes_FromHex(), - _PyBytes_DecodeEscape(), etc. */ +// Used by _PyBytes_FromHex(), _PyBytes_DecodeEscape(), Python/mystrtoul.c. +// Export for 'binascii' shared extension. PyAPI_DATA(unsigned char) _PyLong_DigitValue[256]; /* Format the object based on the format_spec, as defined in PEP 3101 (Advanced String Formatting). 
*/ -PyAPI_FUNC(int) _PyLong_FormatAdvancedWriter( +extern int _PyLong_FormatAdvancedWriter( _PyUnicodeWriter *writer, PyObject *obj, PyObject *format_spec, Py_ssize_t start, Py_ssize_t end); -PyAPI_FUNC(int) _PyLong_FormatWriter( +extern int _PyLong_FormatWriter( _PyUnicodeWriter *writer, PyObject *obj, int base, int alternate); -PyAPI_FUNC(char*) _PyLong_FormatBytesWriter( +extern char* _PyLong_FormatBytesWriter( _PyBytesWriter *writer, char *str, PyObject *obj, diff --git a/Include/internal/pycore_moduleobject.h b/Include/internal/pycore_moduleobject.h index 31a31e724d0b21..5644bbe5e0552b 100644 --- a/Include/internal/pycore_moduleobject.h +++ b/Include/internal/pycore_moduleobject.h @@ -8,6 +8,12 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +extern void _PyModule_Clear(PyObject *); +extern void _PyModule_ClearDict(PyObject *); +extern int _PyModuleSpec_IsInitializing(PyObject *); + +extern int _PyModule_IsExtension(PyObject *obj); + typedef struct { PyObject_HEAD PyObject *md_dict; diff --git a/Include/internal/pycore_namespace.h b/Include/internal/pycore_namespace.h index cb76f040693d10..f165cf15319a59 100644 --- a/Include/internal/pycore_namespace.h +++ b/Include/internal/pycore_namespace.h @@ -10,9 +10,10 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif -PyAPI_DATA(PyTypeObject) _PyNamespace_Type; +extern PyTypeObject _PyNamespace_Type; -PyAPI_FUNC(PyObject *) _PyNamespace_New(PyObject *kwds); +// Export for '_testmultiphase' shared extension +PyAPI_FUNC(PyObject*) _PyNamespace_New(PyObject *kwds); #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index 2358f48738a905..857d6efec3b3b1 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -14,6 +14,27 @@ extern "C" { #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_runtime.h" // _PyRuntime +/* Check if an object is consistent. For example, ensure that the reference + counter is greater than or equal to 1, and ensure that ob_type is not NULL. + + Call _PyObject_AssertFailed() if the object is inconsistent. + + If check_content is zero, only check header fields: reduce the overhead. + + The function always returns 1. The return value is just here to be able to + write: + + assert(_PyObject_CheckConsistency(obj, 1)); */ +extern int _PyObject_CheckConsistency(PyObject *op, int check_content); + +extern void _PyDebugAllocatorStats(FILE *out, const char *block_name, + int num_blocks, size_t sizeof_block); + +extern void _PyObject_DebugTypeStats(FILE *out); + +// Export for shared _testinternalcapi extension +PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *); + /* We need to maintain an internal copy of Py{Var}Object_HEAD_INIT to avoid designated initializer conflicts in C++20. If we use the definition in object.h, we will be mixing designated and non-designated initializers in @@ -135,8 +156,8 @@ _Py_DECREF_NO_DEALLOC(PyObject *op) #endif -PyAPI_FUNC(int) _PyType_CheckConsistency(PyTypeObject *type); -PyAPI_FUNC(int) _PyDict_CheckConsistency(PyObject *mp, int check_content); +extern int _PyType_CheckConsistency(PyTypeObject *type); +extern int _PyDict_CheckConsistency(PyObject *mp, int check_content); /* Update the Python traceback of an object. This function must be called when a memory block is reused from a free list.
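As an illustrative aside (a sketch, not part of the patch): the comment added above on _PyObject_CheckConsistency() describes an assert() idiom; a minimal example of how core code might use it could look like the following. 'validate_object' is a hypothetical helper, and the header is only usable in Py_BUILD_CORE translation units.

/* Minimal sketch, assuming Py_BUILD_CORE; 'validate_object' is hypothetical. */
#include "Python.h"
#include "pycore_object.h"   // _PyObject_CheckConsistency()

static void
validate_object(PyObject *obj)
{
    /* Full check of header fields and content. The call always returns 1,
       so the assert itself never fails; an inconsistent object aborts
       earlier via _PyObject_AssertFailed(). */
    assert(_PyObject_CheckConsistency(obj, 1));
    /* Cheaper variant: header fields only (check_content == 0). */
    assert(_PyObject_CheckConsistency(obj, 0));
}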
@@ -152,6 +173,7 @@ _PyType_HasFeature(PyTypeObject *type, unsigned long feature) { extern void _PyType_InitCache(PyInterpreterState *interp); +extern void _PyObject_InitState(PyInterpreterState *interp); /* Inline functions trading binary compatibility for speed: _PyObject_Init() is the fast version of PyObject_Init(), and @@ -271,8 +293,8 @@ extern void _PyDebug_PrintTotalRefs(void); #ifdef Py_TRACE_REFS extern void _Py_AddToAllObjects(PyObject *op, int force); -extern void _Py_PrintReferences(FILE *); -extern void _Py_PrintReferenceAddresses(FILE *); +extern void _Py_PrintReferences(PyInterpreterState *, FILE *); +extern void _Py_PrintReferenceAddresses(PyInterpreterState *, FILE *); #endif @@ -355,7 +377,11 @@ static inline int _PyType_SUPPORTS_WEAKREFS(PyTypeObject *type) { } extern PyObject* _PyType_AllocNoTrack(PyTypeObject *type, Py_ssize_t nitems); -PyObject *_PyType_NewManagedObject(PyTypeObject *type); +extern PyObject *_PyType_NewManagedObject(PyTypeObject *type); + +extern PyTypeObject* _PyType_CalculateMetaclass(PyTypeObject *, PyObject *); +extern PyObject* _PyType_GetDocFromInternalDoc(const char *, const char *); +extern PyObject* _PyType_GetTextSignatureFromInternalDoc(const char *, const char *, int); extern int _PyObject_InitializeDict(PyObject *obj); int _PyObject_InitInlineValues(PyObject *obj, PyTypeObject *tp); @@ -409,7 +435,13 @@ extern PyObject ** _PyObject_ComputedDictPointer(PyObject *); extern void _PyObject_FreeInstanceAttributes(PyObject *obj); extern int _PyObject_IsInstanceDictEmpty(PyObject *); -PyAPI_FUNC(PyObject *) _PyObject_LookupSpecial(PyObject *, PyObject *); +// Export for 'math' shared extension +PyAPI_FUNC(PyObject*) _PyObject_LookupSpecial(PyObject *, PyObject *); + +extern int _PyObject_IsAbstract(PyObject *); + +extern int _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method); +extern PyObject* _PyObject_NextNotImplemented(PyObject *); /* C function call trampolines to mitigate bad function pointer casts. * @@ -438,6 +470,16 @@ extern PyObject* _PyCFunctionWithKeywords_TrampolineCall( (meth)((self), (args), (kw)) #endif // __EMSCRIPTEN__ && PY_CALL_TRAMPOLINE +// Export for '_pickle' shared extension +PyAPI_DATA(PyTypeObject) _PyNone_Type; +// Export for '_pickle' shared extension +PyAPI_DATA(PyTypeObject) _PyNotImplemented_Type; + +// Maps Py_LT to Py_GT, ..., Py_GE to Py_LE. +// Defined in Objects/object.c. +// Export for the stable ABI. +PyAPI_DATA(int) _Py_SwappedOp[]; + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_object_state.h b/Include/internal/pycore_object_state.h index 94005d77881432..65feb5af969f8b 100644 --- a/Include/internal/pycore_object_state.h +++ b/Include/internal/pycore_object_state.h @@ -11,17 +11,22 @@ extern "C" { struct _py_object_runtime_state { #ifdef Py_REF_DEBUG Py_ssize_t interpreter_leaks; -#else - int _not_used; #endif + int _not_used; }; struct _py_object_state { #ifdef Py_REF_DEBUG Py_ssize_t reftotal; -#else - int _not_used; #endif +#ifdef Py_TRACE_REFS + /* Head of circular doubly-linked list of all objects. These are linked + * together via the _ob_prev and _ob_next members of a PyObject, which + * exist only in a Py_TRACE_REFS build. 
+ */ + PyObject refchain; +#endif + int _not_used; }; diff --git a/Include/internal/pycore_opcode.h b/Include/internal/pycore_opcode.h index d7f6b84e95c4f8..a187da6e24730f 100644 --- a/Include/internal/pycore_opcode.h +++ b/Include/internal/pycore_opcode.h @@ -19,20 +19,20 @@ extern const uint8_t _PyOpcode_Deopt[256]; #ifdef NEED_OPCODE_TABLES const uint8_t _PyOpcode_Caches[256] = { - [TO_BOOL] = 3, - [BINARY_SUBSCR] = 1, - [STORE_SUBSCR] = 1, + [LOAD_GLOBAL] = 4, + [BINARY_OP] = 1, [UNPACK_SEQUENCE] = 1, + [COMPARE_OP] = 1, + [BINARY_SUBSCR] = 1, [FOR_ITER] = 1, - [STORE_ATTR] = 4, + [LOAD_SUPER_ATTR] = 1, [LOAD_ATTR] = 9, - [COMPARE_OP] = 1, - [LOAD_GLOBAL] = 4, - [BINARY_OP] = 1, + [STORE_ATTR] = 4, + [CALL] = 3, + [STORE_SUBSCR] = 1, [SEND] = 1, [JUMP_BACKWARD] = 1, - [LOAD_SUPER_ATTR] = 1, - [CALL] = 3, + [TO_BOOL] = 3, }; const uint8_t _PyOpcode_Deopt[256] = { @@ -52,6 +52,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [BINARY_SUBSCR_DICT] = BINARY_SUBSCR, [BINARY_SUBSCR_GETITEM] = BINARY_SUBSCR, [BINARY_SUBSCR_LIST_INT] = BINARY_SUBSCR, + [BINARY_SUBSCR_STR_INT] = BINARY_SUBSCR, [BINARY_SUBSCR_TUPLE_INT] = BINARY_SUBSCR, [BUILD_CONST_KEY_MAP] = BUILD_CONST_KEY_MAP, [BUILD_LIST] = BUILD_LIST, @@ -292,12 +293,12 @@ const char *const _PyOpcode_OpName[268] = { [FORMAT_SIMPLE] = "FORMAT_SIMPLE", [FORMAT_WITH_SPEC] = "FORMAT_WITH_SPEC", [BINARY_SUBSCR_LIST_INT] = "BINARY_SUBSCR_LIST_INT", + [BINARY_SUBSCR_STR_INT] = "BINARY_SUBSCR_STR_INT", [BINARY_SUBSCR_TUPLE_INT] = "BINARY_SUBSCR_TUPLE_INT", [STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT", [STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT", [SEND_GEN] = "SEND_GEN", [UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE", - [UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE", [WITH_EXCEPT_START] = "WITH_EXCEPT_START", [GET_AITER] = "GET_AITER", [GET_ANEXT] = "GET_ANEXT", @@ -305,39 +306,39 @@ const char *const _PyOpcode_OpName[268] = { [BEFORE_WITH] = "BEFORE_WITH", [END_ASYNC_FOR] = "END_ASYNC_FOR", [CLEANUP_THROW] = "CLEANUP_THROW", + [UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE", [UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST", [STORE_ATTR_INSTANCE_VALUE] = "STORE_ATTR_INSTANCE_VALUE", [STORE_ATTR_SLOT] = "STORE_ATTR_SLOT", - [STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT", [STORE_SUBSCR] = "STORE_SUBSCR", [DELETE_SUBSCR] = "DELETE_SUBSCR", + [STORE_ATTR_WITH_HINT] = "STORE_ATTR_WITH_HINT", [LOAD_GLOBAL_MODULE] = "LOAD_GLOBAL_MODULE", [LOAD_GLOBAL_BUILTIN] = "LOAD_GLOBAL_BUILTIN", [LOAD_SUPER_ATTR_ATTR] = "LOAD_SUPER_ATTR_ATTR", [LOAD_SUPER_ATTR_METHOD] = "LOAD_SUPER_ATTR_METHOD", [LOAD_ATTR_INSTANCE_VALUE] = "LOAD_ATTR_INSTANCE_VALUE", - [LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE", [GET_ITER] = "GET_ITER", [GET_YIELD_FROM_ITER] = "GET_YIELD_FROM_ITER", - [LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT", + [LOAD_ATTR_MODULE] = "LOAD_ATTR_MODULE", [LOAD_BUILD_CLASS] = "LOAD_BUILD_CLASS", + [LOAD_ATTR_WITH_HINT] = "LOAD_ATTR_WITH_HINT", [LOAD_ATTR_SLOT] = "LOAD_ATTR_SLOT", - [LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS", [LOAD_ASSERTION_ERROR] = "LOAD_ASSERTION_ERROR", [RETURN_GENERATOR] = "RETURN_GENERATOR", + [LOAD_ATTR_CLASS] = "LOAD_ATTR_CLASS", [LOAD_ATTR_PROPERTY] = "LOAD_ATTR_PROPERTY", [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = "LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN", [LOAD_ATTR_METHOD_WITH_VALUES] = "LOAD_ATTR_METHOD_WITH_VALUES", [LOAD_ATTR_METHOD_NO_DICT] = "LOAD_ATTR_METHOD_NO_DICT", [LOAD_ATTR_METHOD_LAZY_DICT] = "LOAD_ATTR_METHOD_LAZY_DICT", [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = "LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES", - 
[LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = "LOAD_ATTR_NONDESCRIPTOR_NO_DICT", [RETURN_VALUE] = "RETURN_VALUE", - [COMPARE_OP_FLOAT] = "COMPARE_OP_FLOAT", + [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = "LOAD_ATTR_NONDESCRIPTOR_NO_DICT", [SETUP_ANNOTATIONS] = "SETUP_ANNOTATIONS", - [COMPARE_OP_INT] = "COMPARE_OP_INT", + [COMPARE_OP_FLOAT] = "COMPARE_OP_FLOAT", [LOAD_LOCALS] = "LOAD_LOCALS", - [COMPARE_OP_STR] = "COMPARE_OP_STR", + [COMPARE_OP_INT] = "COMPARE_OP_INT", [POP_EXCEPT] = "POP_EXCEPT", [STORE_NAME] = "STORE_NAME", [DELETE_NAME] = "DELETE_NAME", @@ -360,9 +361,9 @@ const char *const _PyOpcode_OpName[268] = { [IMPORT_NAME] = "IMPORT_NAME", [IMPORT_FROM] = "IMPORT_FROM", [JUMP_FORWARD] = "JUMP_FORWARD", + [COMPARE_OP_STR] = "COMPARE_OP_STR", [FOR_ITER_LIST] = "FOR_ITER_LIST", [FOR_ITER_TUPLE] = "FOR_ITER_TUPLE", - [FOR_ITER_RANGE] = "FOR_ITER_RANGE", [POP_JUMP_IF_FALSE] = "POP_JUMP_IF_FALSE", [POP_JUMP_IF_TRUE] = "POP_JUMP_IF_TRUE", [LOAD_GLOBAL] = "LOAD_GLOBAL", @@ -381,11 +382,11 @@ const char *const _PyOpcode_OpName[268] = { [POP_JUMP_IF_NONE] = "POP_JUMP_IF_NONE", [RAISE_VARARGS] = "RAISE_VARARGS", [GET_AWAITABLE] = "GET_AWAITABLE", - [FOR_ITER_GEN] = "FOR_ITER_GEN", + [FOR_ITER_RANGE] = "FOR_ITER_RANGE", [BUILD_SLICE] = "BUILD_SLICE", [JUMP_BACKWARD_NO_INTERRUPT] = "JUMP_BACKWARD_NO_INTERRUPT", [MAKE_CELL] = "MAKE_CELL", - [CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", + [FOR_ITER_GEN] = "FOR_ITER_GEN", [LOAD_DEREF] = "LOAD_DEREF", [STORE_DEREF] = "STORE_DEREF", [DELETE_DEREF] = "DELETE_DEREF", @@ -397,26 +398,26 @@ const char *const _PyOpcode_OpName[268] = { [LIST_APPEND] = "LIST_APPEND", [SET_ADD] = "SET_ADD", [MAP_ADD] = "MAP_ADD", - [CALL_PY_EXACT_ARGS] = "CALL_PY_EXACT_ARGS", + [CALL_BOUND_METHOD_EXACT_ARGS] = "CALL_BOUND_METHOD_EXACT_ARGS", [COPY_FREE_VARS] = "COPY_FREE_VARS", [YIELD_VALUE] = "YIELD_VALUE", [RESUME] = "RESUME", [MATCH_CLASS] = "MATCH_CLASS", + [CALL_PY_EXACT_ARGS] = "CALL_PY_EXACT_ARGS", [CALL_PY_WITH_DEFAULTS] = "CALL_PY_WITH_DEFAULTS", [CALL_NO_KW_TYPE_1] = "CALL_NO_KW_TYPE_1", - [CALL_NO_KW_STR_1] = "CALL_NO_KW_STR_1", [BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP", [BUILD_STRING] = "BUILD_STRING", [CONVERT_VALUE] = "CONVERT_VALUE", + [CALL_NO_KW_STR_1] = "CALL_NO_KW_STR_1", [CALL_NO_KW_TUPLE_1] = "CALL_NO_KW_TUPLE_1", [CALL_BUILTIN_CLASS] = "CALL_BUILTIN_CLASS", - [CALL_NO_KW_BUILTIN_O] = "CALL_NO_KW_BUILTIN_O", [LIST_EXTEND] = "LIST_EXTEND", [SET_UPDATE] = "SET_UPDATE", [DICT_MERGE] = "DICT_MERGE", [DICT_UPDATE] = "DICT_UPDATE", + [CALL_NO_KW_BUILTIN_O] = "CALL_NO_KW_BUILTIN_O", [CALL_NO_KW_BUILTIN_FAST] = "CALL_NO_KW_BUILTIN_FAST", - [CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS", [LOAD_FAST_LOAD_FAST] = "LOAD_FAST_LOAD_FAST", [STORE_FAST_LOAD_FAST] = "STORE_FAST_LOAD_FAST", [STORE_FAST_STORE_FAST] = "STORE_FAST_STORE_FAST", @@ -427,6 +428,7 @@ const char *const _PyOpcode_OpName[268] = { [LOAD_FROM_DICT_OR_GLOBALS] = "LOAD_FROM_DICT_OR_GLOBALS", [LOAD_FROM_DICT_OR_DEREF] = "LOAD_FROM_DICT_OR_DEREF", [SET_FUNCTION_ATTRIBUTE] = "SET_FUNCTION_ATTRIBUTE", + [CALL_BUILTIN_FAST_WITH_KEYWORDS] = "CALL_BUILTIN_FAST_WITH_KEYWORDS", [CALL_NO_KW_LEN] = "CALL_NO_KW_LEN", [CALL_NO_KW_ISINSTANCE] = "CALL_NO_KW_ISINSTANCE", [CALL_NO_KW_LIST_APPEND] = "CALL_NO_KW_LIST_APPEND", @@ -435,7 +437,6 @@ const char *const _PyOpcode_OpName[268] = { [CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS] = "CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS", [CALL_NO_KW_METHOD_DESCRIPTOR_FAST] = "CALL_NO_KW_METHOD_DESCRIPTOR_FAST", [CALL_NO_KW_ALLOC_AND_ENTER_INIT] = 
"CALL_NO_KW_ALLOC_AND_ENTER_INIT", - [186] = "<186>", [187] = "<187>", [188] = "<188>", [189] = "<189>", @@ -521,7 +522,6 @@ const char *const _PyOpcode_OpName[268] = { #endif // NEED_OPCODE_TABLES #define EXTRA_CASES \ - case 186: \ case 187: \ case 188: \ case 189: \ diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index d525913f8a7aba..9f4437c09e92cb 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -35,27 +35,26 @@ #define _BINARY_OP_ADD_UNICODE 311 #define _LOAD_LOCALS 312 #define _LOAD_FROM_DICT_OR_GLOBALS 313 -#define _SKIP_CACHE 314 -#define _GUARD_GLOBALS_VERSION 315 -#define _GUARD_BUILTINS_VERSION 316 -#define _LOAD_GLOBAL_MODULE 317 -#define _LOAD_GLOBAL_BUILTINS 318 -#define _GUARD_TYPE_VERSION 319 -#define _CHECK_MANAGED_OBJECT_HAS_VALUES 320 -#define _LOAD_ATTR_INSTANCE_VALUE 321 -#define IS_NONE 322 -#define _ITER_CHECK_LIST 323 -#define _IS_ITER_EXHAUSTED_LIST 324 -#define _ITER_NEXT_LIST 325 -#define _ITER_CHECK_TUPLE 326 -#define _IS_ITER_EXHAUSTED_TUPLE 327 -#define _ITER_NEXT_TUPLE 328 -#define _ITER_CHECK_RANGE 329 -#define _IS_ITER_EXHAUSTED_RANGE 330 -#define _ITER_NEXT_RANGE 331 -#define _POP_JUMP_IF_FALSE 332 -#define _POP_JUMP_IF_TRUE 333 -#define JUMP_TO_TOP 334 +#define _GUARD_GLOBALS_VERSION 314 +#define _GUARD_BUILTINS_VERSION 315 +#define _LOAD_GLOBAL_MODULE 316 +#define _LOAD_GLOBAL_BUILTINS 317 +#define _GUARD_TYPE_VERSION 318 +#define _CHECK_MANAGED_OBJECT_HAS_VALUES 319 +#define _LOAD_ATTR_INSTANCE_VALUE 320 +#define IS_NONE 321 +#define _ITER_CHECK_LIST 322 +#define _IS_ITER_EXHAUSTED_LIST 323 +#define _ITER_NEXT_LIST 324 +#define _ITER_CHECK_TUPLE 325 +#define _IS_ITER_EXHAUSTED_TUPLE 326 +#define _ITER_NEXT_TUPLE 327 +#define _ITER_CHECK_RANGE 328 +#define _IS_ITER_EXHAUSTED_RANGE 329 +#define _ITER_NEXT_RANGE 330 +#define _POP_JUMP_IF_FALSE 331 +#define _POP_JUMP_IF_TRUE 332 +#define JUMP_TO_TOP 333 #ifndef NEED_OPCODE_METADATA extern int _PyOpcode_num_popped(int opcode, int oparg, bool jump); @@ -145,6 +144,8 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 4; case BINARY_SUBSCR_LIST_INT: return 2; + case BINARY_SUBSCR_STR_INT: + return 2; case BINARY_SUBSCR_TUPLE_INT: return 2; case BINARY_SUBSCR_DICT: @@ -272,11 +273,11 @@ _PyOpcode_num_popped(int opcode, int oparg, bool jump) { case BUILD_CONST_KEY_MAP: return oparg + 1; case DICT_UPDATE: - return 1; + return (oparg - 1) + 2; case DICT_MERGE: - return 1; + return (oparg - 1) + 5; case MAP_ADD: - return 2; + return (oparg - 1) + 3; case INSTRUMENTED_LOAD_SUPER_ATTR: return 3; case LOAD_SUPER_ATTR: @@ -589,6 +590,8 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case BINARY_SUBSCR_LIST_INT: return 1; + case BINARY_SUBSCR_STR_INT: + return 1; case BINARY_SUBSCR_TUPLE_INT: return 1; case BINARY_SUBSCR_DICT: @@ -680,9 +683,9 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case LOAD_GLOBAL: return ((oparg & 1) ? 1 : 0) + 1; case LOAD_GLOBAL_MODULE: - return ((oparg & 1) ? 1 : 0) + 1; + return (oparg & 1 ? 1 : 0) + 1; case LOAD_GLOBAL_BUILTIN: - return ((oparg & 1) ? 1 : 0) + 1; + return (oparg & 1 ? 
1 : 0) + 1; case DELETE_FAST: return 0; case MAKE_CELL: @@ -716,11 +719,11 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case BUILD_CONST_KEY_MAP: return 1; case DICT_UPDATE: - return 0; + return (oparg - 1) + 1; case DICT_MERGE: - return 0; + return (oparg - 1) + 4; case MAP_ADD: - return 0; + return (oparg - 1) + 1; case INSTRUMENTED_LOAD_SUPER_ATTR: return ((oparg & 1) ? 1 : 0) + 1; case LOAD_SUPER_ATTR: @@ -732,7 +735,7 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case LOAD_ZERO_SUPER_ATTR: return ((oparg & 1) ? 1 : 0) + 1; case LOAD_SUPER_ATTR_ATTR: - return ((oparg & 1) ? 1 : 0) + 1; + return 1; case LOAD_SUPER_ATTR_METHOD: return 2; case LOAD_ATTR: @@ -740,7 +743,7 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case LOAD_METHOD: return ((oparg & 1) ? 1 : 0) + 1; case LOAD_ATTR_INSTANCE_VALUE: - return ((oparg & 1) ? 1 : 0) + 1; + return (oparg & 1 ? 1 : 0) + 1; case LOAD_ATTR_MODULE: return ((oparg & 1) ? 1 : 0) + 1; case LOAD_ATTR_WITH_HINT: @@ -750,9 +753,9 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { case LOAD_ATTR_CLASS: return ((oparg & 1) ? 1 : 0) + 1; case LOAD_ATTR_PROPERTY: - return ((oparg & 1) ? 1 : 0) + 1; + return 1; case LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN: - return ((oparg & 1) ? 1 : 0) + 1; + return 1; case STORE_ATTR_INSTANCE_VALUE: return 0; case STORE_ATTR_WITH_HINT: @@ -945,7 +948,18 @@ _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { } #endif -enum InstructionFormat { INSTR_FMT_IB, INSTR_FMT_IBC, INSTR_FMT_IBC00, INSTR_FMT_IBC000, INSTR_FMT_IBC00000, INSTR_FMT_IBC00000000, INSTR_FMT_IX, INSTR_FMT_IXC, INSTR_FMT_IXC0, INSTR_FMT_IXC00, INSTR_FMT_IXC000 }; +enum InstructionFormat { + INSTR_FMT_IB, + INSTR_FMT_IBC, + INSTR_FMT_IBC00, + INSTR_FMT_IBC000, + INSTR_FMT_IBC00000000, + INSTR_FMT_IX, + INSTR_FMT_IXC, + INSTR_FMT_IXC0, + INSTR_FMT_IXC00, + INSTR_FMT_IXC000, +}; #define IS_VALID_OPCODE(OP) \ (((OP) >= 0) && ((OP) < OPCODE_METADATA_SIZE) && \ @@ -1037,6 +1051,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [BINARY_SLICE] = { true, INSTR_FMT_IX, 0 }, [STORE_SLICE] = { true, INSTR_FMT_IX, 0 }, [BINARY_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, 0 }, + [BINARY_SUBSCR_STR_INT] = { true, INSTR_FMT_IXC, 0 }, [BINARY_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC, 0 }, [BINARY_SUBSCR_DICT] = { true, INSTR_FMT_IXC, 0 }, [BINARY_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC, 0 }, @@ -1248,6 +1263,7 @@ const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPAN [BINARY_SLICE] = { .nuops = 1, .uops = { { BINARY_SLICE, 0, 0 } } }, [STORE_SLICE] = { .nuops = 1, .uops = { { STORE_SLICE, 0, 0 } } }, [BINARY_SUBSCR_LIST_INT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_LIST_INT, 0, 0 } } }, + [BINARY_SUBSCR_STR_INT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_STR_INT, 0, 0 } } }, [BINARY_SUBSCR_TUPLE_INT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_TUPLE_INT, 0, 0 } } }, [BINARY_SUBSCR_DICT] = { .nuops = 1, .uops = { { BINARY_SUBSCR_DICT, 0, 0 } } }, [LIST_APPEND] = { .nuops = 1, .uops = { { LIST_APPEND, 0, 0 } } }, @@ -1279,8 +1295,8 @@ const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPAN [LOAD_NAME] = { .nuops = 2, .uops = { { _LOAD_LOCALS, 0, 0 }, { _LOAD_FROM_DICT_OR_GLOBALS, 0, 0 } } }, [LOAD_FROM_DICT_OR_GLOBALS] = { .nuops = 1, .uops = { { _LOAD_FROM_DICT_OR_GLOBALS, 0, 0 } } }, [LOAD_GLOBAL] = { .nuops = 1, .uops = { { LOAD_GLOBAL, 0, 0 } } }, - [LOAD_GLOBAL_MODULE] = { .nuops = 4, .uops = { { _SKIP_CACHE, 0, 0 }, { _GUARD_GLOBALS_VERSION, 
1, 1 }, { _SKIP_CACHE, 0, 0 }, { _LOAD_GLOBAL_MODULE, 1, 3 } } }, - [LOAD_GLOBAL_BUILTIN] = { .nuops = 4, .uops = { { _SKIP_CACHE, 0, 0 }, { _GUARD_GLOBALS_VERSION, 1, 1 }, { _GUARD_BUILTINS_VERSION, 1, 2 }, { _LOAD_GLOBAL_BUILTINS, 1, 3 } } }, + [LOAD_GLOBAL_MODULE] = { .nuops = 2, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _LOAD_GLOBAL_MODULE, 1, 3 } } }, + [LOAD_GLOBAL_BUILTIN] = { .nuops = 3, .uops = { { _GUARD_GLOBALS_VERSION, 1, 1 }, { _GUARD_BUILTINS_VERSION, 1, 2 }, { _LOAD_GLOBAL_BUILTINS, 1, 3 } } }, [DELETE_FAST] = { .nuops = 1, .uops = { { DELETE_FAST, 0, 0 } } }, [DELETE_DEREF] = { .nuops = 1, .uops = { { DELETE_DEREF, 0, 0 } } }, [LOAD_FROM_DICT_OR_DEREF] = { .nuops = 1, .uops = { { LOAD_FROM_DICT_OR_DEREF, 0, 0 } } }, @@ -1302,7 +1318,7 @@ const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPAN [LOAD_SUPER_ATTR_ATTR] = { .nuops = 1, .uops = { { LOAD_SUPER_ATTR_ATTR, 0, 0 } } }, [LOAD_SUPER_ATTR_METHOD] = { .nuops = 1, .uops = { { LOAD_SUPER_ATTR_METHOD, 0, 0 } } }, [LOAD_ATTR] = { .nuops = 1, .uops = { { LOAD_ATTR, 0, 0 } } }, - [LOAD_ATTR_INSTANCE_VALUE] = { .nuops = 4, .uops = { { _SKIP_CACHE, 0, 0 }, { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_MANAGED_OBJECT_HAS_VALUES, 0, 0 }, { _LOAD_ATTR_INSTANCE_VALUE, 1, 3 } } }, + [LOAD_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_MANAGED_OBJECT_HAS_VALUES, 0, 0 }, { _LOAD_ATTR_INSTANCE_VALUE, 1, 3 } } }, [COMPARE_OP] = { .nuops = 1, .uops = { { COMPARE_OP, 0, 0 } } }, [COMPARE_OP_FLOAT] = { .nuops = 1, .uops = { { COMPARE_OP_FLOAT, 0, 0 } } }, [COMPARE_OP_INT] = { .nuops = 1, .uops = { { COMPARE_OP_INT, 0, 0 } } }, @@ -1356,7 +1372,6 @@ const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE] = { [_BINARY_OP_ADD_UNICODE] = "_BINARY_OP_ADD_UNICODE", [_LOAD_LOCALS] = "_LOAD_LOCALS", [_LOAD_FROM_DICT_OR_GLOBALS] = "_LOAD_FROM_DICT_OR_GLOBALS", - [_SKIP_CACHE] = "_SKIP_CACHE", [_GUARD_GLOBALS_VERSION] = "_GUARD_GLOBALS_VERSION", [_GUARD_BUILTINS_VERSION] = "_GUARD_BUILTINS_VERSION", [_LOAD_GLOBAL_MODULE] = "_LOAD_GLOBAL_MODULE", diff --git a/Include/internal/pycore_pathconfig.h b/Include/internal/pycore_pathconfig.h index b8deaa0c3eb067..729f3e09ce1ca4 100644 --- a/Include/internal/pycore_pathconfig.h +++ b/Include/internal/pycore_pathconfig.h @@ -8,6 +8,7 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif +// Export for '_testinternalcapi' shared extension PyAPI_FUNC(void) _PyPathConfig_ClearGlobal(void); extern PyStatus _PyPathConfig_ReadGlobal(PyConfig *config); extern PyStatus _PyPathConfig_UpdateGlobal(const PyConfig *config); diff --git a/Include/internal/pycore_pyarena.h b/Include/internal/pycore_pyarena.h index d78972a88ca238..08262fba2daeee 100644 --- a/Include/internal/pycore_pyarena.h +++ b/Include/internal/pycore_pyarena.h @@ -1,4 +1,6 @@ /* An arena-like memory interface for the compiler. + * + * Export symbols for test_peg_generator. 
*/ #ifndef Py_INTERNAL_PYARENA_H diff --git a/Include/internal/pycore_pyerrors.h b/Include/internal/pycore_pyerrors.h index e3ba4b75e3cfc3..45929f40a05496 100644 --- a/Include/internal/pycore_pyerrors.h +++ b/Include/internal/pycore_pyerrors.h @@ -11,46 +11,47 @@ extern "C" { /* Error handling definitions */ -PyAPI_FUNC(_PyErr_StackItem*) _PyErr_GetTopmostException(PyThreadState *tstate); -PyAPI_FUNC(PyObject*) _PyErr_GetHandledException(PyThreadState *); -PyAPI_FUNC(void) _PyErr_SetHandledException(PyThreadState *, PyObject *); -PyAPI_FUNC(void) _PyErr_GetExcInfo(PyThreadState *, PyObject **, PyObject **, PyObject **); - -/* Like PyErr_Format(), but saves current exception as __context__ and - __cause__. - */ -PyAPI_FUNC(PyObject *) _PyErr_FormatFromCause( +extern _PyErr_StackItem* _PyErr_GetTopmostException(PyThreadState *tstate); +extern PyObject* _PyErr_GetHandledException(PyThreadState *); +extern void _PyErr_SetHandledException(PyThreadState *, PyObject *); +extern void _PyErr_GetExcInfo(PyThreadState *, PyObject **, PyObject **, PyObject **); + +// Like PyErr_Format(), but saves current exception as __context__ and +// __cause__. +// Export for '_sqlite3' shared extension. +PyAPI_FUNC(PyObject*) _PyErr_FormatFromCause( PyObject *exception, const char *format, /* ASCII-encoded string */ ... ); -PyAPI_FUNC(int) _PyException_AddNote( +extern int _PyException_AddNote( PyObject *exc, PyObject *note); -PyAPI_FUNC(int) _PyErr_CheckSignals(void); +extern int _PyErr_CheckSignals(void); /* Support for adding program text to SyntaxErrors */ -PyAPI_FUNC(PyObject *) _PyErr_ProgramDecodedTextObject( +// Export for test_peg_generator +PyAPI_FUNC(PyObject*) _PyErr_ProgramDecodedTextObject( PyObject *filename, int lineno, const char* encoding); -PyAPI_FUNC(PyObject *) _PyUnicodeTranslateError_Create( +extern PyObject* _PyUnicodeTranslateError_Create( PyObject *object, Py_ssize_t start, Py_ssize_t end, const char *reason /* UTF-8 encoded string */ ); -PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalErrorFormat( +extern void _Py_NO_RETURN _Py_FatalErrorFormat( const char *func, const char *format, ...); -extern PyObject *_PyErr_SetImportErrorWithNameFrom( +extern PyObject* _PyErr_SetImportErrorWithNameFrom( PyObject *, PyObject *, PyObject *, @@ -79,80 +80,79 @@ static inline void _PyErr_ClearExcState(_PyErr_StackItem *exc_state) Py_CLEAR(exc_state->exc_value); } -PyAPI_FUNC(PyObject*) _PyErr_StackItemToExcInfoTuple( +extern PyObject* _PyErr_StackItemToExcInfoTuple( _PyErr_StackItem *err_info); -PyAPI_FUNC(void) _PyErr_Fetch( +extern void _PyErr_Fetch( PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **traceback); -extern PyObject * -_PyErr_GetRaisedException(PyThreadState *tstate); +extern PyObject* _PyErr_GetRaisedException(PyThreadState *tstate); -PyAPI_FUNC(int) _PyErr_ExceptionMatches( +extern int _PyErr_ExceptionMatches( PyThreadState *tstate, PyObject *exc); -void -_PyErr_SetRaisedException(PyThreadState *tstate, PyObject *exc); +extern void _PyErr_SetRaisedException(PyThreadState *tstate, PyObject *exc); -PyAPI_FUNC(void) _PyErr_Restore( +extern void _PyErr_Restore( PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *traceback); -PyAPI_FUNC(void) _PyErr_SetObject( +extern void _PyErr_SetObject( PyThreadState *tstate, PyObject *type, PyObject *value); -PyAPI_FUNC(void) _PyErr_ChainStackItem(void); +extern void _PyErr_ChainStackItem(void); -PyAPI_FUNC(void) _PyErr_Clear(PyThreadState *tstate); +extern void _PyErr_Clear(PyThreadState *tstate); -PyAPI_FUNC(void) 
_PyErr_SetNone(PyThreadState *tstate, PyObject *exception); +extern void _PyErr_SetNone(PyThreadState *tstate, PyObject *exception); -PyAPI_FUNC(PyObject *) _PyErr_NoMemory(PyThreadState *tstate); +extern PyObject* _PyErr_NoMemory(PyThreadState *tstate); -PyAPI_FUNC(void) _PyErr_SetString( +extern void _PyErr_SetString( PyThreadState *tstate, PyObject *exception, const char *string); -PyAPI_FUNC(PyObject *) _PyErr_Format( +extern PyObject* _PyErr_Format( PyThreadState *tstate, PyObject *exception, const char *format, ...); -PyAPI_FUNC(void) _PyErr_NormalizeException( +extern void _PyErr_NormalizeException( PyThreadState *tstate, PyObject **exc, PyObject **val, PyObject **tb); -PyAPI_FUNC(PyObject *) _PyErr_FormatFromCauseTstate( +extern PyObject* _PyErr_FormatFromCauseTstate( PyThreadState *tstate, PyObject *exception, const char *format, ...); -PyAPI_FUNC(PyObject *) _PyExc_CreateExceptionGroup( +extern PyObject* _PyExc_CreateExceptionGroup( const char *msg, PyObject *excs); -PyAPI_FUNC(PyObject *) _PyExc_PrepReraiseStar( +extern PyObject* _PyExc_PrepReraiseStar( PyObject *orig, PyObject *excs); -PyAPI_FUNC(int) _PyErr_CheckSignalsTstate(PyThreadState *tstate); +extern int _PyErr_CheckSignalsTstate(PyThreadState *tstate); -PyAPI_FUNC(void) _Py_DumpExtensionModules(int fd, PyInterpreterState *interp); +extern void _Py_DumpExtensionModules(int fd, PyInterpreterState *interp); extern PyObject* _Py_Offer_Suggestions(PyObject* exception); +// Export for '_testinternalcapi' shared extension PyAPI_FUNC(Py_ssize_t) _Py_UTF8_Edit_Cost(PyObject *str_a, PyObject *str_b, Py_ssize_t max_cost); diff --git a/Include/internal/pycore_pyhash.h b/Include/internal/pycore_pyhash.h index 34dfa53771288e..da9abd28b499a2 100644 --- a/Include/internal/pycore_pyhash.h +++ b/Include/internal/pycore_pyhash.h @@ -1,10 +1,86 @@ -#ifndef Py_INTERNAL_HASH_H -#define Py_INTERNAL_HASH_H +#ifndef Py_INTERNAL_PYHASH_H +#define Py_INTERNAL_PYHASH_H #ifndef Py_BUILD_CORE # error "this header requires Py_BUILD_CORE define" #endif +/* Helpers for hash functions */ +extern Py_hash_t _Py_HashDouble(PyObject *, double); +// _decimal shared extensions uses _Py_HashPointer() +PyAPI_FUNC(Py_hash_t) _Py_HashPointer(const void*); +// Similar to _Py_HashPointer(), but don't replace -1 with -2 +extern Py_hash_t _Py_HashPointerRaw(const void*); +// _datetime shared extension uses _Py_HashBytes() +PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void*, Py_ssize_t); + +/* Prime multiplier used in string and various other hashes. */ +#define _PyHASH_MULTIPLIER 1000003UL /* 0xf4243 */ + +/* Parameters used for the numeric hash implementation. See notes for + _Py_HashDouble in Python/pyhash.c. Numeric hashes are based on + reduction modulo the prime 2**_PyHASH_BITS - 1. */ + +#if SIZEOF_VOID_P >= 8 +# define _PyHASH_BITS 61 +#else +# define _PyHASH_BITS 31 +#endif + +#define _PyHASH_MODULUS (((size_t)1 << _PyHASH_BITS) - 1) +#define _PyHASH_INF 314159 +#define _PyHASH_IMAG _PyHASH_MULTIPLIER + +/* Hash secret + * + * memory layout on 64 bit systems + * cccccccc cccccccc cccccccc uc -- unsigned char[24] + * pppppppp ssssssss ........ fnv -- two Py_hash_t + * k0k0k0k0 k1k1k1k1 ........ siphash -- two uint64_t + * ........ ........ ssssssss djbx33a -- 16 bytes padding + one Py_hash_t + * ........ ........ eeeeeeee pyexpat XML hash salt + * + * memory layout on 32 bit systems + * cccccccc cccccccc cccccccc uc + * ppppssss ........ ........ fnv -- two Py_hash_t + * k0k0k0k0 k1k1k1k1 ........ siphash -- two uint64_t (*) + * ........ ........ ssss.... 
djbx33a -- 16 bytes padding + one Py_hash_t + * ........ ........ eeee.... pyexpat XML hash salt + * + * (*) The siphash member may not be available on 32 bit platforms without + * an unsigned int64 data type. + */ +typedef union { + /* ensure 24 bytes */ + unsigned char uc[24]; + /* two Py_hash_t for FNV */ + struct { + Py_hash_t prefix; + Py_hash_t suffix; + } fnv; + /* two uint64 for SipHash24 */ + struct { + uint64_t k0; + uint64_t k1; + } siphash; + /* a different (!) Py_hash_t for small string optimization */ + struct { + unsigned char padding[16]; + Py_hash_t suffix; + } djbx33a; + struct { + unsigned char padding[16]; + Py_hash_t hashsalt; + } expat; +} _Py_HashSecret_t; + +// Export for '_elementtree' shared extension +PyAPI_DATA(_Py_HashSecret_t) _Py_HashSecret; + +#ifdef Py_DEBUG +extern int _Py_HashSecret_Initialized; +#endif + struct pyhash_runtime_state { struct { @@ -34,7 +110,6 @@ struct pyhash_runtime_state { } -uint64_t _Py_KeyedHash(uint64_t, const char *, Py_ssize_t); - +extern uint64_t _Py_KeyedHash(uint64_t key, const void *src, Py_ssize_t src_sz); -#endif // Py_INTERNAL_HASH_H +#endif // !Py_INTERNAL_PYHASH_H diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index fb28652515909d..b4d5b1f1239e1d 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -103,6 +103,7 @@ PyAPI_FUNC(int) _Py_IsInterpreterFinalizing(PyInterpreterState *interp); /* Random */ extern int _PyOS_URandom(void *buffer, Py_ssize_t size); +// Export for '_random' shared extension PyAPI_FUNC(int) _PyOS_URandomNonblock(void *buffer, Py_ssize_t size); /* Legacy locale support */ diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index 81a707a0a5ddf3..6b5113714dbeb2 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -8,8 +8,20 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif -#include "pymem.h" // PyMemAllocatorName +// Try to get the allocator name set by _PyMem_SetupAllocators(). +// Return NULL if unknown. +// Export for '_testinternalcapi' shared extension. PyAPI_FUNC(const char*) _PyMem_GetCurrentAllocatorName(void); +// strdup() using PyMem_RawMalloc() +extern char* _PyMem_RawStrdup(const char *str); + +// strdup() using PyMem_Malloc(). +// Export for '_pickle' shared extension. +PyAPI_FUNC(char*) _PyMem_Strdup(const char *str); + +// wcsdup() using PyMem_RawMalloc() +extern wchar_t* _PyMem_RawWcsdup(const wchar_t *str); typedef struct { /* We tag each block with an API ID in order to tag API violations */ @@ -36,7 +48,7 @@ struct _pymem_allocators { /* Set the memory allocator of the specified domain to the default. Save the old allocator into *old_alloc if it's non-NULL. Return 0 on success, or return -1 if the domain is unknown. */ -PyAPI_FUNC(int) _PyMem_SetDefaultAllocator( +extern int _PyMem_SetDefaultAllocator( PyMemAllocatorDomain domain, PyMemAllocatorEx *old_alloc); @@ -82,17 +94,17 @@ static inline int _PyMem_IsPtrFreed(const void *ptr) #endif } -PyAPI_FUNC(int) _PyMem_GetAllocatorName( +extern int _PyMem_GetAllocatorName( const char *name, PyMemAllocatorName *allocator); /* Configure the Python memory allocators. Pass PYMEM_ALLOCATOR_DEFAULT to use default allocators. PYMEM_ALLOCATOR_NOT_SET does nothing.
*/ -PyAPI_FUNC(int) _PyMem_SetupAllocators(PyMemAllocatorName allocator); +extern int _PyMem_SetupAllocators(PyMemAllocatorName allocator); #ifdef __cplusplus } #endif -#endif /* !Py_INTERNAL_PYMEM_H */ +#endif // !Py_INTERNAL_PYMEM_H diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 0659084194d293..acc6cf953343fb 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -66,7 +66,7 @@ _Py_ThreadCanHandleSignals(PyInterpreterState *interp) #if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE) extern _Py_thread_local PyThreadState *_Py_tss_tstate; #endif -PyAPI_DATA(PyThreadState *) _PyThreadState_GetCurrent(void); +PyAPI_FUNC(PyThreadState *) _PyThreadState_GetCurrent(void); /* Get the current Python thread state. @@ -121,15 +121,11 @@ static inline PyInterpreterState* _PyInterpreterState_GET(void) { // PyThreadState functions -PyAPI_FUNC(PyThreadState *) _PyThreadState_New(PyInterpreterState *interp); -PyAPI_FUNC(void) _PyThreadState_Bind(PyThreadState *tstate); -PyAPI_FUNC(void) _PyThreadState_DeleteExcept(PyThreadState *tstate); - -extern void _PyThreadState_InitDetached(PyThreadState *, PyInterpreterState *); -extern void _PyThreadState_ClearDetached(PyThreadState *); -extern void _PyThreadState_BindDetached(PyThreadState *); -extern void _PyThreadState_UnbindDetached(PyThreadState *); +extern PyThreadState * _PyThreadState_New(PyInterpreterState *interp); +extern void _PyThreadState_Bind(PyThreadState *tstate); +extern void _PyThreadState_DeleteExcept(PyThreadState *tstate); +// Export for '_testinternalcapi' shared extension PyAPI_FUNC(PyObject*) _PyThreadState_GetDict(PyThreadState *tstate); /* The implementation of sys._current_frames() Returns a dict mapping @@ -145,25 +141,25 @@ extern PyObject* _PyThread_CurrentExceptions(void); /* Other */ -PyAPI_FUNC(PyThreadState *) _PyThreadState_Swap( +extern PyThreadState * _PyThreadState_Swap( _PyRuntimeState *runtime, PyThreadState *newts); -PyAPI_FUNC(PyStatus) _PyInterpreterState_Enable(_PyRuntimeState *runtime); +extern PyStatus _PyInterpreterState_Enable(_PyRuntimeState *runtime); #ifdef HAVE_FORK extern PyStatus _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime); extern void _PySignal_AfterFork(void); #endif - +// Export for the stable ABI PyAPI_FUNC(int) _PyState_AddModule( PyThreadState *tstate, PyObject* module, PyModuleDef* def); -PyAPI_FUNC(int) _PyOS_InterruptOccurred(PyThreadState *tstate); +extern int _PyOS_InterruptOccurred(PyThreadState *tstate); #define HEAD_LOCK(runtime) \ PyThread_acquire_lock((runtime)->interpreters.mutex, WAIT_LOCK) @@ -172,6 +168,7 @@ PyAPI_FUNC(int) _PyOS_InterruptOccurred(PyThreadState *tstate); // Get the configuration of the current interpreter. // The caller must hold the GIL. +// Export for test_peg_generator. PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void); diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index a16d4202b616db..0ec86ee6c50ca3 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -249,6 +249,7 @@ typedef struct pyruntimestate { struct _types_runtime_state types; /* All the objects that are shared by the runtime's interpreters. */ + struct _Py_cached_objects cached_objects; struct _Py_static_objects static_objects; /* The following fields are here to avoid allocation during init. 
@@ -274,8 +275,8 @@ typedef struct pyruntimestate { PyAPI_DATA(_PyRuntimeState) _PyRuntime; -PyAPI_FUNC(PyStatus) _PyRuntimeState_Init(_PyRuntimeState *runtime); -PyAPI_FUNC(void) _PyRuntimeState_Fini(_PyRuntimeState *runtime); +extern PyStatus _PyRuntimeState_Init(_PyRuntimeState *runtime); +extern void _PyRuntimeState_Fini(_PyRuntimeState *runtime); #ifdef HAVE_FORK extern PyStatus _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime); @@ -283,9 +284,9 @@ extern PyStatus _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime); /* Initialize _PyRuntimeState. Return NULL on success, or return an error message on failure. */ -PyAPI_FUNC(PyStatus) _PyRuntime_Initialize(void); +extern PyStatus _PyRuntime_Initialize(void); -PyAPI_FUNC(void) _PyRuntime_Finalize(void); +extern void _PyRuntime_Finalize(void); static inline PyThreadState* diff --git a/Include/internal/pycore_runtime_init.h b/Include/internal/pycore_runtime_init.h index e72e7422c7207e..e89d368be07aa7 100644 --- a/Include/internal/pycore_runtime_init.h +++ b/Include/internal/pycore_runtime_init.h @@ -97,11 +97,6 @@ extern PyTypeObject _PyExc_MemoryError; in accordance with the specification. */ \ .autoTSSkey = Py_tss_NEEDS_INIT, \ .parser = _parser_runtime_state_INIT, \ - .imports = { \ - .extensions = { \ - .main_tstate = _PyThreadState_INIT, \ - }, \ - }, \ .ceval = { \ .perf = _PyEval_RUNTIME_PERF_INIT, \ }, \ @@ -162,6 +157,7 @@ extern PyTypeObject _PyExc_MemoryError; { .threshold = 10, }, \ }, \ }, \ + .object_state = _py_object_state_INIT(INTERP), \ .dtoa = _dtoa_state_INIT(&(INTERP)), \ .dict_state = _dict_state_INIT, \ .func_state = { \ @@ -191,6 +187,16 @@ extern PyTypeObject _PyExc_MemoryError; .context_ver = 1, \ } +#ifdef Py_TRACE_REFS +# define _py_object_state_INIT(INTERP) \ + { \ + .refchain = {&INTERP.object_state.refchain, &INTERP.object_state.refchain}, \ + } +#else +# define _py_object_state_INIT(INTERP) \ + { 0 } +#endif + // global objects @@ -214,6 +220,7 @@ extern PyTypeObject _PyExc_MemoryError; .kind = 1, \ .compact = 1, \ .ascii = (ASCII), \ + .statically_allocated = 1, \ }, \ } #define _PyASCIIObject_INIT(LITERAL) \ diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h index 2d66647438b193..8c9c7f753d8579 100644 --- a/Include/internal/pycore_runtime_init_generated.h +++ b/Include/internal/pycore_runtime_init_generated.h @@ -539,6 +539,7 @@ extern "C" { INIT_STR(anon_lambda, "<lambda>"), \ INIT_STR(anon_listcomp, "<listcomp>"), \ INIT_STR(anon_module, "<module>"), \ + INIT_STR(anon_null, "<null>"), \ INIT_STR(anon_setcomp, "<setcomp>"), \ INIT_STR(anon_string, "<string>"), \ INIT_STR(anon_unknown, "<unknown>"), \ diff --git a/Include/internal/pycore_setobject.h b/Include/internal/pycore_setobject.h new file mode 100644 index 00000000000000..1b63479e774412 --- /dev/null +++ b/Include/internal/pycore_setobject.h @@ -0,0 +1,27 @@ +#ifndef Py_INTERNAL_SETOBJECT_H +#define Py_INTERNAL_SETOBJECT_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +// Export for 'pickle' shared extension +PyAPI_FUNC(int) _PySet_NextEntry( + PyObject *set, + Py_ssize_t *pos, + PyObject **key, + Py_hash_t *hash); + +// Export for 'pickle' shared extension +PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable); + +// Export _PySet_Dummy for the gdb plugin's benefit +PyAPI_DATA(PyObject *) _PySet_Dummy; + +#ifdef __cplusplus +} +#endif +#endif // !Py_INTERNAL_SETOBJECT_H diff --git a/Include/internal/pycore_signal.h
b/Include/internal/pycore_signal.h index 1a454ba6f4e8fb..46b57d556e5ef4 100644 --- a/Include/internal/pycore_signal.h +++ b/Include/internal/pycore_signal.h @@ -14,7 +14,8 @@ extern "C" { #include <signal.h>   // NSIG -/* Restore signals that the interpreter has called SIG_IGN on to SIG_DFL. */ +// Restore signals that the interpreter has called SIG_IGN on to SIG_DFL. +// Export for '_posixsubprocess' shared extension. PyAPI_FUNC(void) _Py_RestoreSignals(void); #ifdef _SIG_MAXSIG diff --git a/Include/internal/pycore_structseq.h b/Include/internal/pycore_structseq.h index 6f5dfc12707cf8..5cff165627502b 100644 --- a/Include/internal/pycore_structseq.h +++ b/Include/internal/pycore_structseq.h @@ -11,7 +11,8 @@ extern "C" { /* other API */ -PyAPI_FUNC(PyTypeObject *) _PyStructSequence_NewType( +// Export for '_curses' shared extension +PyAPI_FUNC(PyTypeObject*) _PyStructSequence_NewType( PyStructSequence_Desc *desc, unsigned long tp_flags); diff --git a/Include/internal/pycore_symtable.h b/Include/internal/pycore_symtable.h index c8e0578a231756..1d782ca2c96e05 100644 --- a/Include/internal/pycore_symtable.h +++ b/Include/internal/pycore_symtable.h @@ -101,7 +101,7 @@ extern struct symtable* _PySymtable_Build( struct _mod *mod, PyObject *filename, PyFutureFeatures *future); -PyAPI_FUNC(PySTEntryObject *) PySymtable_Lookup(struct symtable *, void *); +extern PySTEntryObject* _PySymtable_Lookup(struct symtable *, void *); extern void _PySymtable_Free(struct symtable *); diff --git a/Include/internal/pycore_sysmodule.h b/Include/internal/pycore_sysmodule.h index b4b1febafa4479..89a2f7628645b9 100644 --- a/Include/internal/pycore_sysmodule.h +++ b/Include/internal/pycore_sysmodule.h @@ -8,7 +8,7 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif -PyAPI_FUNC(int) _PySys_Audit( +extern int _PySys_Audit( PyThreadState *tstate, const char *event, const char *argFormat, @@ -18,7 +18,7 @@ PyAPI_FUNC(int) _PySys_Audit( PyAPI_FUNC() to not export the symbol. */ extern void _PySys_ClearAuditHooks(PyThreadState *tstate); -PyAPI_FUNC(int) _PySys_SetAttr(PyObject *, PyObject *); +extern int _PySys_SetAttr(PyObject *, PyObject *); extern int _PySys_ClearAttrString(PyInterpreterState *interp, const char *name, int verbose); diff --git a/Include/internal/pycore_time.h b/Include/internal/pycore_time.h index 3d394e8d36a132..318fe4b3a32239 100644 --- a/Include/internal/pycore_time.h +++ b/Include/internal/pycore_time.h @@ -324,10 +324,12 @@ extern int _PyTime_GetPerfCounterWithInfo( // Create a deadline. // Pseudo code: _PyTime_GetMonotonicClock() + timeout. +// Export for '_ssl' shared extension. PyAPI_FUNC(_PyTime_t) _PyDeadline_Init(_PyTime_t timeout); // Get remaining time from a deadline. // Pseudo code: deadline - _PyTime_GetMonotonicClock(). +// Export for '_ssl' shared extension.
PyAPI_FUNC(_PyTime_t) _PyDeadline_Get(_PyTime_t deadline); diff --git a/Include/internal/pycore_token.h b/Include/internal/pycore_token.h index c02e637fee1ee2..9c65cd802d597c 100644 --- a/Include/internal/pycore_token.h +++ b/Include/internal/pycore_token.h @@ -69,18 +69,16 @@ extern "C" { #define COLONEQUAL 53 #define EXCLAMATION 54 #define OP 55 -#define AWAIT 56 -#define ASYNC 57 -#define TYPE_IGNORE 58 -#define TYPE_COMMENT 59 -#define SOFT_KEYWORD 60 -#define FSTRING_START 61 -#define FSTRING_MIDDLE 62 -#define FSTRING_END 63 -#define COMMENT 64 -#define NL 65 -#define ERRORTOKEN 66 -#define N_TOKENS 68 +#define TYPE_IGNORE 56 +#define TYPE_COMMENT 57 +#define SOFT_KEYWORD 58 +#define FSTRING_START 59 +#define FSTRING_MIDDLE 60 +#define FSTRING_END 61 +#define COMMENT 62 +#define NL 63 +#define ERRORTOKEN 64 +#define N_TOKENS 66 #define NT_OFFSET 256 /* Special definitions for cooperation with parser */ diff --git a/Include/internal/pycore_traceback.h b/Include/internal/pycore_traceback.h index c393b2c136f2de..21fb4a25a0face 100644 --- a/Include/internal/pycore_traceback.h +++ b/Include/internal/pycore_traceback.h @@ -25,7 +25,7 @@ extern "C" { This function is signal safe. */ -PyAPI_FUNC(void) _Py_DumpTraceback( +extern void _Py_DumpTraceback( int fd, PyThreadState *tstate); @@ -52,7 +52,7 @@ PyAPI_FUNC(void) _Py_DumpTraceback( This function is signal safe. */ -PyAPI_FUNC(const char*) _Py_DumpTracebackThreads( +extern const char* _Py_DumpTracebackThreads( int fd, PyInterpreterState *interp, PyThreadState *current_tstate); @@ -64,23 +64,23 @@ PyAPI_FUNC(const char*) _Py_DumpTracebackThreads( string which is not ready (PyUnicode_WCHAR_KIND). This function is signal safe. */ -PyAPI_FUNC(void) _Py_DumpASCII(int fd, PyObject *text); +extern void _Py_DumpASCII(int fd, PyObject *text); /* Format an integer as decimal into the file descriptor fd. This function is signal safe. */ -PyAPI_FUNC(void) _Py_DumpDecimal( +extern void _Py_DumpDecimal( int fd, size_t value); /* Format an integer as hexadecimal with width digits into fd file descriptor. The function is signal safe. */ -PyAPI_FUNC(void) _Py_DumpHexadecimal( +extern void _Py_DumpHexadecimal( int fd, uintptr_t value, Py_ssize_t width); -PyAPI_FUNC(PyObject*) _PyTraceBack_FromFrame( +extern PyObject* _PyTraceBack_FromFrame( PyObject *tb_next, PyFrameObject *frame); @@ -89,11 +89,11 @@ PyAPI_FUNC(PyObject*) _PyTraceBack_FromFrame( /* Write the traceback tb to file f. Prefix each line with indent spaces followed by the margin (if it is not NULL). */ -PyAPI_FUNC(int) _PyTraceBack_Print_Indented( +extern int _PyTraceBack_Print_Indented( PyObject *tb, int indent, const char* margin, const char *header_margin, const char *header, PyObject *f); -PyAPI_FUNC(int) _Py_WriteIndentedMargin(int, const char*, PyObject *); -PyAPI_FUNC(int) _Py_WriteIndent(int, PyObject *); +extern int _Py_WriteIndentedMargin(int, const char*, PyObject *); +extern int _Py_WriteIndent(int, PyObject *); #ifdef __cplusplus } diff --git a/Include/internal/pycore_tracemalloc.h b/Include/internal/pycore_tracemalloc.h index cfc4d1fe43999e..7ddc5bac5d10af 100644 --- a/Include/internal/pycore_tracemalloc.h +++ b/Include/internal/pycore_tracemalloc.h @@ -117,14 +117,16 @@ struct _tracemalloc_runtime_state { } -/* Get the traceback where a memory block was allocated. - - Return a tuple of (filename: str, lineno: int) tuples. - - Return None if the tracemalloc module is disabled or if the memory block - is not tracked by tracemalloc. - - Raise an exception and return NULL on error. 
*/ +// Get the traceback where a memory block was allocated. +// +// Return a tuple of (filename: str, lineno: int) tuples. +// +// Return None if the tracemalloc module is disabled or if the memory block +// is not tracked by tracemalloc. +// +// Raise an exception and return NULL on error. +// +// Export for '_testinternalcapi' shared extension. PyAPI_FUNC(PyObject*) _PyTraceMalloc_GetTraceback( unsigned int domain, uintptr_t ptr); diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h index 335edad89792c3..4fa7a12206bcb2 100644 --- a/Include/internal/pycore_tuple.h +++ b/Include/internal/pycore_tuple.h @@ -8,8 +8,8 @@ extern "C" { # error "this header requires Py_BUILD_CORE define" #endif -#include "tupleobject.h" /* _PyTuple_CAST() */ - +extern void _PyTuple_MaybeUntrack(PyObject *); +extern void _PyTuple_DebugMallocStats(FILE *out); /* runtime lifecycle */ diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h index 8f3fbbcdb5ffcd..aba672effe3928 100644 --- a/Include/internal/pycore_typeobject.h +++ b/Include/internal/pycore_typeobject.h @@ -114,6 +114,7 @@ extern static_builtin_state * _PyStaticType_GetState(PyInterpreterState *, PyTyp extern void _PyStaticType_ClearWeakRefs(PyInterpreterState *, PyTypeObject *type); extern void _PyStaticType_Dealloc(PyInterpreterState *, PyTypeObject *); +// Export for 'math' shared extension via _PyType_IsReady() function PyAPI_FUNC(PyObject *) _PyType_GetDict(PyTypeObject *); extern PyObject * _PyType_GetBases(PyTypeObject *type); extern PyObject * _PyType_GetMRO(PyTypeObject *type); @@ -128,18 +129,17 @@ _PyType_IsReady(PyTypeObject *type) return _PyType_GetDict(type) != NULL; } -PyObject * -_Py_type_getattro_impl(PyTypeObject *type, PyObject *name, int *suppress_missing_attribute); -PyObject * -_Py_type_getattro(PyTypeObject *type, PyObject *name); +extern PyObject* _Py_type_getattro_impl(PyTypeObject *type, PyObject *name, + int *suppress_missing_attribute); +extern PyObject* _Py_type_getattro(PyTypeObject *type, PyObject *name); -PyObject *_Py_slot_tp_getattro(PyObject *self, PyObject *name); -PyObject *_Py_slot_tp_getattr_hook(PyObject *self, PyObject *name); +extern PyObject* _Py_slot_tp_getattro(PyObject *self, PyObject *name); +extern PyObject* _Py_slot_tp_getattr_hook(PyObject *self, PyObject *name); -PyAPI_DATA(PyTypeObject) _PyBufferWrapper_Type; +extern PyTypeObject _PyBufferWrapper_Type; -PyObject * -_PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *meth_found); +extern PyObject* _PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, + PyObject *name, int *meth_found); #ifdef __cplusplus } diff --git a/Include/internal/pycore_unicodeobject.h b/Include/internal/pycore_unicodeobject.h index ad59c3e385f2d3..8ec80ddb83d10f 100644 --- a/Include/internal/pycore_unicodeobject.h +++ b/Include/internal/pycore_unicodeobject.h @@ -11,6 +11,19 @@ extern "C" { #include "pycore_fileutils.h" // _Py_error_handler #include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI +/* --- Characters Type APIs ----------------------------------------------- */ + +extern int _PyUnicode_IsXidStart(Py_UCS4 ch); +extern int _PyUnicode_IsXidContinue(Py_UCS4 ch); +extern int _PyUnicode_ToLowerFull(Py_UCS4 ch, Py_UCS4 *res); +extern int _PyUnicode_ToTitleFull(Py_UCS4 ch, Py_UCS4 *res); +extern int _PyUnicode_ToUpperFull(Py_UCS4 ch, Py_UCS4 *res); +extern int _PyUnicode_ToFoldedFull(Py_UCS4 ch, Py_UCS4 *res); +extern int _PyUnicode_IsCaseIgnorable(Py_UCS4 ch); +extern int 
_PyUnicode_IsCased(Py_UCS4 ch); + +/* --- Unicode API -------------------------------------------------------- */ + PyAPI_FUNC(int) _PyUnicode_CheckConsistency( PyObject *op, int check_content); @@ -174,7 +187,7 @@ _PyUnicodeWriter_Dealloc(_PyUnicodeWriter *writer); /* Format the object based on the format_spec, as defined in PEP 3101 (Advanced String Formatting). */ -PyAPI_FUNC(int) _PyUnicode_FormatAdvancedWriter( +extern int _PyUnicode_FormatAdvancedWriter( _PyUnicodeWriter *writer, PyObject *obj, PyObject *format_spec, @@ -232,8 +245,9 @@ extern PyObject* _PyUnicode_DecodeUnicodeEscapeStateful( const char *errors, /* error handling */ Py_ssize_t *consumed); /* bytes consumed */ -/* Helper for PyUnicode_DecodeUnicodeEscape that detects invalid escape - chars. */ +// Helper for PyUnicode_DecodeUnicodeEscape that detects invalid escape +// chars. +// Export for test_peg_generator. PyAPI_FUNC(PyObject*) _PyUnicode_DecodeUnicodeEscapeInternal( const char *string, /* Unicode-Escape encoded string */ Py_ssize_t length, /* size of string */ @@ -356,6 +370,7 @@ PyAPI_FUNC(int) _PyUnicode_Equal(PyObject *, PyObject *); extern int _PyUnicode_WideCharString_Converter(PyObject *, void *); extern int _PyUnicode_WideCharString_Opt_Converter(PyObject *, void *); +// Export for test_peg_generator PyAPI_FUNC(Py_ssize_t) _PyUnicode_ScanIdentifier(PyObject *); /* --- Runtime lifecycle -------------------------------------------------- */ diff --git a/Include/internal/pycore_uops.h b/Include/internal/pycore_uops.h index edb141cc79f752..57a5970353b360 100644 --- a/Include/internal/pycore_uops.h +++ b/Include/internal/pycore_uops.h @@ -18,7 +18,7 @@ typedef struct { typedef struct { _PyExecutorObject base; - _PyUOpInstruction trace[_Py_UOP_MAX_TRACE_LENGTH]; // TODO: variable length + _PyUOpInstruction trace[1]; } _PyUOpExecutorObject; _PyInterpreterFrame *_PyUopExecute( diff --git a/Include/internal/pycore_warnings.h b/Include/internal/pycore_warnings.h index 452d6b96ce4f1c..9785d7cc467de2 100644 --- a/Include/internal/pycore_warnings.h +++ b/Include/internal/pycore_warnings.h @@ -19,7 +19,7 @@ struct _warnings_runtime_state { extern int _PyWarnings_InitState(PyInterpreterState *interp); -PyAPI_FUNC(PyObject*) _PyWarnings_Init(void); +extern PyObject* _PyWarnings_Init(void); extern void _PyErr_WarnUnawaitedCoroutine(PyObject *coro); extern void _PyErr_WarnUnawaitedAgenMethod(PyAsyncGenObject *agen, PyObject *method); diff --git a/Include/modsupport.h b/Include/modsupport.h index 51061c5bc8090a..88577e027b5275 100644 --- a/Include/modsupport.h +++ b/Include/modsupport.h @@ -22,10 +22,12 @@ PyAPI_FUNC(int) PyArg_UnpackTuple(PyObject *, const char *, Py_ssize_t, Py_ssize PyAPI_FUNC(PyObject *) Py_BuildValue(const char *, ...); PyAPI_FUNC(PyObject *) Py_VaBuildValue(const char *, va_list); +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030a0000 // Add an attribute with name 'name' and value 'obj' to the module 'mod'. // On success, return 0. // On error, raise an exception and return -1. PyAPI_FUNC(int) PyModule_AddObjectRef(PyObject *mod, const char *name, PyObject *value); +#endif /* Py_LIMITED_API */ #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030d0000 // Similar to PyModule_AddObjectRef() but steal a reference to 'value'.
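As an illustrative aside (a sketch, not part of the patch): the PyModule_AddObjectRef() comment above is worth seeing end to end. Unlike PyModule_AddObject(), the function does not steal the reference, so the caller keeps ownership of the value it passes in; 'example_exec' and 'answer' are hypothetical names.

/* Minimal sketch; 'example_exec' and 'answer' are hypothetical. */
#include "Python.h"

static int
example_exec(PyObject *mod)
{
    PyObject *obj = PyLong_FromLong(42);
    if (obj == NULL) {
        return -1;
    }
    /* PyModule_AddObjectRef() does not steal the reference: whether it
       succeeds or fails, 'obj' still belongs to the caller. */
    int res = PyModule_AddObjectRef(mod, "answer", obj);
    Py_DECREF(obj);
    return res;   /* 0 on success, -1 with an exception set on error */
}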
diff --git a/Include/moduleobject.h b/Include/moduleobject.h index b8bdfe29d80406..ea08145381cee6 100644 --- a/Include/moduleobject.h +++ b/Include/moduleobject.h @@ -27,11 +27,6 @@ PyAPI_FUNC(PyObject *) PyModule_GetNameObject(PyObject *); PyAPI_FUNC(const char *) PyModule_GetName(PyObject *); Py_DEPRECATED(3.2) PyAPI_FUNC(const char *) PyModule_GetFilename(PyObject *); PyAPI_FUNC(PyObject *) PyModule_GetFilenameObject(PyObject *); -#ifndef Py_LIMITED_API -PyAPI_FUNC(void) _PyModule_Clear(PyObject *); -PyAPI_FUNC(void) _PyModule_ClearDict(PyObject *); -PyAPI_FUNC(int) _PyModuleSpec_IsInitializing(PyObject *); -#endif PyAPI_FUNC(PyModuleDef*) PyModule_GetDef(PyObject*); PyAPI_FUNC(void*) PyModule_GetState(PyObject*); @@ -103,12 +98,6 @@ struct PyModuleDef { freefunc m_free; }; - -// Internal C API -#ifdef Py_BUILD_CORE -extern int _PyModule_IsExtension(PyObject *obj); -#endif - #ifdef __cplusplus } #endif diff --git a/Include/object.h b/Include/object.h index 7f2e4e90615e7b..e26cedf8ca3c97 100644 --- a/Include/object.h +++ b/Include/object.h @@ -165,12 +165,28 @@ check by comparing the reference count field to the immortality reference count. */ struct _object { _PyObject_HEAD_EXTRA + +#if (defined(__GNUC__) || defined(__clang__)) \ + && !(defined __STDC_VERSION__ && __STDC_VERSION__ >= 201112L) + // On C99 and older, anonymous union is a GCC and clang extension + __extension__ +#endif +#ifdef _MSC_VER + // Ignore MSC warning C4201: "nonstandard extension used: + // nameless struct/union" + __pragma(warning(push)) + __pragma(warning(disable: 4201)) +#endif union { Py_ssize_t ob_refcnt; #if SIZEOF_VOID_P > 4 PY_UINT32_T ob_refcnt_split[2]; #endif }; +#ifdef _MSC_VER + __pragma(warning(pop)) +#endif + PyTypeObject *ob_type; }; @@ -594,10 +610,8 @@ you can count such references to the type object.) #if defined(Py_REF_DEBUG) && !defined(Py_LIMITED_API) PyAPI_FUNC(void) _Py_NegativeRefcount(const char *filename, int lineno, PyObject *op); -PyAPI_FUNC(void) _Py_IncRefTotal_DO_NOT_USE_THIS(void); -PyAPI_FUNC(void) _Py_DecRefTotal_DO_NOT_USE_THIS(void); -# define _Py_INC_REFTOTAL() _Py_IncRefTotal_DO_NOT_USE_THIS() -# define _Py_DEC_REFTOTAL() _Py_DecRefTotal_DO_NOT_USE_THIS() +PyAPI_FUNC(void) _Py_INCREF_IncRefTotal(void); +PyAPI_FUNC(void) _Py_DECREF_DecRefTotal(void); #endif // Py_REF_DEBUG && !Py_LIMITED_API PyAPI_FUNC(void) _Py_Dealloc(PyObject *); @@ -646,7 +660,7 @@ static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op) #endif _Py_INCREF_STAT_INC(); #ifdef Py_REF_DEBUG - _Py_INC_REFTOTAL(); + _Py_INCREF_IncRefTotal(); #endif #endif } @@ -675,7 +689,7 @@ static inline void Py_DECREF(const char *filename, int lineno, PyObject *op) return; } _Py_DECREF_STAT_INC(); - _Py_DEC_REFTOTAL(); + _Py_DECREF_DecRefTotal(); if (--op->ob_refcnt != 0) { if (op->ob_refcnt < 0) { _Py_NegativeRefcount(filename, lineno, op); @@ -703,9 +717,6 @@ static inline Py_ALWAYS_INLINE void Py_DECREF(PyObject *op) #define Py_DECREF(op) Py_DECREF(_PyObject_CAST(op)) #endif -#undef _Py_INC_REFTOTAL -#undef _Py_DEC_REFTOTAL - /* Safely decref `op` and set `op` to NULL, especially useful in tp_clear * and tp_dealloc implementations. 
diff --git a/Include/opcode.h b/Include/opcode.h index 697520937d9055..e5c42d5a718286 100644 --- a/Include/opcode.h +++ b/Include/opcode.h @@ -6,228 +6,8 @@ extern "C" { #endif +#include "opcode_ids.h" -/* Instruction opcodes for compiled code */ -#define CACHE 0 -#define POP_TOP 1 -#define PUSH_NULL 2 -#define INTERPRETER_EXIT 3 -#define END_FOR 4 -#define END_SEND 5 -#define TO_BOOL 6 -#define NOP 9 -#define UNARY_NEGATIVE 11 -#define UNARY_NOT 12 -#define UNARY_INVERT 15 -#define EXIT_INIT_CHECK 16 -#define RESERVED 17 -#define MAKE_FUNCTION 24 -#define BINARY_SUBSCR 25 -#define BINARY_SLICE 26 -#define STORE_SLICE 27 -#define GET_LEN 30 -#define MATCH_MAPPING 31 -#define MATCH_SEQUENCE 32 -#define MATCH_KEYS 33 -#define PUSH_EXC_INFO 35 -#define CHECK_EXC_MATCH 36 -#define CHECK_EG_MATCH 37 -#define FORMAT_SIMPLE 40 -#define FORMAT_WITH_SPEC 41 -#define WITH_EXCEPT_START 49 -#define GET_AITER 50 -#define GET_ANEXT 51 -#define BEFORE_ASYNC_WITH 52 -#define BEFORE_WITH 53 -#define END_ASYNC_FOR 54 -#define CLEANUP_THROW 55 -#define STORE_SUBSCR 60 -#define DELETE_SUBSCR 61 -#define GET_ITER 68 -#define GET_YIELD_FROM_ITER 69 -#define LOAD_BUILD_CLASS 71 -#define LOAD_ASSERTION_ERROR 74 -#define RETURN_GENERATOR 75 -#define RETURN_VALUE 83 -#define SETUP_ANNOTATIONS 85 -#define LOAD_LOCALS 87 -#define POP_EXCEPT 89 -#define STORE_NAME 90 -#define DELETE_NAME 91 -#define UNPACK_SEQUENCE 92 -#define FOR_ITER 93 -#define UNPACK_EX 94 -#define STORE_ATTR 95 -#define DELETE_ATTR 96 -#define STORE_GLOBAL 97 -#define DELETE_GLOBAL 98 -#define SWAP 99 -#define LOAD_CONST 100 -#define LOAD_NAME 101 -#define BUILD_TUPLE 102 -#define BUILD_LIST 103 -#define BUILD_SET 104 -#define BUILD_MAP 105 -#define LOAD_ATTR 106 -#define COMPARE_OP 107 -#define IMPORT_NAME 108 -#define IMPORT_FROM 109 -#define JUMP_FORWARD 110 -#define POP_JUMP_IF_FALSE 114 -#define POP_JUMP_IF_TRUE 115 -#define LOAD_GLOBAL 116 -#define IS_OP 117 -#define CONTAINS_OP 118 -#define RERAISE 119 -#define COPY 120 -#define RETURN_CONST 121 -#define BINARY_OP 122 -#define SEND 123 -#define LOAD_FAST 124 -#define STORE_FAST 125 -#define DELETE_FAST 126 -#define LOAD_FAST_CHECK 127 -#define POP_JUMP_IF_NOT_NONE 128 -#define POP_JUMP_IF_NONE 129 -#define RAISE_VARARGS 130 -#define GET_AWAITABLE 131 -#define BUILD_SLICE 133 -#define JUMP_BACKWARD_NO_INTERRUPT 134 -#define MAKE_CELL 135 -#define LOAD_DEREF 137 -#define STORE_DEREF 138 -#define DELETE_DEREF 139 -#define JUMP_BACKWARD 140 -#define LOAD_SUPER_ATTR 141 -#define CALL_FUNCTION_EX 142 -#define LOAD_FAST_AND_CLEAR 143 -#define EXTENDED_ARG 144 -#define LIST_APPEND 145 -#define SET_ADD 146 -#define MAP_ADD 147 -#define COPY_FREE_VARS 149 -#define YIELD_VALUE 150 -#define RESUME 151 -#define MATCH_CLASS 152 -#define BUILD_CONST_KEY_MAP 156 -#define BUILD_STRING 157 -#define CONVERT_VALUE 158 -#define LIST_EXTEND 162 -#define SET_UPDATE 163 -#define DICT_MERGE 164 -#define DICT_UPDATE 165 -#define LOAD_FAST_LOAD_FAST 168 -#define STORE_FAST_LOAD_FAST 169 -#define STORE_FAST_STORE_FAST 170 -#define CALL 171 -#define KW_NAMES 172 -#define CALL_INTRINSIC_1 173 -#define CALL_INTRINSIC_2 174 -#define LOAD_FROM_DICT_OR_GLOBALS 175 -#define LOAD_FROM_DICT_OR_DEREF 176 -#define SET_FUNCTION_ATTRIBUTE 177 -#define ENTER_EXECUTOR 230 -#define MIN_INSTRUMENTED_OPCODE 237 -#define INSTRUMENTED_LOAD_SUPER_ATTR 237 -#define INSTRUMENTED_POP_JUMP_IF_NONE 238 -#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 239 -#define INSTRUMENTED_RESUME 240 -#define INSTRUMENTED_CALL 241 -#define 
INSTRUMENTED_RETURN_VALUE 242 -#define INSTRUMENTED_YIELD_VALUE 243 -#define INSTRUMENTED_CALL_FUNCTION_EX 244 -#define INSTRUMENTED_JUMP_FORWARD 245 -#define INSTRUMENTED_JUMP_BACKWARD 246 -#define INSTRUMENTED_RETURN_CONST 247 -#define INSTRUMENTED_FOR_ITER 248 -#define INSTRUMENTED_POP_JUMP_IF_FALSE 249 -#define INSTRUMENTED_POP_JUMP_IF_TRUE 250 -#define INSTRUMENTED_END_FOR 251 -#define INSTRUMENTED_END_SEND 252 -#define INSTRUMENTED_INSTRUCTION 253 -#define INSTRUMENTED_LINE 254 -#define MIN_PSEUDO_OPCODE 256 -#define SETUP_FINALLY 256 -#define SETUP_CLEANUP 257 -#define SETUP_WITH 258 -#define POP_BLOCK 259 -#define JUMP 260 -#define JUMP_NO_INTERRUPT 261 -#define LOAD_METHOD 262 -#define LOAD_SUPER_METHOD 263 -#define LOAD_ZERO_SUPER_METHOD 264 -#define LOAD_ZERO_SUPER_ATTR 265 -#define STORE_FAST_MAYBE_NULL 266 -#define LOAD_CLOSURE 267 -#define MAX_PSEUDO_OPCODE 267 -#define TO_BOOL_ALWAYS_TRUE 7 -#define TO_BOOL_BOOL 8 -#define TO_BOOL_INT 10 -#define TO_BOOL_LIST 13 -#define TO_BOOL_NONE 14 -#define TO_BOOL_STR 18 -#define BINARY_OP_MULTIPLY_INT 19 -#define BINARY_OP_ADD_INT 20 -#define BINARY_OP_SUBTRACT_INT 21 -#define BINARY_OP_MULTIPLY_FLOAT 22 -#define BINARY_OP_ADD_FLOAT 23 -#define BINARY_OP_SUBTRACT_FLOAT 28 -#define BINARY_OP_ADD_UNICODE 29 -#define BINARY_OP_INPLACE_ADD_UNICODE 34 -#define BINARY_SUBSCR_DICT 38 -#define BINARY_SUBSCR_GETITEM 39 -#define BINARY_SUBSCR_LIST_INT 42 -#define BINARY_SUBSCR_TUPLE_INT 43 -#define STORE_SUBSCR_DICT 44 -#define STORE_SUBSCR_LIST_INT 45 -#define SEND_GEN 46 -#define UNPACK_SEQUENCE_TWO_TUPLE 47 -#define UNPACK_SEQUENCE_TUPLE 48 -#define UNPACK_SEQUENCE_LIST 56 -#define STORE_ATTR_INSTANCE_VALUE 57 -#define STORE_ATTR_SLOT 58 -#define STORE_ATTR_WITH_HINT 59 -#define LOAD_GLOBAL_MODULE 62 -#define LOAD_GLOBAL_BUILTIN 63 -#define LOAD_SUPER_ATTR_ATTR 64 -#define LOAD_SUPER_ATTR_METHOD 65 -#define LOAD_ATTR_INSTANCE_VALUE 66 -#define LOAD_ATTR_MODULE 67 -#define LOAD_ATTR_WITH_HINT 70 -#define LOAD_ATTR_SLOT 72 -#define LOAD_ATTR_CLASS 73 -#define LOAD_ATTR_PROPERTY 76 -#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 77 -#define LOAD_ATTR_METHOD_WITH_VALUES 78 -#define LOAD_ATTR_METHOD_NO_DICT 79 -#define LOAD_ATTR_METHOD_LAZY_DICT 80 -#define LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 81 -#define LOAD_ATTR_NONDESCRIPTOR_NO_DICT 82 -#define COMPARE_OP_FLOAT 84 -#define COMPARE_OP_INT 86 -#define COMPARE_OP_STR 88 -#define FOR_ITER_LIST 111 -#define FOR_ITER_TUPLE 112 -#define FOR_ITER_RANGE 113 -#define FOR_ITER_GEN 132 -#define CALL_BOUND_METHOD_EXACT_ARGS 136 -#define CALL_PY_EXACT_ARGS 148 -#define CALL_PY_WITH_DEFAULTS 153 -#define CALL_NO_KW_TYPE_1 154 -#define CALL_NO_KW_STR_1 155 -#define CALL_NO_KW_TUPLE_1 159 -#define CALL_BUILTIN_CLASS 160 -#define CALL_NO_KW_BUILTIN_O 161 -#define CALL_NO_KW_BUILTIN_FAST 166 -#define CALL_BUILTIN_FAST_WITH_KEYWORDS 167 -#define CALL_NO_KW_LEN 178 -#define CALL_NO_KW_ISINSTANCE 179 -#define CALL_NO_KW_LIST_APPEND 180 -#define CALL_NO_KW_METHOD_DESCRIPTOR_O 181 -#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 182 -#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 183 -#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 184 -#define CALL_NO_KW_ALLOC_AND_ENTER_INIT 185 #define NB_ADD 0 #define NB_AND 1 @@ -256,8 +36,6 @@ extern "C" { #define NB_INPLACE_TRUE_DIVIDE 24 #define NB_INPLACE_XOR 25 -/* Defined in Lib/opcode.py */ -#define ENABLE_SPECIALIZATION 1 #ifdef __cplusplus } diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h new file mode 100644 index 00000000000000..2d9d24cca4542f --- /dev/null +++ 
b/Include/opcode_ids.h @@ -0,0 +1,235 @@ +// Auto-generated by Tools/build/generate_opcode_h.py from Lib/opcode.py + +#ifndef Py_OPCODE_IDS_H +#define Py_OPCODE_IDS_H +#ifdef __cplusplus +extern "C" { +#endif + + +/* Instruction opcodes for compiled code */ +#define CACHE 0 +#define POP_TOP 1 +#define PUSH_NULL 2 +#define INTERPRETER_EXIT 3 +#define END_FOR 4 +#define END_SEND 5 +#define TO_BOOL 6 +#define NOP 9 +#define UNARY_NEGATIVE 11 +#define UNARY_NOT 12 +#define UNARY_INVERT 15 +#define EXIT_INIT_CHECK 16 +#define RESERVED 17 +#define MAKE_FUNCTION 24 +#define BINARY_SUBSCR 25 +#define BINARY_SLICE 26 +#define STORE_SLICE 27 +#define GET_LEN 30 +#define MATCH_MAPPING 31 +#define MATCH_SEQUENCE 32 +#define MATCH_KEYS 33 +#define PUSH_EXC_INFO 35 +#define CHECK_EXC_MATCH 36 +#define CHECK_EG_MATCH 37 +#define FORMAT_SIMPLE 40 +#define FORMAT_WITH_SPEC 41 +#define WITH_EXCEPT_START 49 +#define GET_AITER 50 +#define GET_ANEXT 51 +#define BEFORE_ASYNC_WITH 52 +#define BEFORE_WITH 53 +#define END_ASYNC_FOR 54 +#define CLEANUP_THROW 55 +#define STORE_SUBSCR 60 +#define DELETE_SUBSCR 61 +#define GET_ITER 68 +#define GET_YIELD_FROM_ITER 69 +#define LOAD_BUILD_CLASS 71 +#define LOAD_ASSERTION_ERROR 74 +#define RETURN_GENERATOR 75 +#define RETURN_VALUE 83 +#define SETUP_ANNOTATIONS 85 +#define LOAD_LOCALS 87 +#define POP_EXCEPT 89 +#define STORE_NAME 90 +#define DELETE_NAME 91 +#define UNPACK_SEQUENCE 92 +#define FOR_ITER 93 +#define UNPACK_EX 94 +#define STORE_ATTR 95 +#define DELETE_ATTR 96 +#define STORE_GLOBAL 97 +#define DELETE_GLOBAL 98 +#define SWAP 99 +#define LOAD_CONST 100 +#define LOAD_NAME 101 +#define BUILD_TUPLE 102 +#define BUILD_LIST 103 +#define BUILD_SET 104 +#define BUILD_MAP 105 +#define LOAD_ATTR 106 +#define COMPARE_OP 107 +#define IMPORT_NAME 108 +#define IMPORT_FROM 109 +#define JUMP_FORWARD 110 +#define POP_JUMP_IF_FALSE 114 +#define POP_JUMP_IF_TRUE 115 +#define LOAD_GLOBAL 116 +#define IS_OP 117 +#define CONTAINS_OP 118 +#define RERAISE 119 +#define COPY 120 +#define RETURN_CONST 121 +#define BINARY_OP 122 +#define SEND 123 +#define LOAD_FAST 124 +#define STORE_FAST 125 +#define DELETE_FAST 126 +#define LOAD_FAST_CHECK 127 +#define POP_JUMP_IF_NOT_NONE 128 +#define POP_JUMP_IF_NONE 129 +#define RAISE_VARARGS 130 +#define GET_AWAITABLE 131 +#define BUILD_SLICE 133 +#define JUMP_BACKWARD_NO_INTERRUPT 134 +#define MAKE_CELL 135 +#define LOAD_DEREF 137 +#define STORE_DEREF 138 +#define DELETE_DEREF 139 +#define JUMP_BACKWARD 140 +#define LOAD_SUPER_ATTR 141 +#define CALL_FUNCTION_EX 142 +#define LOAD_FAST_AND_CLEAR 143 +#define EXTENDED_ARG 144 +#define LIST_APPEND 145 +#define SET_ADD 146 +#define MAP_ADD 147 +#define COPY_FREE_VARS 149 +#define YIELD_VALUE 150 +#define RESUME 151 +#define MATCH_CLASS 152 +#define BUILD_CONST_KEY_MAP 156 +#define BUILD_STRING 157 +#define CONVERT_VALUE 158 +#define LIST_EXTEND 162 +#define SET_UPDATE 163 +#define DICT_MERGE 164 +#define DICT_UPDATE 165 +#define LOAD_FAST_LOAD_FAST 168 +#define STORE_FAST_LOAD_FAST 169 +#define STORE_FAST_STORE_FAST 170 +#define CALL 171 +#define KW_NAMES 172 +#define CALL_INTRINSIC_1 173 +#define CALL_INTRINSIC_2 174 +#define LOAD_FROM_DICT_OR_GLOBALS 175 +#define LOAD_FROM_DICT_OR_DEREF 176 +#define SET_FUNCTION_ATTRIBUTE 177 +#define ENTER_EXECUTOR 230 +#define MIN_INSTRUMENTED_OPCODE 237 +#define INSTRUMENTED_LOAD_SUPER_ATTR 237 +#define INSTRUMENTED_POP_JUMP_IF_NONE 238 +#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 239 +#define INSTRUMENTED_RESUME 240 +#define INSTRUMENTED_CALL 241 +#define 
INSTRUMENTED_RETURN_VALUE 242 +#define INSTRUMENTED_YIELD_VALUE 243 +#define INSTRUMENTED_CALL_FUNCTION_EX 244 +#define INSTRUMENTED_JUMP_FORWARD 245 +#define INSTRUMENTED_JUMP_BACKWARD 246 +#define INSTRUMENTED_RETURN_CONST 247 +#define INSTRUMENTED_FOR_ITER 248 +#define INSTRUMENTED_POP_JUMP_IF_FALSE 249 +#define INSTRUMENTED_POP_JUMP_IF_TRUE 250 +#define INSTRUMENTED_END_FOR 251 +#define INSTRUMENTED_END_SEND 252 +#define INSTRUMENTED_INSTRUCTION 253 +#define INSTRUMENTED_LINE 254 +#define SETUP_FINALLY 256 +#define SETUP_CLEANUP 257 +#define SETUP_WITH 258 +#define POP_BLOCK 259 +#define JUMP 260 +#define JUMP_NO_INTERRUPT 261 +#define LOAD_METHOD 262 +#define LOAD_SUPER_METHOD 263 +#define LOAD_ZERO_SUPER_METHOD 264 +#define LOAD_ZERO_SUPER_ATTR 265 +#define STORE_FAST_MAYBE_NULL 266 +#define LOAD_CLOSURE 267 +#define TO_BOOL_ALWAYS_TRUE 7 +#define TO_BOOL_BOOL 8 +#define TO_BOOL_INT 10 +#define TO_BOOL_LIST 13 +#define TO_BOOL_NONE 14 +#define TO_BOOL_STR 18 +#define BINARY_OP_MULTIPLY_INT 19 +#define BINARY_OP_ADD_INT 20 +#define BINARY_OP_SUBTRACT_INT 21 +#define BINARY_OP_MULTIPLY_FLOAT 22 +#define BINARY_OP_ADD_FLOAT 23 +#define BINARY_OP_SUBTRACT_FLOAT 28 +#define BINARY_OP_ADD_UNICODE 29 +#define BINARY_OP_INPLACE_ADD_UNICODE 34 +#define BINARY_SUBSCR_DICT 38 +#define BINARY_SUBSCR_GETITEM 39 +#define BINARY_SUBSCR_LIST_INT 42 +#define BINARY_SUBSCR_STR_INT 43 +#define BINARY_SUBSCR_TUPLE_INT 44 +#define STORE_SUBSCR_DICT 45 +#define STORE_SUBSCR_LIST_INT 46 +#define SEND_GEN 47 +#define UNPACK_SEQUENCE_TWO_TUPLE 48 +#define UNPACK_SEQUENCE_TUPLE 56 +#define UNPACK_SEQUENCE_LIST 57 +#define STORE_ATTR_INSTANCE_VALUE 58 +#define STORE_ATTR_SLOT 59 +#define STORE_ATTR_WITH_HINT 62 +#define LOAD_GLOBAL_MODULE 63 +#define LOAD_GLOBAL_BUILTIN 64 +#define LOAD_SUPER_ATTR_ATTR 65 +#define LOAD_SUPER_ATTR_METHOD 66 +#define LOAD_ATTR_INSTANCE_VALUE 67 +#define LOAD_ATTR_MODULE 70 +#define LOAD_ATTR_WITH_HINT 72 +#define LOAD_ATTR_SLOT 73 +#define LOAD_ATTR_CLASS 76 +#define LOAD_ATTR_PROPERTY 77 +#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 78 +#define LOAD_ATTR_METHOD_WITH_VALUES 79 +#define LOAD_ATTR_METHOD_NO_DICT 80 +#define LOAD_ATTR_METHOD_LAZY_DICT 81 +#define LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 82 +#define LOAD_ATTR_NONDESCRIPTOR_NO_DICT 84 +#define COMPARE_OP_FLOAT 86 +#define COMPARE_OP_INT 88 +#define COMPARE_OP_STR 111 +#define FOR_ITER_LIST 112 +#define FOR_ITER_TUPLE 113 +#define FOR_ITER_RANGE 132 +#define FOR_ITER_GEN 136 +#define CALL_BOUND_METHOD_EXACT_ARGS 148 +#define CALL_PY_EXACT_ARGS 153 +#define CALL_PY_WITH_DEFAULTS 154 +#define CALL_NO_KW_TYPE_1 155 +#define CALL_NO_KW_STR_1 159 +#define CALL_NO_KW_TUPLE_1 160 +#define CALL_BUILTIN_CLASS 161 +#define CALL_NO_KW_BUILTIN_O 166 +#define CALL_NO_KW_BUILTIN_FAST 167 +#define CALL_BUILTIN_FAST_WITH_KEYWORDS 178 +#define CALL_NO_KW_LEN 179 +#define CALL_NO_KW_ISINSTANCE 180 +#define CALL_NO_KW_LIST_APPEND 181 +#define CALL_NO_KW_METHOD_DESCRIPTOR_O 182 +#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 183 +#define CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS 184 +#define CALL_NO_KW_METHOD_DESCRIPTOR_FAST 185 +#define CALL_NO_KW_ALLOC_AND_ENTER_INIT 186 + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_OPCODE_IDS_H */ diff --git a/Include/pyhash.h b/Include/pyhash.h index 182d223fab1cac..6e969f86fa2625 100644 --- a/Include/pyhash.h +++ b/Include/pyhash.h @@ -1,87 +1,10 @@ #ifndef Py_HASH_H - #define Py_HASH_H #ifdef __cplusplus extern "C" { #endif -/* Helpers for hash functions */ #ifndef Py_LIMITED_API -PyAPI_FUNC(Py_hash_t) 
_Py_HashDouble(PyObject *, double); -PyAPI_FUNC(Py_hash_t) _Py_HashPointer(const void*); -// Similar to _Py_HashPointer(), but don't replace -1 with -2 -PyAPI_FUNC(Py_hash_t) _Py_HashPointerRaw(const void*); -PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void*, Py_ssize_t); -#endif - -/* Prime multiplier used in string and various other hashes. */ -#define _PyHASH_MULTIPLIER 1000003UL /* 0xf4243 */ - -/* Parameters used for the numeric hash implementation. See notes for - _Py_HashDouble in Python/pyhash.c. Numeric hashes are based on - reduction modulo the prime 2**_PyHASH_BITS - 1. */ - -#if SIZEOF_VOID_P >= 8 -# define _PyHASH_BITS 61 -#else -# define _PyHASH_BITS 31 -#endif - -#define _PyHASH_MODULUS (((size_t)1 << _PyHASH_BITS) - 1) -#define _PyHASH_INF 314159 -#define _PyHASH_IMAG _PyHASH_MULTIPLIER - - -/* hash secret - * - * memory layout on 64 bit systems - * cccccccc cccccccc cccccccc uc -- unsigned char[24] - * pppppppp ssssssss ........ fnv -- two Py_hash_t - * k0k0k0k0 k1k1k1k1 ........ siphash -- two uint64_t - * ........ ........ ssssssss djbx33a -- 16 bytes padding + one Py_hash_t - * ........ ........ eeeeeeee pyexpat XML hash salt - * - * memory layout on 32 bit systems - * cccccccc cccccccc cccccccc uc - * ppppssss ........ ........ fnv -- two Py_hash_t - * k0k0k0k0 k1k1k1k1 ........ siphash -- two uint64_t (*) - * ........ ........ ssss.... djbx33a -- 16 bytes padding + one Py_hash_t - * ........ ........ eeee.... pyexpat XML hash salt - * - * (*) The siphash member may not be available on 32 bit platforms without - * an unsigned int64 data type. - */ -#ifndef Py_LIMITED_API -typedef union { - /* ensure 24 bytes */ - unsigned char uc[24]; - /* two Py_hash_t for FNV */ - struct { - Py_hash_t prefix; - Py_hash_t suffix; - } fnv; - /* two uint64 for SipHash24 */ - struct { - uint64_t k0; - uint64_t k1; - } siphash; - /* a different (!) Py_hash_t for small string optimization */ - struct { - unsigned char padding[16]; - Py_hash_t suffix; - } djbx33a; - struct { - unsigned char padding[16]; - Py_hash_t hashsalt; - } expat; -} _Py_HashSecret_t; -PyAPI_DATA(_Py_HashSecret_t) _Py_HashSecret; - -#ifdef Py_DEBUG -PyAPI_DATA(int) _Py_HashSecret_Initialized; -#endif - - /* hash function definition */ typedef struct { Py_hash_t (*const hash)(const void *, Py_ssize_t); @@ -94,7 +17,7 @@ PyAPI_FUNC(PyHash_FuncDef*) PyHash_GetFuncDef(void); #endif -/* cutoff for small string DJBX33A optimization in range [1, cutoff). +/* Cutoff for small string DJBX33A optimization in range [1, cutoff). * * About 50% of the strings in a typical Python application are smaller than * 6 to 7 chars. However DJBX33A is vulnerable to hash collision attacks. @@ -112,7 +35,7 @@ PyAPI_FUNC(PyHash_FuncDef*) PyHash_GetFuncDef(void); #endif /* Py_HASH_CUTOFF */ -/* hash algorithm selection +/* Hash algorithm selection * * The values for Py_HASH_* are hard-coded in the * configure script. @@ -140,5 +63,4 @@ PyAPI_FUNC(PyHash_FuncDef*) PyHash_GetFuncDef(void); #ifdef __cplusplus } #endif - -#endif /* !Py_HASH_H */ +#endif // !Py_HASH_H diff --git a/Include/pymacro.h b/Include/pymacro.h index 342d2a7b844adf..9d264fe6eea1d4 100644 --- a/Include/pymacro.h +++ b/Include/pymacro.h @@ -118,6 +118,15 @@ */ #if defined(__GNUC__) || defined(__clang__) # define Py_UNUSED(name) _unused_ ## name __attribute__((unused)) +#elif defined(_MSC_VER) + // Disable warning C4100: unreferenced formal parameter, + // declare the parameter, + // restore old compiler warnings. 
+# define Py_UNUSED(name) \ + __pragma(warning(push)) \ + __pragma(warning(suppress: 4100)) \ + _unused_ ## name \ + __pragma(warning(pop)) #else # define Py_UNUSED(name) _unused_ ## name #endif diff --git a/Include/pystats.h b/Include/pystats.h index 54c9b8d8b3538f..b1957596745f00 100644 --- a/Include/pystats.h +++ b/Include/pystats.h @@ -65,6 +65,7 @@ typedef struct _object_stats { uint64_t dict_materialized_new_key; uint64_t dict_materialized_too_big; uint64_t dict_materialized_str_subclass; + uint64_t dict_dematerialized; uint64_t type_cache_hits; uint64_t type_cache_misses; uint64_t type_cache_dunder_hits; @@ -74,12 +75,21 @@ typedef struct _object_stats { uint64_t optimization_traces_created; uint64_t optimization_traces_executed; uint64_t optimization_uops_executed; + /* Temporary value used during GC */ + uint64_t object_visits; } ObjectStats; +typedef struct _gc_stats { + uint64_t collections; + uint64_t object_visits; + uint64_t objects_collected; +} GCStats; + typedef struct _stats { OpcodeStats opcode_stats[256]; CallStats call_stats; ObjectStats object_stats; + GCStats *gc_stats; } PyStats; diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py index fd8ecdb5c980f3..17101d1d94757b 100644 --- a/Lib/_opcode_metadata.py +++ b/Lib/_opcode_metadata.py @@ -25,6 +25,7 @@ "BINARY_SUBSCR_DICT", "BINARY_SUBSCR_GETITEM", "BINARY_SUBSCR_LIST_INT", + "BINARY_SUBSCR_STR_INT", "BINARY_SUBSCR_TUPLE_INT", ], "STORE_SUBSCR": [ diff --git a/Lib/asyncio/selector_events.py b/Lib/asyncio/selector_events.py index f895750e3cf959..d521b4e2e255a9 100644 --- a/Lib/asyncio/selector_events.py +++ b/Lib/asyncio/selector_events.py @@ -274,9 +274,8 @@ def _ensure_fd_no_transport(self, fd): def _add_reader(self, fd, callback, *args): self._check_closed() handle = events.Handle(callback, args, self, None) - try: - key = self._selector.get_key(fd) - except KeyError: + key = self._selector.get_map().get(fd) + if key is None: self._selector.register(fd, selectors.EVENT_READ, (handle, None)) else: @@ -290,30 +289,27 @@ def _add_reader(self, fd, callback, *args): def _remove_reader(self, fd): if self.is_closed(): return False - try: - key = self._selector.get_key(fd) - except KeyError: + key = self._selector.get_map().get(fd) + if key is None: return False + mask, (reader, writer) = key.events, key.data + mask &= ~selectors.EVENT_READ + if not mask: + self._selector.unregister(fd) else: - mask, (reader, writer) = key.events, key.data - mask &= ~selectors.EVENT_READ - if not mask: - self._selector.unregister(fd) - else: - self._selector.modify(fd, mask, (None, writer)) + self._selector.modify(fd, mask, (None, writer)) - if reader is not None: - reader.cancel() - return True - else: - return False + if reader is not None: + reader.cancel() + return True + else: + return False def _add_writer(self, fd, callback, *args): self._check_closed() handle = events.Handle(callback, args, self, None) - try: - key = self._selector.get_key(fd) - except KeyError: + key = self._selector.get_map().get(fd) + if key is None: self._selector.register(fd, selectors.EVENT_WRITE, (None, handle)) else: @@ -328,24 +324,22 @@ def _remove_writer(self, fd): """Remove a writer callback.""" if self.is_closed(): return False - try: - key = self._selector.get_key(fd) - except KeyError: + key = self._selector.get_map().get(fd) + if key is None: return False + mask, (reader, writer) = key.events, key.data + # Remove both writer and connector. 
+ mask &= ~selectors.EVENT_WRITE + if not mask: + self._selector.unregister(fd) else: - mask, (reader, writer) = key.events, key.data - # Remove both writer and connector. - mask &= ~selectors.EVENT_WRITE - if not mask: - self._selector.unregister(fd) - else: - self._selector.modify(fd, mask, (reader, None)) + self._selector.modify(fd, mask, (reader, None)) - if writer is not None: - writer.cancel() - return True - else: - return False + if writer is not None: + writer.cancel() + return True + else: + return False def add_reader(self, fd, callback, *args): """Add a reader callback.""" diff --git a/Lib/asyncio/streams.py b/Lib/asyncio/streams.py index bf15f517e50dce..b7ad365709b19e 100644 --- a/Lib/asyncio/streams.py +++ b/Lib/asyncio/streams.py @@ -5,6 +5,7 @@ import collections import socket import sys +import warnings import weakref if hasattr(socket, 'AF_UNIX'): @@ -392,6 +393,11 @@ async def start_tls(self, sslcontext, *, self._transport = new_transport protocol._replace_writer(self) + def __del__(self, warnings=warnings): + if not self._transport.is_closing(): + self.close() + warnings.warn(f"unclosed {self!r}", ResourceWarning) + class StreamReader: diff --git a/Lib/asyncio/unix_events.py b/Lib/asyncio/unix_events.py index 17fb4d5f7646ce..a2680865ed968f 100644 --- a/Lib/asyncio/unix_events.py +++ b/Lib/asyncio/unix_events.py @@ -394,6 +394,9 @@ def _sock_sendfile_native_impl(self, fut, registered_fd, sock, fileno, fut.set_result(total_sent) return + # On 32-bit architectures truncate to 1GiB to avoid OverflowError + blocksize = min(blocksize, sys.maxsize//2 + 1) + try: sent = os.sendfile(fd, fileno, offset, blocksize) except (BlockingIOError, InterruptedError): diff --git a/Lib/calendar.py b/Lib/calendar.py index ea56f12ccc41d0..e43ba4a078bcac 100644 --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -585,8 +585,6 @@ def __enter__(self): _locale.setlocale(_locale.LC_TIME, self.locale) def __exit__(self, *args): - if self.oldlocale is None: - return _locale.setlocale(_locale.LC_TIME, self.oldlocale) @@ -690,7 +688,7 @@ def timegm(tuple): return seconds -def main(args): +def main(args=None): import argparse parser = argparse.ArgumentParser() textgroup = parser.add_argument_group('text only arguments') @@ -747,7 +745,7 @@ def main(args): help="month number (1-12, text only)" ) - options = parser.parse_args(args[1:]) + options = parser.parse_args(args) if options.locale and not options.encoding: parser.error("if --locale is specified --encoding is required") @@ -756,6 +754,9 @@ def main(args): locale = options.locale, options.encoding if options.type == "html": + if options.month: + parser.error("incorrect number of arguments") + sys.exit(1) if options.locale: cal = LocaleHTMLCalendar(locale=locale) else: @@ -767,11 +768,8 @@ def main(args): write = sys.stdout.buffer.write if options.year is None: write(cal.formatyearpage(datetime.date.today().year, **optdict)) - elif options.month is None: - write(cal.formatyearpage(options.year, **optdict)) else: - parser.error("incorrect number of arguments") - sys.exit(1) + write(cal.formatyearpage(options.year, **optdict)) else: if options.locale: cal = LocaleTextCalendar(locale=locale) @@ -795,4 +793,4 @@ def main(args): if __name__ == "__main__": - main(sys.argv) + main() diff --git a/Lib/contextlib.py b/Lib/contextlib.py index 95947aceccc304..f82e7bca35735b 100644 --- a/Lib/contextlib.py +++ b/Lib/contextlib.py @@ -557,11 +557,12 @@ def __enter__(self): return self def __exit__(self, *exc_details): - received_exc = exc_details[0] is not None + exc = 
exc_details[1] + received_exc = exc is not None # We manipulate the exception state so it behaves as though # we were actually nesting multiple with statements - frame_exc = sys.exc_info()[1] + frame_exc = sys.exception() def _fix_exception_context(new_exc, old_exc): # Context may not be correct, so find the end of the chain while 1: @@ -584,24 +585,28 @@ def _fix_exception_context(new_exc, old_exc): is_sync, cb = self._exit_callbacks.pop() assert is_sync try: + if exc is None: + exc_details = None, None, None + else: + exc_details = type(exc), exc, exc.__traceback__ if cb(*exc_details): suppressed_exc = True pending_raise = False - exc_details = (None, None, None) - except: - new_exc_details = sys.exc_info() + exc = None + except BaseException as new_exc: # simulate the stack of exceptions by setting the context - _fix_exception_context(new_exc_details[1], exc_details[1]) + _fix_exception_context(new_exc, exc) pending_raise = True - exc_details = new_exc_details + exc = new_exc + if pending_raise: try: - # bare "raise exc_details[1]" replaces our carefully + # bare "raise exc" replaces our carefully # set-up context - fixed_ctx = exc_details[1].__context__ - raise exc_details[1] + fixed_ctx = exc.__context__ + raise exc except BaseException: - exc_details[1].__context__ = fixed_ctx + exc.__context__ = fixed_ctx raise return received_exc and suppressed_exc @@ -697,11 +702,12 @@ async def __aenter__(self): return self async def __aexit__(self, *exc_details): - received_exc = exc_details[0] is not None + exc = exc_details[1] + received_exc = exc is not None # We manipulate the exception state so it behaves as though # we were actually nesting multiple with statements - frame_exc = sys.exc_info()[1] + frame_exc = sys.exception() def _fix_exception_context(new_exc, old_exc): # Context may not be correct, so find the end of the chain while 1: @@ -723,6 +729,10 @@ def _fix_exception_context(new_exc, old_exc): while self._exit_callbacks: is_sync, cb = self._exit_callbacks.pop() try: + if exc is None: + exc_details = None, None, None + else: + exc_details = type(exc), exc, exc.__traceback__ if is_sync: cb_suppress = cb(*exc_details) else: @@ -731,21 +741,21 @@ def _fix_exception_context(new_exc, old_exc): if cb_suppress: suppressed_exc = True pending_raise = False - exc_details = (None, None, None) - except: - new_exc_details = sys.exc_info() + exc = None + except BaseException as new_exc: # simulate the stack of exceptions by setting the context - _fix_exception_context(new_exc_details[1], exc_details[1]) + _fix_exception_context(new_exc, exc) pending_raise = True - exc_details = new_exc_details + exc = new_exc + if pending_raise: try: - # bare "raise exc_details[1]" replaces our carefully + # bare "raise exc" replaces our carefully # set-up context - fixed_ctx = exc_details[1].__context__ - raise exc_details[1] + fixed_ctx = exc.__context__ + raise exc except BaseException: - exc_details[1].__context__ = fixed_ctx + exc.__context__ = fixed_ctx raise return received_exc and suppressed_exc diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index e766a7b554afe1..21f3fa5c213f1f 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -575,15 +575,15 @@ def _init_fn(fields, std_fields, kw_only_fields, frozen, has_post_init, # message, and future-proofs us in case we build up the function # using ast. - seen_default = False + seen_default = None for f in std_fields: # Only consider the non-kw-only fields in the __init__ call. 
if f.init: if not (f.default is MISSING and f.default_factory is MISSING): - seen_default = True + seen_default = f elif seen_default: raise TypeError(f'non-default argument {f.name!r} ' - 'follows default argument') + f'follows default argument {seen_default.name!r}') locals = {f'__dataclass_type_{f.name}__': f.type for f in fields} locals.update({ @@ -1036,7 +1036,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, # Was this class defined with an explicit __hash__? Note that if # __eq__ is defined in this class, then python will automatically # set __hash__ to None. This is a heuristic, as it's possible - # that such a __hash__ == None was not auto-generated, but it + # that such a __hash__ == None was not auto-generated, but it's # close enough. class_hash = cls.__dict__.get('__hash__', MISSING) has_explicit_hash = not (class_hash is MISSING or diff --git a/Lib/dis.py b/Lib/dis.py index f7a31f2f96b99b..bf1a1e2ff7ac19 100644 --- a/Lib/dis.py +++ b/Lib/dis.py @@ -288,13 +288,16 @@ def show_code(co, *, file=None): _OPNAME_WIDTH = 20 _OPARG_WIDTH = 5 +def _get_cache_size(opname): + return _inline_cache_entries.get(opname, 0) + def _get_jump_target(op, arg, offset): """Gets the bytecode offset of the jump target if this is a jump instruction. Otherwise return None. """ deop = _deoptop(op) - caches = _inline_cache_entries[deop] + caches = _get_cache_size(_all_opname[deop]) if deop in hasjrel: if _is_backward_jump(deop): arg = -arg @@ -353,7 +356,7 @@ def cache_offset(self): @property def end_offset(self): """End index of the cache entries following the operation.""" - return self.cache_offset + _inline_cache_entries[self.opcode]*2 + return self.cache_offset + _get_cache_size(_all_opname[self.opcode])*2 @property def jump_target(self): @@ -430,7 +433,8 @@ def get_instructions(x, *, first_line=None, show_caches=False, adaptive=False): co.co_names, co.co_consts, linestarts, line_offset, co_positions=co.co_positions(), - show_caches=show_caches) + show_caches=show_caches, + original_code=co.co_code) def _get_const_value(op, arg, co_consts): """Helper to get the value of the const in a hasconst op. @@ -504,7 +508,7 @@ def _get_instructions_bytes(code, varname_from_oparg=None, names=None, co_consts=None, linestarts=None, line_offset=0, exception_entries=(), co_positions=None, - show_caches=False): + show_caches=False, original_code=None): """Iterate over the instructions in a bytecode string. Generates a sequence of Instruction namedtuples giving the details of each @@ -513,14 +517,18 @@ def _get_instructions_bytes(code, varname_from_oparg=None, arguments. 
""" + # Use the basic, unadaptive code for finding labels and actually walking the + # bytecode, since replacements like ENTER_EXECUTOR and INSTRUMENTED_* can + # mess that logic up pretty badly: + original_code = original_code or code co_positions = co_positions or iter(()) get_name = None if names is None else names.__getitem__ - labels = set(findlabels(code)) + labels = set(findlabels(original_code)) for start, end, target, _, _ in exception_entries: for i in range(start, end): labels.add(target) starts_line = None - for offset, start_offset, op, arg in _unpack_opargs(code): + for offset, start_offset, op, arg in _unpack_opargs(original_code): if linestarts is not None: starts_line = linestarts.get(offset, None) if starts_line is not None: @@ -530,7 +538,8 @@ def _get_instructions_bytes(code, varname_from_oparg=None, argrepr = '' positions = Positions(*next(co_positions, ())) deop = _deoptop(op) - caches = _inline_cache_entries[deop] + caches = _get_cache_size(_all_opname[deop]) + op = code[offset] if arg is not None: # Set argval to the dereferenced value of the argument when # available, and argrepr to the string representation of argval. @@ -543,15 +552,15 @@ def _get_instructions_bytes(code, varname_from_oparg=None, if deop == LOAD_GLOBAL: argval, argrepr = _get_name_info(arg//2, get_name) if (arg & 1) and argrepr: - argrepr = "NULL + " + argrepr + argrepr = f"{argrepr} + NULL" elif deop == LOAD_ATTR: argval, argrepr = _get_name_info(arg//2, get_name) if (arg & 1) and argrepr: - argrepr = "NULL|self + " + argrepr + argrepr = f"{argrepr} + NULL|self" elif deop == LOAD_SUPER_ATTR: argval, argrepr = _get_name_info(arg//4, get_name) if (arg & 1) and argrepr: - argrepr = "NULL|self + " + argrepr + argrepr = f"{argrepr} + NULL|self" else: argval, argrepr = _get_name_info(arg, get_name) elif deop in hasjabs: @@ -591,7 +600,6 @@ def _get_instructions_bytes(code, varname_from_oparg=None, yield Instruction(_all_opname[op], op, arg, argval, argrepr, offset, start_offset, starts_line, is_jump_target, positions) - caches = _inline_cache_entries[deop] if not caches: continue if not show_caches: @@ -622,7 +630,8 @@ def disassemble(co, lasti=-1, *, file=None, show_caches=False, adaptive=False): lasti, co._varname_from_oparg, co.co_names, co.co_consts, linestarts, file=file, exception_entries=exception_entries, - co_positions=co.co_positions(), show_caches=show_caches) + co_positions=co.co_positions(), show_caches=show_caches, + original_code=co.co_code) def _disassemble_recursive(co, *, file=None, depth=None, show_caches=False, adaptive=False): disassemble(co, file=file, show_caches=show_caches, adaptive=adaptive) @@ -640,7 +649,7 @@ def _disassemble_recursive(co, *, file=None, depth=None, show_caches=False, adap def _disassemble_bytes(code, lasti=-1, varname_from_oparg=None, names=None, co_consts=None, linestarts=None, *, file=None, line_offset=0, exception_entries=(), - co_positions=None, show_caches=False): + co_positions=None, show_caches=False, original_code=None): # Omit the line number column entirely if we have no line number info show_lineno = bool(linestarts) if show_lineno: @@ -661,7 +670,8 @@ def _disassemble_bytes(code, lasti=-1, varname_from_oparg=None, line_offset=line_offset, exception_entries=exception_entries, co_positions=co_positions, - show_caches=show_caches): + show_caches=show_caches, + original_code=original_code): new_source_line = (show_lineno and instr.starts_line is not None and instr.offset > 0) @@ -672,7 +682,7 @@ def _disassemble_bytes(code, lasti=-1, 
varname_from_oparg=None, else: # Each CACHE takes 2 bytes is_current_instr = instr.offset <= lasti \ - <= instr.offset + 2 * _inline_cache_entries[_deoptop(instr.opcode)] + <= instr.offset + 2 * _get_cache_size(_all_opname[_deoptop(instr.opcode)]) print(instr._disassemble(lineno_width, is_current_instr, offset_width), file=file) if exception_entries: @@ -705,7 +715,7 @@ def _unpack_opargs(code): continue op = code[i] deop = _deoptop(op) - caches = _inline_cache_entries[deop] + caches = _get_cache_size(_all_opname[deop]) if deop in hasarg: arg = code[i+1] | extended_arg extended_arg = (arg << 8) if deop == EXTENDED_ARG else 0 @@ -823,7 +833,8 @@ def __iter__(self): line_offset=self._line_offset, exception_entries=self.exception_entries, co_positions=co.co_positions(), - show_caches=self.show_caches) + show_caches=self.show_caches, + original_code=co.co_code) def __repr__(self): return "{}({!r})".format(self.__class__.__name__, @@ -859,7 +870,8 @@ def dis(self): lasti=offset, exception_entries=self.exception_entries, co_positions=co.co_positions(), - show_caches=self.show_caches) + show_caches=self.show_caches, + original_code=co.co_code) return output.getvalue() diff --git a/Lib/doctest.py b/Lib/doctest.py index 2776d74bf9b586..a63df46a112e64 100644 --- a/Lib/doctest.py +++ b/Lib/doctest.py @@ -1110,7 +1110,7 @@ def _find_lineno(self, obj, source_lines): if source_lines is None: return None pat = re.compile(r'^\s*class\s*%s\b' % - getattr(obj, '__name__', '-')) + re.escape(getattr(obj, '__name__', '-'))) for i, line in enumerate(source_lines): if pat.match(line): lineno = i diff --git a/Lib/email/feedparser.py b/Lib/email/feedparser.py index 53d71f50225152..06d6b4a3afcd07 100644 --- a/Lib/email/feedparser.py +++ b/Lib/email/feedparser.py @@ -189,7 +189,7 @@ def close(self): assert not self._msgstack # Look for final set of defects if root.get_content_maintype() == 'multipart' \ - and not root.is_multipart(): + and not root.is_multipart() and not self._headersonly: defect = errors.MultipartInvariantViolationDefect() self.policy.handle_defect(root, defect) return root diff --git a/Lib/email/utils.py b/Lib/email/utils.py index 11ad75e94e9345..81da5394ea1695 100644 --- a/Lib/email/utils.py +++ b/Lib/email/utils.py @@ -106,54 +106,12 @@ def formataddr(pair, charset='utf-8'): return address -def _pre_parse_validation(email_header_fields): - accepted_values = [] - for v in email_header_fields: - s = v.replace('\\(', '').replace('\\)', '') - if s.count('(') != s.count(')'): - v = "('', '')" - accepted_values.append(v) - - return accepted_values - - -def _post_parse_validation(parsed_email_header_tuples): - accepted_values = [] - # The parser would have parsed a correctly formatted domain-literal - # The existence of an [ after parsing indicates a parsing failure - for v in parsed_email_header_tuples: - if '[' in v[1]: - v = ('', '') - accepted_values.append(v) - - return accepted_values - def getaddresses(fieldvalues): - """Return a list of (REALNAME, EMAIL) or ('','') for each fieldvalue. - - When parsing fails for a fieldvalue, a 2-tuple of ('', '') is returned in - its place. - - If the resulting list of parsed address is not the same as the number of - fieldvalues in the input list a parsing error has occurred. A list - containing a single empty 2-tuple [('', '')] is returned in its place. - This is done to avoid invalid output. 
- """ - fieldvalues = [str(v) for v in fieldvalues] - fieldvalues = _pre_parse_validation(fieldvalues) - all = COMMASPACE.join(v for v in fieldvalues) + """Return a list of (REALNAME, EMAIL) for each fieldvalue.""" + all = COMMASPACE.join(str(v) for v in fieldvalues) a = _AddressList(all) - result = _post_parse_validation(a.addresslist) - - n = 0 - for v in fieldvalues: - n += v.count(',') + 1 - - if len(result) != n: - return [('', '')] - - return result + return a.addresslist def _format_timetuple_and_zone(timetuple, zone): @@ -254,18 +212,9 @@ def parseaddr(addr): Return a tuple of realname and email address, unless the parse fails, in which case return a 2-tuple of ('', ''). """ - if isinstance(addr, list): - addr = addr[0] - - if not isinstance(addr, str): - return ('', '') - - addr = _pre_parse_validation([addr])[0] - addrs = _post_parse_validation(_AddressList(addr).addresslist) - - if not addrs or len(addrs) > 1: - return ('', '') - + addrs = _AddressList(addr).addresslist + if not addrs: + return '', '' return addrs[0] diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 5f4f1d75b43e64..1fb1d505cfd0c5 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -10,7 +10,7 @@ __all__ = ["version", "bootstrap"] _PACKAGE_NAMES = ('pip',) -_PIP_VERSION = "23.1.2" +_PIP_VERSION = "23.2.1" _PROJECTS = [ ("pip", _PIP_VERSION, "py3"), ] diff --git a/Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-23.2.1-py3-none-any.whl similarity index 74% rename from Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl rename to Lib/ensurepip/_bundled/pip-23.2.1-py3-none-any.whl index 6a2515615ccda3..ba28ef02e265f0 100644 Binary files a/Lib/ensurepip/_bundled/pip-23.1.2-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-23.2.1-py3-none-any.whl differ diff --git a/Lib/functools.py b/Lib/functools.py index 8518450a8d499d..be44ccdae6b692 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -934,6 +934,9 @@ def __init__(self, func): self.dispatcher = singledispatch(func) self.func = func + import weakref # see comment in singledispatch function + self._method_cache = weakref.WeakKeyDictionary() + def register(self, cls, method=None): """generic_method.register(cls, func) -> func @@ -942,13 +945,27 @@ def register(self, cls, method=None): return self.dispatcher.register(cls, func=method) def __get__(self, obj, cls=None): + if self._method_cache is not None: + try: + _method = self._method_cache[obj] + except TypeError: + self._method_cache = None + except KeyError: + pass + else: + return _method + + dispatch = self.dispatcher.dispatch def _method(*args, **kwargs): - method = self.dispatcher.dispatch(args[0].__class__) - return method.__get__(obj, cls)(*args, **kwargs) + return dispatch(args[0].__class__).__get__(obj, cls)(*args, **kwargs) _method.__isabstractmethod__ = self.__isabstractmethod__ _method.register = self.register update_wrapper(_method, self.func) + + if self._method_cache is not None: + self._method_cache[obj] = _method + return _method @property diff --git a/Lib/gettext.py b/Lib/gettext.py index 6c5ec4e517f637..b72b15f82d4355 100644 --- a/Lib/gettext.py +++ b/Lib/gettext.py @@ -422,10 +422,12 @@ def gettext(self, message): missing = object() tmsg = self._catalog.get(message, missing) if tmsg is missing: - if self._fallback: - return self._fallback.gettext(message) - return message - return tmsg + tmsg = self._catalog.get((message, self.plural(1)), missing) + if tmsg is not missing: + return tmsg + if self._fallback: + return 
self._fallback.gettext(message) + return message def ngettext(self, msgid1, msgid2, n): try: @@ -444,10 +446,12 @@ def pgettext(self, context, message): missing = object() tmsg = self._catalog.get(ctxt_msg_id, missing) if tmsg is missing: - if self._fallback: - return self._fallback.pgettext(context, message) - return message - return tmsg + tmsg = self._catalog.get((ctxt_msg_id, self.plural(1)), missing) + if tmsg is not missing: + return tmsg + if self._fallback: + return self._fallback.pgettext(context, message) + return message def npgettext(self, context, msgid1, msgid2, n): ctxt_msg_id = self.CONTEXT % (context, msgid1) diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 16a82bef2ba71f..5f0d659b1ed535 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -453,6 +453,7 @@ def _write_atomic(path, data, mode=0o666): # Python 3.13a1 3555 (generate specialized opcodes metadata from bytecodes.c) # Python 3.13a1 3556 (Convert LOAD_CLOSURE to a pseudo-op) # Python 3.13a1 3557 (Make the conversion to boolean in jumps explicit) +# Python 3.13a1 3558 (Reorder the stack items for CALL) # Python 3.14 will start with 3600 @@ -469,7 +470,7 @@ def _write_atomic(path, data, mode=0o666): # Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array # in PC/launcher.c must also be updated. -MAGIC_NUMBER = (3557).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3558).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c diff --git a/Lib/inspect.py b/Lib/inspect.py index 675714dc8b3f70..c8211833dd0831 100644 --- a/Lib/inspect.py +++ b/Lib/inspect.py @@ -1078,7 +1078,8 @@ def get_lineno(self): # First, let's see if there are any method definitions for member in self.cls.__dict__.values(): - if isinstance(member, types.FunctionType): + if (isinstance(member, types.FunctionType) and + member.__module__ == self.cls.__module__): for lineno, end_lineno in self.lineno_found: if lineno <= member.__code__.co_firstlineno <= end_lineno: return lineno diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index af1d5c4800cce8..f5aba434fd4253 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1923,8 +1923,40 @@ def __init__(self, address): self._ip = self._ip_int_from_string(addr_str) + def _explode_shorthand_ip_string(self): + ipv4_mapped = self.ipv4_mapped + if ipv4_mapped is None: + long_form = super()._explode_shorthand_ip_string() + else: + prefix_len = 30 + raw_exploded_str = super()._explode_shorthand_ip_string() + long_form = "%s%s" % (raw_exploded_str[:prefix_len], str(ipv4_mapped)) + return long_form + + def _ipv4_mapped_ipv6_to_str(self): + """Return convenient text representation of IPv4-mapped IPv6 address + + See RFC 4291 2.5.5.2, 2.2 p.3 for details. + + Returns: + A string, 'x:x:x:x:x:x:d.d.d.d', where the 'x's are the hexadecimal values of + the six high-order 16-bit pieces of the address, and the 'd's are + the decimal values of the four low-order 8-bit pieces of the + address (standard IPv4 representation) as defined in RFC 4291 2.2 p.3. 
+ + """ + ipv4_mapped = self.ipv4_mapped + if ipv4_mapped is None: + raise AddressValueError("Can not apply to non-IPv4-mapped IPv6 address %s" % str(self)) + high_order_bits = self._ip >> 32 + return "%s:%s" % (self._string_from_ip_int(high_order_bits), str(ipv4_mapped)) + def __str__(self): - ip_str = super().__str__() + ipv4_mapped = self.ipv4_mapped + if ipv4_mapped is None: + ip_str = super().__str__() + else: + ip_str = self._ipv4_mapped_ipv6_to_str() return ip_str + '%' + self._scope_id if self._scope_id else ip_str def __hash__(self): diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 46e86cb87ecfcb..527fc5c631730a 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -916,8 +916,7 @@ def getHandlerNames(): """ Return all known handler names as an immutable set. """ - result = set(_handlers.keys()) - return frozenset(result) + return frozenset(_handlers) class Handler(Filterer): diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py index 9847104446eaf6..671cc9596b02dd 100644 --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -1399,7 +1399,7 @@ def flush(self): records to the target, if there is one. Override if you want different behaviour. - The record buffer is also cleared by this operation. + The record buffer is only cleared if a target has been set. """ self.acquire() try: diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py index b6534939b4d98b..273c22a7654f05 100644 --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -90,7 +90,10 @@ def dispatch(c, id, methodname, args=(), kwds={}): kind, result = c.recv() if kind == '#RETURN': return result - raise convert_to_error(kind, result) + try: + raise convert_to_error(kind, result) + finally: + del result # break reference cycle def convert_to_error(kind, result): if kind == '#ERROR': @@ -833,7 +836,10 @@ def _callmethod(self, methodname, args=(), kwds={}): conn = self._Client(token.address, authkey=self._authkey) dispatch(conn, None, 'decref', (token.id,)) return proxy - raise convert_to_error(kind, result) + try: + raise convert_to_error(kind, result) + finally: + del result # break reference cycle def _getvalue(self): ''' diff --git a/Lib/multiprocessing/resource_tracker.py b/Lib/multiprocessing/resource_tracker.py index ea369507297f86..3783c1ffc6e4a9 100644 --- a/Lib/multiprocessing/resource_tracker.py +++ b/Lib/multiprocessing/resource_tracker.py @@ -221,9 +221,10 @@ def main(fd): for rtype, rtype_cache in cache.items(): if rtype_cache: try: - warnings.warn('resource_tracker: There appear to be %d ' - 'leaked %s objects to clean up at shutdown' % - (len(rtype_cache), rtype)) + warnings.warn( + f'resource_tracker: There appear to be {len(rtype_cache)} ' + f'leaked {rtype} objects to clean up at shutdown: {rtype_cache}' + ) except Exception: pass for name in rtype_cache: diff --git a/Lib/opcode.py b/Lib/opcode.py index 08dfd2674dca78..5a9f8ddd0738db 100644 --- a/Lib/opcode.py +++ b/Lib/opcode.py @@ -19,26 +19,11 @@ cmp_op = ('<', '<=', '==', '!=', '>', '>=') - -ENABLE_SPECIALIZATION = True - -def is_pseudo(op): - return op >= MIN_PSEUDO_OPCODE and op <= MAX_PSEUDO_OPCODE - opmap = {} -# pseudo opcodes (used in the compiler) mapped to the values -# they can become in the actual code. 
-_pseudo_ops = {} - def def_op(name, op): opmap[name] = op -def pseudo_op(name, op, real_ops): - def_op(name, op) - _pseudo_ops[name] = real_ops - - # Instruction opcodes for compiled code # Blank lines correspond to available opcodes @@ -215,29 +200,27 @@ def pseudo_op(name, op, real_ops): # 255 is reserved -MIN_PSEUDO_OPCODE = 256 - -pseudo_op('SETUP_FINALLY', 256, ['NOP']) -pseudo_op('SETUP_CLEANUP', 257, ['NOP']) -pseudo_op('SETUP_WITH', 258, ['NOP']) -pseudo_op('POP_BLOCK', 259, ['NOP']) +# Pseudo ops are above 255: -pseudo_op('JUMP', 260, ['JUMP_FORWARD', 'JUMP_BACKWARD']) -pseudo_op('JUMP_NO_INTERRUPT', 261, ['JUMP_FORWARD', 'JUMP_BACKWARD_NO_INTERRUPT']) +def_op('SETUP_FINALLY', 256) +def_op('SETUP_CLEANUP', 257) +def_op('SETUP_WITH', 258) +def_op('POP_BLOCK', 259) -pseudo_op('LOAD_METHOD', 262, ['LOAD_ATTR']) -pseudo_op('LOAD_SUPER_METHOD', 263, ['LOAD_SUPER_ATTR']) -pseudo_op('LOAD_ZERO_SUPER_METHOD', 264, ['LOAD_SUPER_ATTR']) -pseudo_op('LOAD_ZERO_SUPER_ATTR', 265, ['LOAD_SUPER_ATTR']) +def_op('JUMP', 260) +def_op('JUMP_NO_INTERRUPT', 261) -pseudo_op('STORE_FAST_MAYBE_NULL', 266, ['STORE_FAST']) -pseudo_op('LOAD_CLOSURE', 267, ['LOAD_FAST']) +def_op('LOAD_METHOD', 262) +def_op('LOAD_SUPER_METHOD', 263) +def_op('LOAD_ZERO_SUPER_METHOD', 264) +def_op('LOAD_ZERO_SUPER_ATTR', 265) -MAX_PSEUDO_OPCODE = MIN_PSEUDO_OPCODE + len(_pseudo_ops) - 1 +def_op('STORE_FAST_MAYBE_NULL', 266) +def_op('LOAD_CLOSURE', 267) -del def_op, pseudo_op +del def_op -opname = ['<%r>' % (op,) for op in range(MAX_PSEUDO_OPCODE + 1)] +opname = ['<%r>' % (op,) for op in range(max(opmap.values()) + 1)] for op, i in opmap.items(): opname[i] = op @@ -257,6 +240,9 @@ def pseudo_op(name, op, real_ops): __all__.extend(["hasarg", "hasconst", "hasname", "hasjump", "hasjrel", "hasjabs", "hasfree", "haslocal", "hasexc"]) + _intrinsic_1_descs = _opcode.get_intrinsic1_descs() + _intrinsic_2_descs = _opcode.get_intrinsic2_descs() + hascompare = [opmap["COMPARE_OP"]] _nb_ops = [ @@ -288,29 +274,6 @@ def pseudo_op(name, op, real_ops): ("NB_INPLACE_XOR", "^="), ] -_intrinsic_1_descs = [ - "INTRINSIC_1_INVALID", - "INTRINSIC_PRINT", - "INTRINSIC_IMPORT_STAR", - "INTRINSIC_STOPITERATION_ERROR", - "INTRINSIC_ASYNC_GEN_WRAP", - "INTRINSIC_UNARY_POSITIVE", - "INTRINSIC_LIST_TO_TUPLE", - "INTRINSIC_TYPEVAR", - "INTRINSIC_PARAMSPEC", - "INTRINSIC_TYPEVARTUPLE", - "INTRINSIC_SUBSCRIPT_GENERIC", - "INTRINSIC_TYPEALIAS", -] - -_intrinsic_2_descs = [ - "INTRINSIC_2_INVALID", - "INTRINSIC_PREP_RERAISE_STAR", - "INTRINSIC_TYPEVAR_WITH_BOUND", - "INTRINSIC_TYPEVAR_WITH_CONSTRAINTS", - "INTRINSIC_SET_FUNCTION_TYPE_PARAMS", -] - _cache_format = { "LOAD_GLOBAL": { @@ -367,6 +330,6 @@ def pseudo_op(name, op, real_ops): }, } -_inline_cache_entries = [ - sum(_cache_format.get(opname[opcode], {}).values()) for opcode in range(256) -] +_inline_cache_entries = { + name : sum(value.values()) for (name, value) in _cache_format.items() +} diff --git a/Lib/pathlib.py b/Lib/pathlib.py index cdf9c02aa4de6e..b2f916e36e6e8f 100644 --- a/Lib/pathlib.py +++ b/Lib/pathlib.py @@ -643,10 +643,12 @@ def relative_to(self, other, /, *_deprecated, walk_up=False): for step, path in enumerate([other] + list(other.parents)): if self.is_relative_to(path): break + elif not walk_up: + raise ValueError(f"{str(self)!r} is not in the subpath of {str(other)!r}") + elif path.name == '..': + raise ValueError(f"'..' 
segment in {str(other)!r} cannot be walked") else: raise ValueError(f"{str(self)!r} and {str(other)!r} have different anchors") - if step and not walk_up: - raise ValueError(f"{str(self)!r} is not in the subpath of {str(other)!r}") parts = ['..'] * step + self._tail[len(path._tail):] return self.with_segments(*parts) @@ -1100,6 +1102,11 @@ def _glob(self, pattern, case_sensitive, follow_symlinks): pattern_parts.append('') if pattern_parts[-1] == '**': # GH-70303: '**' only matches directories. Add trailing slash. + warnings.warn( + "Pattern ending '**' will match files and directories in a " + "future Python release. Add a trailing slash to match only " + "directories and remove this warning.", + FutureWarning, 3) pattern_parts.append('') if case_sensitive is None: diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 185f09e603df2e..c9a55799b39f0c 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -197,6 +197,24 @@ def splitdoc(doc): return lines[0], '\n'.join(lines[2:]) return '', '\n'.join(lines) +def _getargspec(object): + try: + signature = inspect.signature(object) + if signature: + return str(signature) + except (ValueError, TypeError): + argspec = getattr(object, '__text_signature__', None) + if argspec: + if argspec[:2] == '($': + argspec = '(' + argspec[2:] + if getattr(object, '__self__', None) is not None: + # Strip the bound argument. + m = re.match(r'\(\w+(?:(?=\))|,\s*(?:/(?:(?=\))|,\s*))?)', argspec) + if m: + argspec = '(' + argspec[m.end():] + return argspec + return None + def classname(object, modname): """Get a class name and qualify it with a module name if necessary.""" name = object.__name__ @@ -1003,14 +1021,9 @@ def spilldata(msg, attrs, predicate): title = title + '(%s)' % ', '.join(parents) decl = '' - try: - signature = inspect.signature(object) - except (ValueError, TypeError): - signature = None - if signature: - argspec = str(signature) - if argspec and argspec != '()': - decl = name + self.escape(argspec) + '\n\n' + argspec = _getargspec(object) + if argspec and argspec != '()': + decl = name + self.escape(argspec) + '\n\n' doc = getdoc(object) if decl: @@ -1063,18 +1076,13 @@ def docroutine(self, object, name=None, mod=None, anchor, name, reallink) argspec = None if inspect.isroutine(object): - try: - signature = inspect.signature(object) - except (ValueError, TypeError): - signature = None - if signature: - argspec = str(signature) - if realname == '': - title = '%s lambda ' % name - # XXX lambda's won't usually have func_annotations['return'] - # since the syntax doesn't support but it is possible. - # So removing parentheses isn't truly safe. - argspec = argspec[1:-1] # remove parentheses + argspec = _getargspec(object) + if argspec and realname == '': + title = '%s lambda ' % name + # XXX lambda's won't usually have func_annotations['return'] + # since the syntax doesn't support but it is possible. + # So removing parentheses isn't truly safe. 
+ argspec = argspec[1:-1] # remove parentheses if not argspec: argspec = '(...)' @@ -1321,14 +1329,9 @@ def makename(c, m=object.__module__): contents = [] push = contents.append - try: - signature = inspect.signature(object) - except (ValueError, TypeError): - signature = None - if signature: - argspec = str(signature) - if argspec and argspec != '()': - push(name + argspec + '\n') + argspec = _getargspec(object) + if argspec and argspec != '()': + push(name + argspec + '\n') doc = getdoc(object) if doc: @@ -1492,18 +1495,13 @@ def docroutine(self, object, name=None, mod=None, cl=None): argspec = None if inspect.isroutine(object): - try: - signature = inspect.signature(object) - except (ValueError, TypeError): - signature = None - if signature: - argspec = str(signature) - if realname == '': - title = self.bold(name) + ' lambda ' - # XXX lambda's won't usually have func_annotations['return'] - # since the syntax doesn't support but it is possible. - # So removing parentheses isn't truly safe. - argspec = argspec[1:-1] # remove parentheses + argspec = _getargspec(object) + if argspec and realname == '': + title = self.bold(name) + ' lambda ' + # XXX lambda's won't usually have func_annotations['return'] + # since the syntax doesn't support but it is possible. + # So removing parentheses isn't truly safe. + argspec = argspec[1:-1] # remove parentheses if not argspec: argspec = '(...)' decl = asyncqualifier + title + argspec + note diff --git a/Lib/re/_compiler.py b/Lib/re/_compiler.py index d0a4c55caf6e41..f5fd160ba00435 100644 --- a/Lib/re/_compiler.py +++ b/Lib/re/_compiler.py @@ -100,6 +100,13 @@ def _compile(code, pattern, flags): emit(ANY_ALL) else: emit(ANY) + elif op is POSSESSIVE_REPEAT: + # gh-106052: Possessive quantifiers do not work when the + # subpattern contains backtracking, i.e. "(?:ab?c)*+". + # Implement it as equivalent greedy qualifier in atomic group. 
+ p = [(MAX_REPEAT, av)] + p = [(ATOMIC_GROUP, p)] + _compile(code, p, flags) elif op in REPEATING_CODES: if _simple(av[2]): emit(REPEATING_CODES[op][2]) diff --git a/Lib/re/_parser.py b/Lib/re/_parser.py index 22d10ab6e31d37..d00b7e67d55958 100644 --- a/Lib/re/_parser.py +++ b/Lib/re/_parser.py @@ -773,8 +773,10 @@ def _parse(source, state, verbose, nested, first=False): source.tell() - start) if char == "=": subpatternappend((ASSERT, (dir, p))) - else: + elif p: subpatternappend((ASSERT_NOT, (dir, p))) + else: + subpatternappend((FAILURE, ())) continue elif char == "(": diff --git a/Lib/reprlib.py b/Lib/reprlib.py index a92b3e3dbb613a..840dd0e20132b1 100644 --- a/Lib/reprlib.py +++ b/Lib/reprlib.py @@ -29,6 +29,7 @@ def wrapper(self): wrapper.__name__ = getattr(user_function, '__name__') wrapper.__qualname__ = getattr(user_function, '__qualname__') wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + wrapper.__wrapped__ = user_function return wrapper return decorating_function diff --git a/Lib/selectors.py b/Lib/selectors.py index 13497a24097232..20367c9152f331 100644 --- a/Lib/selectors.py +++ b/Lib/selectors.py @@ -339,11 +339,8 @@ def __init__(self): def register(self, fileobj, events, data=None): key = super().register(fileobj, events, data) - poller_events = 0 - if events & EVENT_READ: - poller_events |= self._EVENT_READ - if events & EVENT_WRITE: - poller_events |= self._EVENT_WRITE + poller_events = ((events & EVENT_READ and self._EVENT_READ) + | (events & EVENT_WRITE and self._EVENT_WRITE) ) try: self._selector.register(key.fd, poller_events) except: @@ -369,11 +366,8 @@ def modify(self, fileobj, events, data=None): changed = False if events != key.events: - selector_events = 0 - if events & EVENT_READ: - selector_events |= self._EVENT_READ - if events & EVENT_WRITE: - selector_events |= self._EVENT_WRITE + selector_events = ((events & EVENT_READ and self._EVENT_READ) + | (events & EVENT_WRITE and self._EVENT_WRITE)) try: self._selector.modify(key.fd, selector_events) except: @@ -404,15 +398,13 @@ def select(self, timeout=None): fd_event_list = self._selector.poll(timeout) except InterruptedError: return ready - for fd, event in fd_event_list: - events = 0 - if event & ~self._EVENT_READ: - events |= EVENT_WRITE - if event & ~self._EVENT_WRITE: - events |= EVENT_READ - key = self._fd_to_key.get(fd) + fd_to_key_get = self._fd_to_key.get + for fd, event in fd_event_list: + key = fd_to_key_get(fd) if key: + events = ((event & ~self._EVENT_READ and EVENT_WRITE) + | (event & ~self._EVENT_WRITE and EVENT_READ)) ready.append((key, events & key.events)) return ready diff --git a/Lib/shelve.py b/Lib/shelve.py index e053c397345a07..50584716e9ea64 100644 --- a/Lib/shelve.py +++ b/Lib/shelve.py @@ -226,6 +226,13 @@ def __init__(self, filename, flag='c', protocol=None, writeback=False): import dbm Shelf.__init__(self, dbm.open(filename, flag), protocol, writeback) + def clear(self): + """Remove all items from the shelf.""" + # Call through to the clear method on dbm-backed shelves. + # see https://github.com/python/cpython/issues/107089 + self.cache.clear() + self.dict.clear() + def open(filename, flag='c', protocol=None, writeback=False): """Open a persistent dictionary for reading and writing. 
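
The shelve.py hunk above gives dbm-backed shelves a clear() that empties both the write-back cache and the underlying dbm mapping (see the gh-107089 reference in the added comment). A short usage sketch, not part of the patch and with a made-up filename:

# Sketch only: exercising clear() on a dbm-backed shelf.
import shelve

with shelve.open("example_shelf", flag="c", writeback=True) as db:
    db["alpha"] = 1
    db["beta"] = [2, 3]
    db.clear()           # empties the write-back cache and the dbm file
    assert len(db) == 0
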
diff --git a/Lib/smtplib.py b/Lib/smtplib.py index 18c91746fd7bf2..b3cc68a789a7d8 100755 --- a/Lib/smtplib.py +++ b/Lib/smtplib.py @@ -542,7 +542,7 @@ def mail(self, sender, options=()): raise SMTPNotSupportedError( 'SMTPUTF8 not supported by server') optionlist = ' ' + ' '.join(options) - self.putcmd("mail", "FROM:%s%s" % (quoteaddr(sender), optionlist)) + self.putcmd("mail", "from:%s%s" % (quoteaddr(sender), optionlist)) return self.getreply() def rcpt(self, recip, options=()): @@ -550,7 +550,7 @@ def rcpt(self, recip, options=()): optionlist = '' if options and self.does_esmtp: optionlist = ' ' + ' '.join(options) - self.putcmd("rcpt", "TO:%s%s" % (quoteaddr(recip), optionlist)) + self.putcmd("rcpt", "to:%s%s" % (quoteaddr(recip), optionlist)) return self.getreply() def data(self, msg): diff --git a/Lib/statistics.py b/Lib/statistics.py index 6bd214bbfe2ff5..a8036e9928c464 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -137,6 +137,7 @@ from itertools import count, groupby, repeat from bisect import bisect_left, bisect_right from math import hypot, sqrt, fabs, exp, erf, tau, log, fsum, sumprod +from math import isfinite, isinf from functools import reduce from operator import itemgetter from collections import Counter, namedtuple, defaultdict @@ -1004,6 +1005,27 @@ def _mean_stdev(data): # Handle Nans and Infs gracefully return float(xbar), float(xbar) / float(ss) +def _sqrtprod(x: float, y: float) -> float: + "Return sqrt(x * y) computed with improved accuracy and without overflow/underflow." + h = sqrt(x * y) + if not isfinite(h): + if isinf(h) and not isinf(x) and not isinf(y): + # Finite inputs overflowed, so scale down, and recompute. + scale = 2.0 ** -512 # sqrt(1 / sys.float_info.max) + return _sqrtprod(scale * x, scale * y) / scale + return h + if not h: + if x and y: + # Non-zero inputs underflowed, so scale up, and recompute. + # Scale: 1 / sqrt(sys.float_info.min * sys.float_info.epsilon) + scale = 2.0 ** 537 + return _sqrtprod(scale * x, scale * y) / scale + return h + # Improve accuracy with a differential correction. + # https://www.wolframalpha.com/input/?i=Maclaurin+series+sqrt%28h**2+%2B+x%29+at+x%3D0 + d = sumprod((x, h), (y, -h)) + return h + d / (2.0 * h) + # === Statistics for relations between two inputs === @@ -1083,7 +1105,7 @@ def correlation(x, y, /, *, method='linear'): sxx = sumprod(x, x) syy = sumprod(y, y) try: - return sxy / sqrt(sxx * syy) + return sxy / _sqrtprod(sxx, syy) except ZeroDivisionError: raise StatisticsError('at least one of the inputs is constant') diff --git a/Lib/subprocess.py b/Lib/subprocess.py index fbc76b8d0f14b2..6df5dd551ea67e 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -346,7 +346,7 @@ def _args_from_interpreter_flags(): if dev_mode: args.extend(('-X', 'dev')) for opt in ('faulthandler', 'tracemalloc', 'importtime', - 'showrefcount', 'utf8'): + 'frozen_modules', 'showrefcount', 'utf8'): if opt in xoptions: value = xoptions[opt] if value is True: diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index c1f9487ae80511..f881a5d4674699 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -3149,6 +3149,44 @@ def test_rapid_restart(self): if hasattr(manager, "shutdown"): self.addCleanup(manager.shutdown) + +class FakeConnection: + def send(self, payload): + pass + + def recv(self): + return '#ERROR', pyqueue.Empty() + +class TestManagerExceptions(unittest.TestCase): + # Issue 106558: Manager exceptions avoids creating cyclic references. 
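+    # The tests below disable the garbage collector so that cyclic garbage
+    # is not collected behind the scenes: if the caught exception were part
+    # of a reference cycle it would stay alive, the weakref taken to it
+    # would still resolve, and the assertion on that weakref would fail.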
+ def setUp(self): + self.mgr = multiprocessing.Manager() + + def tearDown(self): + self.mgr.shutdown() + self.mgr.join() + + def test_queue_get(self): + queue = self.mgr.Queue() + if gc.isenabled(): + gc.disable() + self.addCleanup(gc.enable) + try: + queue.get_nowait() + except pyqueue.Empty as e: + wr = weakref.ref(e) + self.assertEqual(wr(), None) + + def test_dispatch(self): + if gc.isenabled(): + gc.disable() + self.addCleanup(gc.enable) + try: + multiprocessing.managers.dispatch(FakeConnection(), None, None) + except pyqueue.Empty as e: + wr = weakref.ref(e) + self.assertEqual(wr(), None) + # # # diff --git a/Lib/test/audit-tests.py b/Lib/test/audit-tests.py index 0edc9d9c472766..9504829e96f00e 100644 --- a/Lib/test/audit-tests.py +++ b/Lib/test/audit-tests.py @@ -514,6 +514,17 @@ def test_not_in_gc(): assert hook not in o +def test_sys_monitoring_register_callback(): + import sys + + def hook(event, args): + if event.startswith("sys.monitoring"): + print(event, args) + + sys.addaudithook(hook) + sys.monitoring.register_callback(1, 1, None) + + if __name__ == "__main__": from test.support import suppress_msvcrt_asserts diff --git a/Lib/test/clinic.test.c b/Lib/test/clinic.test.c index da99e58c77f021..019dc10073e986 100644 --- a/Lib/test/clinic.test.c +++ b/Lib/test/clinic.test.c @@ -4,9 +4,11 @@ output preset block /*[clinic end generated code: output=da39a3ee5e6b4b0d input=3c81ac2402d06a8b]*/ /*[clinic input] +module m +class m.T "TestObj *" "TestType" class Test "TestObj *" "TestType" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=fc7e50384d12b83f]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f761b4d55cb179cf]*/ /*[clinic input] test_object_converter @@ -4948,3 +4950,520 @@ Test_meth_coexist(TestObj *self, PyObject *Py_UNUSED(ignored)) static PyObject * Test_meth_coexist_impl(TestObj *self) /*[clinic end generated code: output=808a293d0cd27439 input=2a1d75b5e6fec6dd]*/ + + +/*[clinic input] +output push +output preset buffer +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=5bff3376ee0df0b5]*/ + +/*[clinic input] +buffer_clear + a: int +We'll call 'destination buffer clear' after this. + +Argument Clinic's buffer preset puts most generated code into the +'buffer' destination, except from 'impl_definition', which is put into +the 'block' destination, so we should expect everything but +'impl_definition' to be cleared. +[clinic start generated code]*/ + +static PyObject * +buffer_clear_impl(PyObject *module, int a) +/*[clinic end generated code: output=f14bba74677e1846 input=a4c308a6fdab043c]*/ + +/*[clinic input] +destination buffer clear +output pop +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f20d06adb8252084]*/ + + +/*[clinic input] +output push +destination test1 new buffer +output everything suppress +output docstring_definition test1 +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=5a77c454970992fc]*/ + +/*[clinic input] +new_dest + a: int +Only this docstring should be outputted to test1. 
+[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=da5af421ed8996ed]*/ + +/*[clinic input] +dump test1 +output pop +[clinic start generated code]*/ + +PyDoc_STRVAR(new_dest__doc__, +"new_dest($module, /, a)\n" +"--\n" +"\n" +"Only this docstring should be outputted to test1."); +/*[clinic end generated code: output=9cac703f51d90e84 input=090db8df4945576d]*/ + + +/*[clinic input] +mangled_c_keyword_identifier + i as int: int +The 'int' param should be mangled as 'int_value' +[clinic start generated code]*/ + +PyDoc_STRVAR(mangled_c_keyword_identifier__doc__, +"mangled_c_keyword_identifier($module, /, i)\n" +"--\n" +"\n" +"The \'int\' param should be mangled as \'int_value\'"); + +#define MANGLED_C_KEYWORD_IDENTIFIER_METHODDEF \ + {"mangled_c_keyword_identifier", _PyCFunction_CAST(mangled_c_keyword_identifier), METH_FASTCALL|METH_KEYWORDS, mangled_c_keyword_identifier__doc__}, + +static PyObject * +mangled_c_keyword_identifier_impl(PyObject *module, int int_value); + +static PyObject * +mangled_c_keyword_identifier(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(i), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"i", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "mangled_c_keyword_identifier", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + int int_value; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + int_value = _PyLong_AsInt(args[0]); + if (int_value == -1 && PyErr_Occurred()) { + goto exit; + } + return_value = mangled_c_keyword_identifier_impl(module, int_value); + +exit: + return return_value; +} + +static PyObject * +mangled_c_keyword_identifier_impl(PyObject *module, int int_value) +/*[clinic end generated code: output=c049d7d79be26cda input=060876448ab567a2]*/ + + +/*[clinic input] +bool_return -> bool +[clinic start generated code]*/ + +PyDoc_STRVAR(bool_return__doc__, +"bool_return($module, /)\n" +"--\n" +"\n"); + +#define BOOL_RETURN_METHODDEF \ + {"bool_return", (PyCFunction)bool_return, METH_NOARGS, bool_return__doc__}, + +static int +bool_return_impl(PyObject *module); + +static PyObject * +bool_return(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + PyObject *return_value = NULL; + int _return_value; + + _return_value = bool_return_impl(module); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyBool_FromLong((long)_return_value); + +exit: + return return_value; +} + +static int +bool_return_impl(PyObject *module) +/*[clinic end generated code: output=3a65f07830e48e98 input=93ba95d39ee98f39]*/ + + +/*[clinic input] +double_return -> double +[clinic start generated code]*/ + +PyDoc_STRVAR(double_return__doc__, +"double_return($module, /)\n" +"--\n" +"\n"); + +#define DOUBLE_RETURN_METHODDEF \ + {"double_return", (PyCFunction)double_return, METH_NOARGS, double_return__doc__}, + +static double +double_return_impl(PyObject *module); + +static PyObject * 
+double_return(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + PyObject *return_value = NULL; + double _return_value; + + _return_value = double_return_impl(module); + if ((_return_value == -1.0) && PyErr_Occurred()) { + goto exit; + } + return_value = PyFloat_FromDouble(_return_value); + +exit: + return return_value; +} + +static double +double_return_impl(PyObject *module) +/*[clinic end generated code: output=076dc72595d3f66d input=da11b6255e4cbfd7]*/ + + +/*[clinic input] +Test.__init__ + a: object + [ + b: object + ] + / +Should generate two PyArg_ParseTuple calls. +[clinic start generated code]*/ + +PyDoc_STRVAR(Test___init____doc__, +"Test(a, [b])\n" +"Should generate two PyArg_ParseTuple calls."); + +static int +Test___init___impl(TestObj *self, PyObject *a, int group_right_1, + PyObject *b); + +static int +Test___init__(PyObject *self, PyObject *args, PyObject *kwargs) +{ + int return_value = -1; + PyTypeObject *base_tp = TestType; + PyObject *a; + int group_right_1 = 0; + PyObject *b = NULL; + + if ((Py_IS_TYPE(self, base_tp) || + Py_TYPE(self)->tp_new == base_tp->tp_new) && + !_PyArg_NoKeywords("Test", kwargs)) { + goto exit; + } + switch (PyTuple_GET_SIZE(args)) { + case 1: + if (!PyArg_ParseTuple(args, "O:__init__", &a)) { + goto exit; + } + break; + case 2: + if (!PyArg_ParseTuple(args, "OO:__init__", &a, &b)) { + goto exit; + } + group_right_1 = 1; + break; + default: + PyErr_SetString(PyExc_TypeError, "Test.__init__ requires 1 to 2 arguments"); + goto exit; + } + return_value = Test___init___impl((TestObj *)self, a, group_right_1, b); + +exit: + return return_value; +} + +static int +Test___init___impl(TestObj *self, PyObject *a, int group_right_1, + PyObject *b) +/*[clinic end generated code: output=2bbb8ea60e8f57a6 input=10f5d0f1e8e466ef]*/ + + +/*[clinic input] +Test._pyarg_parsestackandkeywords + cls: defining_class + key: str(accept={str, robuffer}, zeroes=True) + / +Check that _PyArg_ParseStackAndKeywords() is generated. 
+[clinic start generated code]*/ + +PyDoc_STRVAR(Test__pyarg_parsestackandkeywords__doc__, +"_pyarg_parsestackandkeywords($self, key, /)\n" +"--\n" +"\n" +"Check that _PyArg_ParseStackAndKeywords() is generated."); + +#define TEST__PYARG_PARSESTACKANDKEYWORDS_METHODDEF \ + {"_pyarg_parsestackandkeywords", _PyCFunction_CAST(Test__pyarg_parsestackandkeywords), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, Test__pyarg_parsestackandkeywords__doc__}, + +static PyObject * +Test__pyarg_parsestackandkeywords_impl(TestObj *self, PyTypeObject *cls, + const char *key, + Py_ssize_t key_length); + +static PyObject * +Test__pyarg_parsestackandkeywords(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + # define KWTUPLE (PyObject *)&_Py_SINGLETON(tuple_empty) + #else + # define KWTUPLE NULL + #endif + + static const char * const _keywords[] = {"", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .format = "s#:_pyarg_parsestackandkeywords", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + const char *key; + Py_ssize_t key_length; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &key, &key_length)) { + goto exit; + } + return_value = Test__pyarg_parsestackandkeywords_impl(self, cls, key, key_length); + +exit: + return return_value; +} + +static PyObject * +Test__pyarg_parsestackandkeywords_impl(TestObj *self, PyTypeObject *cls, + const char *key, + Py_ssize_t key_length) +/*[clinic end generated code: output=4fda8a7f2547137c input=fc72ef4b4cfafabc]*/ + + +/*[clinic input] +Test.__init__ -> long +Test overriding the __init__ return converter +[clinic start generated code]*/ + +PyDoc_STRVAR(Test___init____doc__, +"Test()\n" +"--\n" +"\n" +"Test overriding the __init__ return converter"); + +static long +Test___init___impl(TestObj *self); + +static int +Test___init__(PyObject *self, PyObject *args, PyObject *kwargs) +{ + int return_value = -1; + PyTypeObject *base_tp = TestType; + long _return_value; + + if ((Py_IS_TYPE(self, base_tp) || + Py_TYPE(self)->tp_new == base_tp->tp_new) && + !_PyArg_NoPositional("Test", args)) { + goto exit; + } + if ((Py_IS_TYPE(self, base_tp) || + Py_TYPE(self)->tp_new == base_tp->tp_new) && + !_PyArg_NoKeywords("Test", kwargs)) { + goto exit; + } + _return_value = Test___init___impl((TestObj *)self); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromLong(_return_value); + +exit: + return return_value; +} + +static long +Test___init___impl(TestObj *self) +/*[clinic end generated code: output=daf6ee12c4e443fb input=311af0dc7f17e8e9]*/ + + +/*[clinic input] +fn_with_default_binop_expr + arg: object(c_default='CONST_A + CONST_B') = a+b +[clinic start generated code]*/ + +PyDoc_STRVAR(fn_with_default_binop_expr__doc__, +"fn_with_default_binop_expr($module, /, arg=a+b)\n" +"--\n" +"\n"); + +#define FN_WITH_DEFAULT_BINOP_EXPR_METHODDEF \ + {"fn_with_default_binop_expr", _PyCFunction_CAST(fn_with_default_binop_expr), METH_FASTCALL|METH_KEYWORDS, fn_with_default_binop_expr__doc__}, + +static PyObject * +fn_with_default_binop_expr_impl(PyObject *module, PyObject *arg); + +static PyObject * +fn_with_default_binop_expr(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + 
PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(arg), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"arg", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "fn_with_default_binop_expr", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; + PyObject *arg = CONST_A + CONST_B; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + arg = args[0]; +skip_optional_pos: + return_value = fn_with_default_binop_expr_impl(module, arg); + +exit: + return return_value; +} + +static PyObject * +fn_with_default_binop_expr_impl(PyObject *module, PyObject *arg) +/*[clinic end generated code: output=018672772e4092ff input=1b55c8ae68d89453]*/ + + +/*[python input] +class Custom_converter(CConverter): + type = "str" + default = "Hello!" + converter = "c_converter_func" +[python start generated code]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=d612708f0efb8e3c]*/ + +/*[clinic input] +docstr_fallback_to_converter_default + a: Custom +Check docstring default value fallback. + +Verify that the docstring formatter fetches the default +value from the converter if no 'py_default' is found. +The signature should have the default a='Hello!', +as given by the Custom converter. +[clinic start generated code]*/ + +PyDoc_STRVAR(docstr_fallback_to_converter_default__doc__, +"docstr_fallback_to_converter_default($module, /, a=\'Hello!\')\n" +"--\n" +"\n" +"Check docstring default value fallback.\n" +"\n" +"Verify that the docstring formatter fetches the default\n" +"value from the converter if no \'py_default\' is found.\n" +"The signature should have the default a=\'Hello!\',\n" +"as given by the Custom converter."); + +#define DOCSTR_FALLBACK_TO_CONVERTER_DEFAULT_METHODDEF \ + {"docstr_fallback_to_converter_default", _PyCFunction_CAST(docstr_fallback_to_converter_default), METH_FASTCALL|METH_KEYWORDS, docstr_fallback_to_converter_default__doc__}, + +static PyObject * +docstr_fallback_to_converter_default_impl(PyObject *module, str a); + +static PyObject * +docstr_fallback_to_converter_default(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "docstr_fallback_to_converter_default", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + str a; + + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!c_converter_func(args[0], &a)) { + goto exit; + } + return_value = 
docstr_fallback_to_converter_default_impl(module, a); + +exit: + return return_value; +} + +static PyObject * +docstr_fallback_to_converter_default_impl(PyObject *module, str a) +/*[clinic end generated code: output=ae24a9c6f60ee8a6 input=0cbe6a4d24bc2274]*/ diff --git a/Lib/test/list_tests.py b/Lib/test/list_tests.py index fe3ee80b8d461f..b1ef332522d2ce 100644 --- a/Lib/test/list_tests.py +++ b/Lib/test/list_tests.py @@ -6,7 +6,7 @@ from functools import cmp_to_key from test import seq_tests -from test.support import ALWAYS_EQ, NEVER_EQ +from test.support import ALWAYS_EQ, NEVER_EQ, C_RECURSION_LIMIT class CommonTest(seq_tests.CommonTest): @@ -61,7 +61,7 @@ def test_repr(self): def test_repr_deep(self): a = self.type2test([]) - for i in range(sys.getrecursionlimit() + 100): + for i in range(C_RECURSION_LIMIT + 1): a = self.type2test([a]) self.assertRaises(RecursionError, repr, a) diff --git a/Lib/test/mapping_tests.py b/Lib/test/mapping_tests.py index 613206a0855aea..5492bbf86d1f87 100644 --- a/Lib/test/mapping_tests.py +++ b/Lib/test/mapping_tests.py @@ -2,6 +2,7 @@ import unittest import collections import sys +from test.support import C_RECURSION_LIMIT class BasicTestMappingProtocol(unittest.TestCase): @@ -624,7 +625,7 @@ def __repr__(self): def test_repr_deep(self): d = self._empty_mapping() - for i in range(sys.getrecursionlimit() + 100): + for i in range(C_RECURSION_LIMIT + 1): d0 = d d = self._empty_mapping() d[1] = d0 diff --git a/Lib/test/pythoninfo.py b/Lib/test/pythoninfo.py index b84c14400d42f0..e4e098dd84cfb9 100644 --- a/Lib/test/pythoninfo.py +++ b/Lib/test/pythoninfo.py @@ -637,11 +637,11 @@ def collect_decimal(info_add): def collect_testcapi(info_add): try: - import _testcapi + import _testinternalcapi except ImportError: return - call_func(info_add, 'pymem.allocator', _testcapi, 'pymem_getallocatorsname') + call_func(info_add, 'pymem.allocator', _testinternalcapi, 'pymem_getallocatorsname') def collect_resource(info_add): diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 3b332f49951f0c..64c66d8e25d9cd 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -6,7 +6,7 @@ import contextlib import functools import getpass -import opcode +import _opcode import os import re import stat @@ -64,7 +64,8 @@ "run_with_tz", "PGO", "missing_compiler_executable", "ALWAYS_EQ", "NEVER_EQ", "LARGEST", "SMALLEST", "LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT", - "Py_DEBUG", "EXCEEDS_RECURSION_LIMIT", + "Py_DEBUG", "EXCEEDS_RECURSION_LIMIT", "C_RECURSION_LIMIT", + "skip_on_s390x", ] @@ -1092,7 +1093,7 @@ def requires_limited_api(test): def requires_specialization(test): return unittest.skipUnless( - opcode.ENABLE_SPECIALIZATION, "requires specialization")(test) + _opcode.ENABLE_SPECIALIZATION, "requires specialization")(test) def _filter_suite(suite, pred): """Recursively filter test cases in a suite based on a predicate.""" @@ -2460,3 +2461,10 @@ def adjust_int_max_str_digits(max_digits): #For recursion tests, easily exceeds default recursion limit EXCEEDS_RECURSION_LIMIT = 5000 + +# The default C recursion limit (from Include/cpython/pystate.h). +C_RECURSION_LIMIT = 1500 + +#Windows doesn't have os.uname() but it doesn't support s390x. 
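+#The hasattr() guard keeps this check safe on platforms without os.uname()
+#(such as Windows), where the decorator then never skips.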
+skip_on_s390x = unittest.skipIf(hasattr(os, 'uname') and os.uname().machine == 's390x', + 'skipped on s390x') diff --git a/Lib/test/test__opcode.py b/Lib/test/test__opcode.py index b3a9bcbe160453..c1f612dc4a63cb 100644 --- a/Lib/test/test__opcode.py +++ b/Lib/test/test__opcode.py @@ -106,7 +106,7 @@ def test_specialization_stats(self): specialized_opcodes = [ op.lower() for op in opcode._specializations - if opcode._inline_cache_entries[opcode.opmap[op]] + if opcode._inline_cache_entries.get(op, 0) ] self.assertIn('load_attr', specialized_opcodes) self.assertIn('binary_subscr', specialized_opcodes) diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index a03fa4c7187b05..5346b39043f0f5 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -1084,6 +1084,7 @@ def next(self): return self enum._test_simple_enum(_Precedence, ast._Precedence) + @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI") @support.cpython_only def test_ast_recursion_limit(self): fail_depth = support.EXCEEDS_RECURSION_LIMIT diff --git a/Lib/test/test_asyncio/test_selector_events.py b/Lib/test/test_asyncio/test_selector_events.py index 47693ea4d3ce2e..c22b780b5edcb8 100644 --- a/Lib/test/test_asyncio/test_selector_events.py +++ b/Lib/test/test_asyncio/test_selector_events.py @@ -178,7 +178,7 @@ def test_sock_connect_resolve_using_socket_params(self, m_gai): sock.connect.assert_called_with(('127.0.0.1', 0)) def test_add_reader(self): - self.loop._selector.get_key.side_effect = KeyError + self.loop._selector.get_map.return_value = {} cb = lambda: True self.loop.add_reader(1, cb) @@ -192,8 +192,8 @@ def test_add_reader(self): def test_add_reader_existing(self): reader = mock.Mock() writer = mock.Mock() - self.loop._selector.get_key.return_value = selectors.SelectorKey( - 1, 1, selectors.EVENT_WRITE, (reader, writer)) + self.loop._selector.get_map.return_value = {1: selectors.SelectorKey( + 1, 1, selectors.EVENT_WRITE, (reader, writer))} cb = lambda: True self.loop.add_reader(1, cb) @@ -208,8 +208,8 @@ def test_add_reader_existing(self): def test_add_reader_existing_writer(self): writer = mock.Mock() - self.loop._selector.get_key.return_value = selectors.SelectorKey( - 1, 1, selectors.EVENT_WRITE, (None, writer)) + self.loop._selector.get_map.return_value = {1: selectors.SelectorKey( + 1, 1, selectors.EVENT_WRITE, (None, writer))} cb = lambda: True self.loop.add_reader(1, cb) @@ -222,8 +222,8 @@ def test_add_reader_existing_writer(self): self.assertEqual(writer, w) def test_remove_reader(self): - self.loop._selector.get_key.return_value = selectors.SelectorKey( - 1, 1, selectors.EVENT_READ, (None, None)) + self.loop._selector.get_map.return_value = {1: selectors.SelectorKey( + 1, 1, selectors.EVENT_READ, (None, None))} self.assertFalse(self.loop.remove_reader(1)) self.assertTrue(self.loop._selector.unregister.called) @@ -231,9 +231,9 @@ def test_remove_reader(self): def test_remove_reader_read_write(self): reader = mock.Mock() writer = mock.Mock() - self.loop._selector.get_key.return_value = selectors.SelectorKey( + self.loop._selector.get_map.return_value = {1: selectors.SelectorKey( 1, 1, selectors.EVENT_READ | selectors.EVENT_WRITE, - (reader, writer)) + (reader, writer))} self.assertTrue( self.loop.remove_reader(1)) @@ -243,12 +243,12 @@ def test_remove_reader_read_write(self): self.loop._selector.modify.call_args[0]) def test_remove_reader_unknown(self): - self.loop._selector.get_key.side_effect = KeyError + self.loop._selector.get_map.return_value = {} self.assertFalse( 
self.loop.remove_reader(1)) def test_add_writer(self): - self.loop._selector.get_key.side_effect = KeyError + self.loop._selector.get_map.return_value = {} cb = lambda: True self.loop.add_writer(1, cb) @@ -262,8 +262,8 @@ def test_add_writer(self): def test_add_writer_existing(self): reader = mock.Mock() writer = mock.Mock() - self.loop._selector.get_key.return_value = selectors.SelectorKey( - 1, 1, selectors.EVENT_READ, (reader, writer)) + self.loop._selector.get_map.return_value = {1: selectors.SelectorKey( + 1, 1, selectors.EVENT_READ, (reader, writer))} cb = lambda: True self.loop.add_writer(1, cb) @@ -277,8 +277,8 @@ def test_add_writer_existing(self): self.assertEqual(cb, w._callback) def test_remove_writer(self): - self.loop._selector.get_key.return_value = selectors.SelectorKey( - 1, 1, selectors.EVENT_WRITE, (None, None)) + self.loop._selector.get_map.return_value = {1: selectors.SelectorKey( + 1, 1, selectors.EVENT_WRITE, (None, None))} self.assertFalse(self.loop.remove_writer(1)) self.assertTrue(self.loop._selector.unregister.called) @@ -286,9 +286,9 @@ def test_remove_writer(self): def test_remove_writer_read_write(self): reader = mock.Mock() writer = mock.Mock() - self.loop._selector.get_key.return_value = selectors.SelectorKey( + self.loop._selector.get_map.return_value = {1: selectors.SelectorKey( 1, 1, selectors.EVENT_READ | selectors.EVENT_WRITE, - (reader, writer)) + (reader, writer))} self.assertTrue( self.loop.remove_writer(1)) @@ -298,7 +298,7 @@ def test_remove_writer_read_write(self): self.loop._selector.modify.call_args[0]) def test_remove_writer_unknown(self): - self.loop._selector.get_key.side_effect = KeyError + self.loop._selector.get_map.return_value = {} self.assertFalse( self.loop.remove_writer(1)) diff --git a/Lib/test/test_asyncio/test_streams.py b/Lib/test/test_asyncio/test_streams.py index 7f9dc621808358..9c92e75886c593 100644 --- a/Lib/test/test_asyncio/test_streams.py +++ b/Lib/test/test_asyncio/test_streams.py @@ -1074,6 +1074,29 @@ def test_eof_feed_when_closing_writer(self): self.assertEqual(messages, []) + def test_unclosed_resource_warnings(self): + async def inner(httpd): + rd, wr = await asyncio.open_connection(*httpd.address) + + wr.write(b'GET / HTTP/1.0\r\n\r\n') + data = await rd.readline() + self.assertEqual(data, b'HTTP/1.0 200 OK\r\n') + data = await rd.read() + self.assertTrue(data.endswith(b'\r\n\r\nTest message')) + with self.assertWarns(ResourceWarning): + del wr + gc.collect() + + + messages = [] + self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx)) + + with test_utils.run_test_server() as httpd: + self.loop.run_until_complete(inner(httpd)) + + self.assertEqual(messages, []) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_audit.py b/Lib/test/test_audit.py index 0b69864751d83d..b12ffa5d872e83 100644 --- a/Lib/test/test_audit.py +++ b/Lib/test/test_audit.py @@ -257,5 +257,18 @@ def test_not_in_gc(self): self.fail(stderr) + def test_sys_monitoring_register_callback(self): + returncode, events, stderr = self.run_python("test_sys_monitoring_register_callback") + if returncode: + self.fail(stderr) + + if support.verbose: + print(*events, sep='\n') + actual = [(ev[0], ev[2]) for ev in events] + expected = [("sys.monitoring.register_callback", "(None,)")] + + self.assertEqual(actual, expected) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_bytes.py b/Lib/test/test_bytes.py index 7c62b722059d12..afd506f07520d8 100644 --- a/Lib/test/test_bytes.py +++ 
b/Lib/test/test_bytes.py @@ -1354,7 +1354,7 @@ def do_tests(setitem): except ValueError: pass try: - setitem(b, 0, None) + setitem(b, 0, object()) self.fail("Didn't raise TypeError") except TypeError: pass diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py index 0df084e17a3c8e..1f9ffc5e9a5c33 100644 --- a/Lib/test/test_calendar.py +++ b/Lib/test/test_calendar.py @@ -3,11 +3,13 @@ from test import support from test.support.script_helper import assert_python_ok, assert_python_failure -import time -import locale -import sys +import contextlib import datetime +import io +import locale import os +import sys +import time # From https://en.wikipedia.org/wiki/Leap_year_starting_on_Saturday result_0_02_text = """\ @@ -549,26 +551,92 @@ def test_months(self): # verify it "acts like a sequence" in two forms of iteration self.assertEqual(value[::-1], list(reversed(value))) - def test_locale_calendars(self): + def test_locale_text_calendar(self): + try: + cal = calendar.LocaleTextCalendar(locale='') + local_weekday = cal.formatweekday(1, 10) + local_weekday_abbr = cal.formatweekday(1, 3) + local_month = cal.formatmonthname(2010, 10, 10) + except locale.Error: + # cannot set the system default locale -- skip rest of test + raise unittest.SkipTest('cannot set the system default locale') + self.assertIsInstance(local_weekday, str) + self.assertIsInstance(local_weekday_abbr, str) + self.assertIsInstance(local_month, str) + self.assertEqual(len(local_weekday), 10) + self.assertEqual(len(local_weekday_abbr), 3) + self.assertGreaterEqual(len(local_month), 10) + + cal = calendar.LocaleTextCalendar(locale=None) + local_weekday = cal.formatweekday(1, 10) + local_weekday_abbr = cal.formatweekday(1, 3) + local_month = cal.formatmonthname(2010, 10, 10) + self.assertIsInstance(local_weekday, str) + self.assertIsInstance(local_weekday_abbr, str) + self.assertIsInstance(local_month, str) + self.assertEqual(len(local_weekday), 10) + self.assertEqual(len(local_weekday_abbr), 3) + self.assertGreaterEqual(len(local_month), 10) + + cal = calendar.LocaleTextCalendar(locale='C') + local_weekday = cal.formatweekday(1, 10) + local_weekday_abbr = cal.formatweekday(1, 3) + local_month = cal.formatmonthname(2010, 10, 10) + self.assertIsInstance(local_weekday, str) + self.assertIsInstance(local_weekday_abbr, str) + self.assertIsInstance(local_month, str) + self.assertEqual(len(local_weekday), 10) + self.assertEqual(len(local_weekday_abbr), 3) + self.assertGreaterEqual(len(local_month), 10) + + def test_locale_html_calendar(self): + try: + cal = calendar.LocaleHTMLCalendar(locale='') + local_weekday = cal.formatweekday(1) + local_month = cal.formatmonthname(2010, 10) + except locale.Error: + # cannot set the system default locale -- skip rest of test + raise unittest.SkipTest('cannot set the system default locale') + self.assertIsInstance(local_weekday, str) + self.assertIsInstance(local_month, str) + + cal = calendar.LocaleHTMLCalendar(locale=None) + local_weekday = cal.formatweekday(1) + local_month = cal.formatmonthname(2010, 10) + self.assertIsInstance(local_weekday, str) + self.assertIsInstance(local_month, str) + + cal = calendar.LocaleHTMLCalendar(locale='C') + local_weekday = cal.formatweekday(1) + local_month = cal.formatmonthname(2010, 10) + self.assertIsInstance(local_weekday, str) + self.assertIsInstance(local_month, str) + + def test_locale_calendars_reset_locale_properly(self): # ensure that Locale{Text,HTML}Calendar resets the locale properly # (it is still not thread-safe though) old_october = 
calendar.TextCalendar().formatmonthname(2010, 10, 10) try: cal = calendar.LocaleTextCalendar(locale='') local_weekday = cal.formatweekday(1, 10) + local_weekday_abbr = cal.formatweekday(1, 3) local_month = cal.formatmonthname(2010, 10, 10) except locale.Error: # cannot set the system default locale -- skip rest of test raise unittest.SkipTest('cannot set the system default locale') self.assertIsInstance(local_weekday, str) + self.assertIsInstance(local_weekday_abbr, str) self.assertIsInstance(local_month, str) self.assertEqual(len(local_weekday), 10) + self.assertEqual(len(local_weekday_abbr), 3) self.assertGreaterEqual(len(local_month), 10) + cal = calendar.LocaleHTMLCalendar(locale='') local_weekday = cal.formatweekday(1) local_month = cal.formatmonthname(2010, 10) self.assertIsInstance(local_weekday, str) self.assertIsInstance(local_month, str) + new_october = calendar.TextCalendar().formatmonthname(2010, 10, 10) self.assertEqual(old_october, new_october) @@ -589,6 +657,21 @@ def test_locale_calendar_formatweekday(self): except locale.Error: raise unittest.SkipTest('cannot set the en_US locale') + def test_locale_calendar_formatmonthname(self): + try: + # formatmonthname uses the same month names regardless of the width argument. + cal = calendar.LocaleTextCalendar(locale='en_US') + # For too short widths, a full name (with year) is used. + self.assertEqual(cal.formatmonthname(2022, 6, 2, withyear=False), "June") + self.assertEqual(cal.formatmonthname(2022, 6, 2, withyear=True), "June 2022") + self.assertEqual(cal.formatmonthname(2022, 6, 3, withyear=False), "June") + self.assertEqual(cal.formatmonthname(2022, 6, 3, withyear=True), "June 2022") + # For long widths, a centered name is used. + self.assertEqual(cal.formatmonthname(2022, 6, 10, withyear=False), " June ") + self.assertEqual(cal.formatmonthname(2022, 6, 15, withyear=True), " June 2022 ") + except locale.Error: + raise unittest.SkipTest('cannot set the en_US locale') + def test_locale_html_calendar_custom_css_class_month_name(self): try: cal = calendar.LocaleHTMLCalendar(locale='') @@ -847,46 +930,104 @@ def conv(s): return s.replace('\n', os.linesep).encode() class CommandLineTestCase(unittest.TestCase): - def run_ok(self, *args): + def setUp(self): + self.runners = [self.run_cli_ok, self.run_cmd_ok] + + @contextlib.contextmanager + def captured_stdout_with_buffer(self): + orig_stdout = sys.stdout + buffer = io.BytesIO() + sys.stdout = io.TextIOWrapper(buffer) + try: + yield sys.stdout + finally: + sys.stdout.flush() + sys.stdout.buffer.seek(0) + sys.stdout = orig_stdout + + @contextlib.contextmanager + def captured_stderr_with_buffer(self): + orig_stderr = sys.stderr + buffer = io.BytesIO() + sys.stderr = io.TextIOWrapper(buffer) + try: + yield sys.stderr + finally: + sys.stderr.flush() + sys.stderr.buffer.seek(0) + sys.stderr = orig_stderr + + def run_cli_ok(self, *args): + with self.captured_stdout_with_buffer() as stdout: + calendar.main(args) + return stdout.buffer.read() + + def run_cmd_ok(self, *args): return assert_python_ok('-m', 'calendar', *args)[1] - def assertFailure(self, *args): + def assertCLIFails(self, *args): + with self.captured_stderr_with_buffer() as stderr: + self.assertRaises(SystemExit, calendar.main, args) + stderr = stderr.buffer.read() + self.assertIn(b'usage:', stderr) + return stderr + + def assertCmdFails(self, *args): rc, stdout, stderr = assert_python_failure('-m', 'calendar', *args) self.assertIn(b'usage:', stderr) self.assertEqual(rc, 2) + return rc, stdout, stderr + + def 
assertFailure(self, *args): + self.assertCLIFails(*args) + self.assertCmdFails(*args) def test_help(self): - stdout = self.run_ok('-h') + stdout = self.run_cmd_ok('-h') self.assertIn(b'usage:', stdout) self.assertIn(b'calendar.py', stdout) self.assertIn(b'--help', stdout) + # special case: stdout but sys.exit() + with self.captured_stdout_with_buffer() as output: + self.assertRaises(SystemExit, calendar.main, ['-h']) + output = output.buffer.read() + self.assertIn(b'usage:', output) + self.assertIn(b'--help', output) + def test_illegal_arguments(self): self.assertFailure('-z') self.assertFailure('spam') self.assertFailure('2004', 'spam') + self.assertFailure('2004', '1', 'spam') + self.assertFailure('2004', '1', '1') + self.assertFailure('2004', '1', '1', 'spam') self.assertFailure('-t', 'html', '2004', '1') def test_output_current_year(self): - stdout = self.run_ok() - year = datetime.datetime.now().year - self.assertIn((' %s' % year).encode(), stdout) - self.assertIn(b'January', stdout) - self.assertIn(b'Mo Tu We Th Fr Sa Su', stdout) + for run in self.runners: + output = run() + year = datetime.datetime.now().year + self.assertIn(conv(' %s' % year), output) + self.assertIn(b'January', output) + self.assertIn(b'Mo Tu We Th Fr Sa Su', output) def test_output_year(self): - stdout = self.run_ok('2004') - self.assertEqual(stdout, conv(result_2004_text)) + for run in self.runners: + output = run('2004') + self.assertEqual(output, conv(result_2004_text)) def test_output_month(self): - stdout = self.run_ok('2004', '1') - self.assertEqual(stdout, conv(result_2004_01_text)) + for run in self.runners: + output = run('2004', '1') + self.assertEqual(output, conv(result_2004_01_text)) def test_option_encoding(self): self.assertFailure('-e') self.assertFailure('--encoding') - stdout = self.run_ok('--encoding', 'utf-16-le', '2004') - self.assertEqual(stdout, result_2004_text.encode('utf-16-le')) + for run in self.runners: + output = run('--encoding', 'utf-16-le', '2004') + self.assertEqual(output, result_2004_text.encode('utf-16-le')) def test_option_locale(self): self.assertFailure('-L') @@ -904,66 +1045,75 @@ def test_option_locale(self): locale.setlocale(locale.LC_TIME, oldlocale) except (locale.Error, ValueError): self.skipTest('cannot set the system default locale') - stdout = self.run_ok('--locale', lang, '--encoding', enc, '2004') - self.assertIn('2004'.encode(enc), stdout) + for run in self.runners: + for type in ('text', 'html'): + output = run( + '--type', type, '--locale', lang, '--encoding', enc, '2004' + ) + self.assertIn('2004'.encode(enc), output) def test_option_width(self): self.assertFailure('-w') self.assertFailure('--width') self.assertFailure('-w', 'spam') - stdout = self.run_ok('--width', '3', '2004') - self.assertIn(b'Mon Tue Wed Thu Fri Sat Sun', stdout) + for run in self.runners: + output = run('--width', '3', '2004') + self.assertIn(b'Mon Tue Wed Thu Fri Sat Sun', output) def test_option_lines(self): self.assertFailure('-l') self.assertFailure('--lines') self.assertFailure('-l', 'spam') - stdout = self.run_ok('--lines', '2', '2004') - self.assertIn(conv('December\n\nMo Tu We'), stdout) + for run in self.runners: + output = run('--lines', '2', '2004') + self.assertIn(conv('December\n\nMo Tu We'), output) def test_option_spacing(self): self.assertFailure('-s') self.assertFailure('--spacing') self.assertFailure('-s', 'spam') - stdout = self.run_ok('--spacing', '8', '2004') - self.assertIn(b'Su Mo', stdout) + for run in self.runners: + output = run('--spacing', '8', '2004') + 
self.assertIn(b'Su Mo', output) def test_option_months(self): self.assertFailure('-m') self.assertFailure('--month') self.assertFailure('-m', 'spam') - stdout = self.run_ok('--months', '1', '2004') - self.assertIn(conv('\nMo Tu We Th Fr Sa Su\n'), stdout) + for run in self.runners: + output = run('--months', '1', '2004') + self.assertIn(conv('\nMo Tu We Th Fr Sa Su\n'), output) def test_option_type(self): self.assertFailure('-t') self.assertFailure('--type') self.assertFailure('-t', 'spam') - stdout = self.run_ok('--type', 'text', '2004') - self.assertEqual(stdout, conv(result_2004_text)) - stdout = self.run_ok('--type', 'html', '2004') - self.assertEqual(stdout[:6], b'Calendar for 2004', stdout) + for run in self.runners: + output = run('--type', 'text', '2004') + self.assertEqual(output, conv(result_2004_text)) + output = run('--type', 'html', '2004') + self.assertEqual(output[:6], b'Calendar for 2004', output) def test_html_output_current_year(self): - stdout = self.run_ok('--type', 'html') - year = datetime.datetime.now().year - self.assertIn(('Calendar for %s' % year).encode(), - stdout) - self.assertIn(b'January', - stdout) + for run in self.runners: + output = run('--type', 'html') + year = datetime.datetime.now().year + self.assertIn(('Calendar for %s' % year).encode(), output) + self.assertIn(b'January', output) def test_html_output_year_encoding(self): - stdout = self.run_ok('-t', 'html', '--encoding', 'ascii', '2004') - self.assertEqual(stdout, - result_2004_html.format(**default_format).encode('ascii')) + for run in self.runners: + output = run('-t', 'html', '--encoding', 'ascii', '2004') + self.assertEqual(output, result_2004_html.format(**default_format).encode('ascii')) def test_html_output_year_css(self): self.assertFailure('-t', 'html', '-c') self.assertFailure('-t', 'html', '--css') - stdout = self.run_ok('-t', 'html', '--css', 'custom.css', '2004') - self.assertIn(b'', stdout) + for run in self.runners: + output = run('-t', 'html', '--css', 'custom.css', '2004') + self.assertIn(b'', output) class MiscTestCase(unittest.TestCase): diff --git a/Lib/test/test_call.py b/Lib/test/test_call.py index 09a531f8cc627b..c3c3b1853b5736 100644 --- a/Lib/test/test_call.py +++ b/Lib/test/test_call.py @@ -1,5 +1,5 @@ import unittest -from test.support import cpython_only, requires_limited_api +from test.support import cpython_only, requires_limited_api, skip_on_s390x try: import _testcapi except ImportError: @@ -918,6 +918,7 @@ def test_multiple_values(self): @cpython_only class TestRecursion(unittest.TestCase): + @skip_on_s390x def test_super_deep(self): def recurse(n): diff --git a/Lib/test/test_capi/check_config.py b/Lib/test/test_capi/check_config.py index aaedd82f39af50..eb99ae16f2b69e 100644 --- a/Lib/test/test_capi/check_config.py +++ b/Lib/test/test_capi/check_config.py @@ -12,7 +12,7 @@ def import_singlephase(): try: import _testsinglephase except ImportError: - sys.modules.pop('_testsinglephase') + sys.modules.pop('_testsinglephase', None) return False else: del sys.modules['_testsinglephase'] diff --git a/Lib/test/test_capi/test_abstract.py b/Lib/test/test_capi/test_abstract.py new file mode 100644 index 00000000000000..3f51e5b28104d9 --- /dev/null +++ b/Lib/test/test_capi/test_abstract.py @@ -0,0 +1,722 @@ +import unittest +import sys +from collections import OrderedDict +from test import support +from test.support import import_helper +import _testcapi + + +NULL = None + +class TestObject: + @property + def evil(self): + raise RuntimeError('do not get evil') + @evil.setter + 
def evil(self, value): + raise RuntimeError('do not set evil') + @evil.deleter + def evil(self): + raise RuntimeError('do not del evil') + +class ProxyGetItem: + def __init__(self, obj): + self.obj = obj + def __getitem__(self, key): + return self.obj[key] + +class ProxySetItem: + def __init__(self, obj): + self.obj = obj + def __setitem__(self, key, value): + self.obj[key] = value + +class ProxyDelItem: + def __init__(self, obj): + self.obj = obj + def __delitem__(self, key): + del self.obj[key] + +def gen(): + yield 'a' + yield 'b' + yield 'c' + + +class CAPITest(unittest.TestCase): + + def test_object_getattr(self): + xgetattr = _testcapi.object_getattr + obj = TestObject() + obj.a = 11 + setattr(obj, '\U0001f40d', 22) + self.assertEqual(xgetattr(obj, 'a'), 11) + self.assertRaises(AttributeError, xgetattr, obj, 'b') + self.assertEqual(xgetattr(obj, '\U0001f40d'), 22) + + self.assertRaises(RuntimeError, xgetattr, obj, 'evil') + self.assertRaises(TypeError, xgetattr, obj, 1) + # CRASHES xgetattr(obj, NULL) + # CRASHES xgetattr(NULL, 'a') + + def test_object_getattrstring(self): + getattrstring = _testcapi.object_getattrstring + obj = TestObject() + obj.a = 11 + setattr(obj, '\U0001f40d', 22) + self.assertEqual(getattrstring(obj, b'a'), 11) + self.assertRaises(AttributeError, getattrstring, obj, b'b') + self.assertEqual(getattrstring(obj, '\U0001f40d'.encode()), 22) + + self.assertRaises(RuntimeError, getattrstring, obj, b'evil') + self.assertRaises(UnicodeDecodeError, getattrstring, obj, b'\xff') + # CRASHES getattrstring(obj, NULL) + # CRASHES getattrstring(NULL, b'a') + + def test_object_getoptionalattr(self): + getoptionalattr = _testcapi.object_getoptionalattr + obj = TestObject() + obj.a = 11 + setattr(obj, '\U0001f40d', 22) + self.assertEqual(getoptionalattr(obj, 'a'), 11) + self.assertIs(getoptionalattr(obj, 'b'), AttributeError) + self.assertEqual(getoptionalattr(obj, '\U0001f40d'), 22) + + self.assertRaises(RuntimeError, getoptionalattr, obj, 'evil') + self.assertRaises(TypeError, getoptionalattr, obj, 1) + # CRASHES getoptionalattr(obj, NULL) + # CRASHES getoptionalattr(NULL, 'a') + + def test_object_getoptionalattrstring(self): + getoptionalattrstring = _testcapi.object_getoptionalattrstring + obj = TestObject() + obj.a = 11 + setattr(obj, '\U0001f40d', 22) + self.assertEqual(getoptionalattrstring(obj, b'a'), 11) + self.assertIs(getoptionalattrstring(obj, b'b'), AttributeError) + self.assertEqual(getoptionalattrstring(obj, '\U0001f40d'.encode()), 22) + + self.assertRaises(RuntimeError, getoptionalattrstring, obj, b'evil') + self.assertRaises(UnicodeDecodeError, getoptionalattrstring, obj, b'\xff') + # CRASHES getoptionalattrstring(obj, NULL) + # CRASHES getoptionalattrstring(NULL, b'a') + + def test_object_hasattr(self): + xhasattr = _testcapi.object_hasattr + obj = TestObject() + obj.a = 1 + setattr(obj, '\U0001f40d', 2) + self.assertTrue(xhasattr(obj, 'a')) + self.assertFalse(xhasattr(obj, 'b')) + self.assertTrue(xhasattr(obj, '\U0001f40d')) + + self.assertFalse(xhasattr(obj, 'evil')) + self.assertFalse(xhasattr(obj, 1)) + # CRASHES xhasattr(obj, NULL) + # CRASHES xhasattr(NULL, 'a') + + def test_object_hasattrstring(self): + hasattrstring = _testcapi.object_hasattrstring + obj = TestObject() + obj.a = 1 + setattr(obj, '\U0001f40d', 2) + self.assertTrue(hasattrstring(obj, b'a')) + self.assertFalse(hasattrstring(obj, b'b')) + self.assertTrue(hasattrstring(obj, '\U0001f40d'.encode())) + + self.assertFalse(hasattrstring(obj, b'evil')) + self.assertFalse(hasattrstring(obj, 
b'\xff')) + # CRASHES hasattrstring(obj, NULL) + # CRASHES hasattrstring(NULL, b'a') + + def test_object_setattr(self): + xsetattr = _testcapi.object_setattr + obj = TestObject() + xsetattr(obj, 'a', 5) + self.assertEqual(obj.a, 5) + xsetattr(obj, '\U0001f40d', 8) + self.assertEqual(getattr(obj, '\U0001f40d'), 8) + + # PyObject_SetAttr(obj, attr_name, NULL) removes the attribute + xsetattr(obj, 'a', NULL) + self.assertFalse(hasattr(obj, 'a')) + self.assertRaises(AttributeError, xsetattr, obj, 'b', NULL) + self.assertRaises(RuntimeError, xsetattr, obj, 'evil', NULL) + + self.assertRaises(RuntimeError, xsetattr, obj, 'evil', 'good') + self.assertRaises(AttributeError, xsetattr, 42, 'a', 5) + self.assertRaises(TypeError, xsetattr, obj, 1, 5) + # CRASHES xsetattr(obj, NULL, 5) + # CRASHES xsetattr(NULL, 'a', 5) + + def test_object_setattrstring(self): + setattrstring = _testcapi.object_setattrstring + obj = TestObject() + setattrstring(obj, b'a', 5) + self.assertEqual(obj.a, 5) + setattrstring(obj, '\U0001f40d'.encode(), 8) + self.assertEqual(getattr(obj, '\U0001f40d'), 8) + + # PyObject_SetAttrString(obj, attr_name, NULL) removes the attribute + setattrstring(obj, b'a', NULL) + self.assertFalse(hasattr(obj, 'a')) + self.assertRaises(AttributeError, setattrstring, obj, b'b', NULL) + self.assertRaises(RuntimeError, setattrstring, obj, b'evil', NULL) + + self.assertRaises(RuntimeError, setattrstring, obj, b'evil', 'good') + self.assertRaises(AttributeError, setattrstring, 42, b'a', 5) + self.assertRaises(TypeError, setattrstring, obj, 1, 5) + self.assertRaises(UnicodeDecodeError, setattrstring, obj, b'\xff', 5) + # CRASHES setattrstring(obj, NULL, 5) + # CRASHES setattrstring(NULL, b'a', 5) + + def test_object_delattr(self): + xdelattr = _testcapi.object_delattr + obj = TestObject() + obj.a = 1 + setattr(obj, '\U0001f40d', 2) + xdelattr(obj, 'a') + self.assertFalse(hasattr(obj, 'a')) + self.assertRaises(AttributeError, xdelattr, obj, 'b') + xdelattr(obj, '\U0001f40d') + self.assertFalse(hasattr(obj, '\U0001f40d')) + + self.assertRaises(AttributeError, xdelattr, 42, 'numerator') + self.assertRaises(RuntimeError, xdelattr, obj, 'evil') + self.assertRaises(TypeError, xdelattr, obj, 1) + # CRASHES xdelattr(obj, NULL) + # CRASHES xdelattr(NULL, 'a') + + def test_object_delattrstring(self): + delattrstring = _testcapi.object_delattrstring + obj = TestObject() + obj.a = 1 + setattr(obj, '\U0001f40d', 2) + delattrstring(obj, b'a') + self.assertFalse(hasattr(obj, 'a')) + self.assertRaises(AttributeError, delattrstring, obj, b'b') + delattrstring(obj, '\U0001f40d'.encode()) + self.assertFalse(hasattr(obj, '\U0001f40d')) + + self.assertRaises(AttributeError, delattrstring, 42, b'numerator') + self.assertRaises(RuntimeError, delattrstring, obj, b'evil') + self.assertRaises(UnicodeDecodeError, delattrstring, obj, b'\xff') + # CRASHES delattrstring(obj, NULL) + # CRASHES delattrstring(NULL, b'a') + + + def test_mapping_check(self): + check = _testcapi.mapping_check + self.assertTrue(check({1: 2})) + self.assertTrue(check([1, 2])) + self.assertTrue(check((1, 2))) + self.assertTrue(check('abc')) + self.assertTrue(check(b'abc')) + self.assertFalse(check(42)) + self.assertFalse(check(object())) + self.assertFalse(check(NULL)) + + def test_mapping_size(self): + for size in _testcapi.mapping_size, _testcapi.mapping_length: + self.assertEqual(size({1: 2}), 1) + self.assertEqual(size([1, 2]), 2) + self.assertEqual(size((1, 2)), 2) + self.assertEqual(size('abc'), 3) + self.assertEqual(size(b'abc'), 3) + + 
self.assertRaises(TypeError, size, 42) + self.assertRaises(TypeError, size, object()) + self.assertRaises(SystemError, size, NULL) + + def test_object_getitem(self): + getitem = _testcapi.object_getitem + dct = {'a': 1, '\U0001f40d': 2} + self.assertEqual(getitem(dct, 'a'), 1) + self.assertRaises(KeyError, getitem, dct, 'b') + self.assertEqual(getitem(dct, '\U0001f40d'), 2) + + dct2 = ProxyGetItem(dct) + self.assertEqual(getitem(dct2, 'a'), 1) + self.assertRaises(KeyError, getitem, dct2, 'b') + + self.assertEqual(getitem(['a', 'b', 'c'], 1), 'b') + + self.assertRaises(TypeError, getitem, 42, 'a') + self.assertRaises(TypeError, getitem, {}, []) # unhashable + self.assertRaises(SystemError, getitem, {}, NULL) + self.assertRaises(IndexError, getitem, [], 1) + self.assertRaises(TypeError, getitem, [], 'a') + self.assertRaises(SystemError, getitem, NULL, 'a') + + def test_mapping_getitemstring(self): + getitemstring = _testcapi.mapping_getitemstring + dct = {'a': 1, '\U0001f40d': 2} + self.assertEqual(getitemstring(dct, b'a'), 1) + self.assertRaises(KeyError, getitemstring, dct, b'b') + self.assertEqual(getitemstring(dct, '\U0001f40d'.encode()), 2) + + dct2 = ProxyGetItem(dct) + self.assertEqual(getitemstring(dct2, b'a'), 1) + self.assertRaises(KeyError, getitemstring, dct2, b'b') + + self.assertRaises(TypeError, getitemstring, 42, b'a') + self.assertRaises(UnicodeDecodeError, getitemstring, {}, b'\xff') + self.assertRaises(SystemError, getitemstring, {}, NULL) + self.assertRaises(TypeError, getitemstring, [], b'a') + self.assertRaises(SystemError, getitemstring, NULL, b'a') + + def test_mapping_haskey(self): + haskey = _testcapi.mapping_haskey + dct = {'a': 1, '\U0001f40d': 2} + self.assertTrue(haskey(dct, 'a')) + self.assertFalse(haskey(dct, 'b')) + self.assertTrue(haskey(dct, '\U0001f40d')) + + dct2 = ProxyGetItem(dct) + self.assertTrue(haskey(dct2, 'a')) + self.assertFalse(haskey(dct2, 'b')) + + self.assertTrue(haskey(['a', 'b', 'c'], 1)) + + self.assertFalse(haskey(42, 'a')) + self.assertFalse(haskey({}, [])) # unhashable + self.assertFalse(haskey({}, NULL)) + self.assertFalse(haskey([], 1)) + self.assertFalse(haskey([], 'a')) + self.assertFalse(haskey(NULL, 'a')) + + def test_mapping_haskeystring(self): + haskeystring = _testcapi.mapping_haskeystring + dct = {'a': 1, '\U0001f40d': 2} + self.assertTrue(haskeystring(dct, b'a')) + self.assertFalse(haskeystring(dct, b'b')) + self.assertTrue(haskeystring(dct, '\U0001f40d'.encode())) + + dct2 = ProxyGetItem(dct) + self.assertTrue(haskeystring(dct2, b'a')) + self.assertFalse(haskeystring(dct2, b'b')) + + self.assertFalse(haskeystring(42, b'a')) + self.assertFalse(haskeystring({}, b'\xff')) + self.assertFalse(haskeystring({}, NULL)) + self.assertFalse(haskeystring([], b'a')) + self.assertFalse(haskeystring(NULL, b'a')) + + def test_object_setitem(self): + setitem = _testcapi.object_setitem + dct = {} + setitem(dct, 'a', 5) + self.assertEqual(dct, {'a': 5}) + setitem(dct, '\U0001f40d', 8) + self.assertEqual(dct, {'a': 5, '\U0001f40d': 8}) + + dct = {} + dct2 = ProxySetItem(dct) + setitem(dct2, 'a', 5) + self.assertEqual(dct, {'a': 5}) + + lst = ['a', 'b', 'c'] + setitem(lst, 1, 'x') + self.assertEqual(lst, ['a', 'x', 'c']) + + self.assertRaises(TypeError, setitem, 42, 'a', 5) + self.assertRaises(TypeError, setitem, {}, [], 5) # unhashable + self.assertRaises(SystemError, setitem, {}, NULL, 5) + self.assertRaises(SystemError, setitem, {}, 'a', NULL) + self.assertRaises(IndexError, setitem, [], 1, 5) + self.assertRaises(TypeError, setitem, [], 'a', 
5) + self.assertRaises(TypeError, setitem, (), 1, 5) + self.assertRaises(SystemError, setitem, NULL, 'a', 5) + + def test_mapping_setitemstring(self): + setitemstring = _testcapi.mapping_setitemstring + dct = {} + setitemstring(dct, b'a', 5) + self.assertEqual(dct, {'a': 5}) + setitemstring(dct, '\U0001f40d'.encode(), 8) + self.assertEqual(dct, {'a': 5, '\U0001f40d': 8}) + + dct = {} + dct2 = ProxySetItem(dct) + setitemstring(dct2, b'a', 5) + self.assertEqual(dct, {'a': 5}) + + self.assertRaises(TypeError, setitemstring, 42, b'a', 5) + self.assertRaises(UnicodeDecodeError, setitemstring, {}, b'\xff', 5) + self.assertRaises(SystemError, setitemstring, {}, NULL, 5) + self.assertRaises(SystemError, setitemstring, {}, b'a', NULL) + self.assertRaises(TypeError, setitemstring, [], b'a', 5) + self.assertRaises(SystemError, setitemstring, NULL, b'a', 5) + + def test_object_delitem(self): + for delitem in _testcapi.object_delitem, _testcapi.mapping_delitem: + dct = {'a': 1, 'c': 2, '\U0001f40d': 3} + delitem(dct, 'a') + self.assertEqual(dct, {'c': 2, '\U0001f40d': 3}) + self.assertRaises(KeyError, delitem, dct, 'b') + delitem(dct, '\U0001f40d') + self.assertEqual(dct, {'c': 2}) + + dct = {'a': 1, 'c': 2} + dct2 = ProxyDelItem(dct) + delitem(dct2, 'a') + self.assertEqual(dct, {'c': 2}) + self.assertRaises(KeyError, delitem, dct2, 'b') + + lst = ['a', 'b', 'c'] + delitem(lst, 1) + self.assertEqual(lst, ['a', 'c']) + + self.assertRaises(TypeError, delitem, 42, 'a') + self.assertRaises(TypeError, delitem, {}, []) # unhashable + self.assertRaises(SystemError, delitem, {}, NULL) + self.assertRaises(IndexError, delitem, [], 1) + self.assertRaises(TypeError, delitem, [], 'a') + self.assertRaises(SystemError, delitem, NULL, 'a') + + def test_mapping_delitemstring(self): + delitemstring = _testcapi.mapping_delitemstring + dct = {'a': 1, 'c': 2, '\U0001f40d': 3} + delitemstring(dct, b'a') + self.assertEqual(dct, {'c': 2, '\U0001f40d': 3}) + self.assertRaises(KeyError, delitemstring, dct, b'b') + delitemstring(dct, '\U0001f40d'.encode()) + self.assertEqual(dct, {'c': 2}) + + dct = {'a': 1, 'c': 2} + dct2 = ProxyDelItem(dct) + delitemstring(dct2, b'a') + self.assertEqual(dct, {'c': 2}) + self.assertRaises(KeyError, delitemstring, dct2, b'b') + + self.assertRaises(TypeError, delitemstring, 42, b'a') + self.assertRaises(UnicodeDecodeError, delitemstring, {}, b'\xff') + self.assertRaises(SystemError, delitemstring, {}, NULL) + self.assertRaises(TypeError, delitemstring, [], b'a') + self.assertRaises(SystemError, delitemstring, NULL, b'a') + + def test_mapping_keys_valuesitems(self): + class Mapping1(dict): + def keys(self): + return list(super().keys()) + def values(self): + return list(super().values()) + def items(self): + return list(super().items()) + class Mapping2(dict): + def keys(self): + return tuple(super().keys()) + def values(self): + return tuple(super().values()) + def items(self): + return tuple(super().items()) + dict_obj = {'foo': 1, 'bar': 2, 'spam': 3} + + for mapping in [{}, OrderedDict(), Mapping1(), Mapping2(), + dict_obj, OrderedDict(dict_obj), + Mapping1(dict_obj), Mapping2(dict_obj)]: + self.assertListEqual(_testcapi.mapping_keys(mapping), + list(mapping.keys())) + self.assertListEqual(_testcapi.mapping_values(mapping), + list(mapping.values())) + self.assertListEqual(_testcapi.mapping_items(mapping), + list(mapping.items())) + + def test_mapping_keys_valuesitems_bad_arg(self): + self.assertRaises(AttributeError, _testcapi.mapping_keys, object()) + self.assertRaises(AttributeError, 
_testcapi.mapping_values, object()) + self.assertRaises(AttributeError, _testcapi.mapping_items, object()) + self.assertRaises(AttributeError, _testcapi.mapping_keys, []) + self.assertRaises(AttributeError, _testcapi.mapping_values, []) + self.assertRaises(AttributeError, _testcapi.mapping_items, []) + self.assertRaises(SystemError, _testcapi.mapping_keys, NULL) + self.assertRaises(SystemError, _testcapi.mapping_values, NULL) + self.assertRaises(SystemError, _testcapi.mapping_items, NULL) + + class BadMapping: + def keys(self): + return None + def values(self): + return None + def items(self): + return None + bad_mapping = BadMapping() + self.assertRaises(TypeError, _testcapi.mapping_keys, bad_mapping) + self.assertRaises(TypeError, _testcapi.mapping_values, bad_mapping) + self.assertRaises(TypeError, _testcapi.mapping_items, bad_mapping) + + def test_sequence_check(self): + check = _testcapi.sequence_check + self.assertFalse(check({1: 2})) + self.assertTrue(check([1, 2])) + self.assertTrue(check((1, 2))) + self.assertTrue(check('abc')) + self.assertTrue(check(b'abc')) + self.assertFalse(check(42)) + self.assertFalse(check(object())) + # CRASHES check(NULL) + + def test_sequence_size(self): + for size in _testcapi.sequence_size, _testcapi.sequence_length: + self.assertEqual(size([1, 2]), 2) + self.assertEqual(size((1, 2)), 2) + self.assertEqual(size('abc'), 3) + self.assertEqual(size(b'abc'), 3) + + self.assertRaises(TypeError, size, {}) + self.assertRaises(TypeError, size, 42) + self.assertRaises(TypeError, size, object()) + self.assertRaises(SystemError, size, NULL) + + def test_sequence_getitem(self): + getitem = _testcapi.sequence_getitem + lst = ['a', 'b', 'c'] + self.assertEqual(getitem(lst, 1), 'b') + self.assertEqual(getitem(lst, -1), 'c') + self.assertRaises(IndexError, getitem, lst, 3) + + self.assertRaises(TypeError, getitem, 42, 1) + self.assertRaises(TypeError, getitem, {}, 1) + self.assertRaises(SystemError, getitem, NULL, 1) + + def test_sequence_concat(self): + concat = _testcapi.sequence_concat + self.assertEqual(concat(['a', 'b'], [1, 2]), ['a', 'b', 1, 2]) + self.assertEqual(concat(('a', 'b'), (1, 2)), ('a', 'b', 1, 2)) + + self.assertRaises(TypeError, concat, [], ()) + self.assertRaises(TypeError, concat, (), []) + self.assertRaises(TypeError, concat, [], 42) + self.assertRaises(TypeError, concat, 42, []) + self.assertRaises(TypeError, concat, 42, 43) + self.assertRaises(SystemError, concat, [], NULL) + self.assertRaises(SystemError, concat, NULL, []) + + def test_sequence_repeat(self): + repeat = _testcapi.sequence_repeat + self.assertEqual(repeat(['a', 'b'], 2), ['a', 'b', 'a', 'b']) + self.assertEqual(repeat(('a', 'b'), 2), ('a', 'b', 'a', 'b')) + self.assertEqual(repeat(['a', 'b'], 0), []) + self.assertEqual(repeat(['a', 'b'], -1), []) + + self.assertRaises(TypeError, repeat, set(), 2) + self.assertRaises(TypeError, repeat, 42, 2) + self.assertRaises(SystemError, repeat, NULL, 2) + + def test_sequence_inplaceconcat(self): + inplaceconcat = _testcapi.sequence_inplaceconcat + lst = ['a', 'b'] + res = inplaceconcat(lst, [1, 2]) + self.assertEqual(res, ['a', 'b', 1, 2]) + self.assertIs(res, lst) + lst = ['a', 'b'] + res = inplaceconcat(lst, (1, 2)) + self.assertEqual(res, ['a', 'b', 1, 2]) + self.assertIs(res, lst) + self.assertEqual(inplaceconcat(('a', 'b'), (1, 2)), ('a', 'b', 1, 2)) + + self.assertRaises(TypeError, inplaceconcat, (), []) + self.assertRaises(TypeError, inplaceconcat, [], 42) + self.assertRaises(TypeError, inplaceconcat, 42, []) + 
self.assertRaises(TypeError, inplaceconcat, 42, 43) + self.assertRaises(SystemError, inplaceconcat, [], NULL) + self.assertRaises(SystemError, inplaceconcat, NULL, []) + + def test_sequence_inplacerepeat(self): + inplacerepeat = _testcapi.sequence_inplacerepeat + lst = ['a', 'b'] + res = inplacerepeat(lst, 2) + self.assertEqual(res, ['a', 'b', 'a', 'b']) + self.assertIs(res, lst) + self.assertEqual(inplacerepeat(('a', 'b'), 2), ('a', 'b', 'a', 'b')) + self.assertEqual(inplacerepeat(['a', 'b'], 0), []) + self.assertEqual(inplacerepeat(['a', 'b'], -1), []) + + self.assertRaises(TypeError, inplacerepeat, set(), 2) + self.assertRaises(TypeError, inplacerepeat, 42, 2) + self.assertRaises(SystemError, inplacerepeat, NULL, 2) + + def test_sequence_setitem(self): + setitem = _testcapi.sequence_setitem + lst = ['a', 'b', 'c'] + setitem(lst, 1, 'x') + self.assertEqual(lst, ['a', 'x', 'c']) + setitem(lst, -1, 'y') + self.assertEqual(lst, ['a', 'x', 'y']) + + setitem(lst, 0, NULL) + self.assertEqual(lst, ['x', 'y']) + self.assertRaises(IndexError, setitem, lst, 3, 'x') + + self.assertRaises(TypeError, setitem, 42, 1, 'x') + self.assertRaises(TypeError, setitem, {}, 1, 'x') + self.assertRaises(SystemError, setitem, NULL, 1, 'x') + + def test_sequence_delitem(self): + delitem = _testcapi.sequence_delitem + lst = ['a', 'b', 'c'] + delitem(lst, 1) + self.assertEqual(lst, ['a', 'c']) + delitem(lst, -1) + self.assertEqual(lst, ['a']) + self.assertRaises(IndexError, delitem, lst, 3) + + self.assertRaises(TypeError, delitem, 42, 1) + self.assertRaises(TypeError, delitem, {}, 1) + self.assertRaises(SystemError, delitem, NULL, 1) + + def test_sequence_setslice(self): + setslice = _testcapi.sequence_setslice + + # Correct case: + data = [1, 2, 3, 4, 5] + data_copy = data.copy() + + setslice(data, 1, 3, [8, 9]) + data_copy[1:3] = [8, 9] + self.assertEqual(data, data_copy) + self.assertEqual(data, [1, 8, 9, 4, 5]) + + # Custom class: + class Custom: + def __setitem__(self, index, value): + self.index = index + self.value = value + + c = Custom() + setslice(c, 0, 5, 'abc') + self.assertEqual(c.index, slice(0, 5)) + self.assertEqual(c.value, 'abc') + + # Immutable sequences must raise: + bad_seq1 = (1, 2, 3, 4) + self.assertRaises(TypeError, setslice, bad_seq1, 1, 3, (8, 9)) + self.assertEqual(bad_seq1, (1, 2, 3, 4)) + + bad_seq2 = 'abcd' + self.assertRaises(TypeError, setslice, bad_seq2, 1, 3, 'xy') + self.assertEqual(bad_seq2, 'abcd') + + # Not a sequence: + self.assertRaises(TypeError, setslice, object(), 1, 3, 'xy') + self.assertRaises(SystemError, setslice, NULL, 1, 3, 'xy') + + data_copy = data.copy() + setslice(data_copy, 1, 3, NULL) + self.assertEqual(data_copy, [1, 4, 5]) + + def test_sequence_delslice(self): + delslice = _testcapi.sequence_delslice + + # Correct case: + data = [1, 2, 3, 4, 5] + data_copy = data.copy() + + delslice(data, 1, 3) + del data_copy[1:3] + self.assertEqual(data, data_copy) + self.assertEqual(data, [1, 4, 5]) + + # Custom class: + class Custom: + def __delitem__(self, index): + self.index = index + + c = Custom() + delslice(c, 0, 5) + self.assertEqual(c.index, slice(0, 5)) + + # Immutable sequences must raise: + bad_seq1 = (1, 2, 3, 4) + self.assertRaises(TypeError, delslice, bad_seq1, 1, 3) + self.assertEqual(bad_seq1, (1, 2, 3, 4)) + + bad_seq2 = 'abcd' + self.assertRaises(TypeError, delslice, bad_seq2, 1, 3) + self.assertEqual(bad_seq2, 'abcd') + + # Not a sequence: + self.assertRaises(TypeError, delslice, object(), 1, 3) + self.assertRaises(SystemError, delslice, NULL, 1, 3) + 
+ mapping = {1: 'a', 2: 'b', 3: 'c'} + self.assertRaises(KeyError, delslice, mapping, 1, 3) + self.assertEqual(mapping, {1: 'a', 2: 'b', 3: 'c'}) + + def test_sequence_count(self): + count = _testcapi.sequence_count + + lst = ['a', 'b', 'a'] + self.assertEqual(count(lst, 'a'), 2) + self.assertEqual(count(lst, 'c'), 0) + self.assertEqual(count(iter(lst), 'a'), 2) + self.assertEqual(count(iter(lst), 'c'), 0) + self.assertEqual(count({'a': 2}, 'a'), 1) + + self.assertRaises(TypeError, count, 42, 'a') + self.assertRaises(SystemError, count, [], NULL) + self.assertRaises(SystemError, count, [1], NULL) + self.assertRaises(SystemError, count, NULL, 'a') + + def test_sequence_contains(self): + contains = _testcapi.sequence_contains + + lst = ['a', 'b', 'a'] + self.assertEqual(contains(lst, 'a'), 1) + self.assertEqual(contains(lst, 'c'), 0) + self.assertEqual(contains(iter(lst), 'a'), 1) + self.assertEqual(contains(iter(lst), 'c'), 0) + self.assertEqual(contains({'a': 2}, 'a'), 1) + + # XXX Only for empty sequences. Should be SystemError? + self.assertEqual(contains([], NULL), 0) + + self.assertRaises(TypeError, contains, 42, 'a') + self.assertRaises(SystemError, contains, [1], NULL) + # CRASHES contains({}, NULL) + # CRASHES contains(set(), NULL) + # CRASHES contains(NULL, 'a') + + def test_sequence_index(self): + index = _testcapi.sequence_index + + lst = ['a', 'b', 'a'] + self.assertEqual(index(lst, 'a'), 0) + self.assertEqual(index(lst, 'b'), 1) + self.assertRaises(ValueError, index, lst, 'c') + self.assertEqual(index(iter(lst), 'a'), 0) + self.assertEqual(index(iter(lst), 'b'), 1) + self.assertRaises(ValueError, index, iter(lst), 'c') + dct = {'a': 2, 'b': 3} + self.assertEqual(index(dct, 'a'), 0) + self.assertEqual(index(dct, 'b'), 1) + self.assertRaises(ValueError, index, dct, 'c') + + self.assertRaises(TypeError, index, 42, 'a') + self.assertRaises(SystemError, index, [], NULL) + self.assertRaises(SystemError, index, [1], NULL) + self.assertRaises(SystemError, index, NULL, 'a') + + def test_sequence_list(self): + xlist = _testcapi.sequence_list + self.assertEqual(xlist(['a', 'b', 'c']), ['a', 'b', 'c']) + self.assertEqual(xlist(('a', 'b', 'c')), ['a', 'b', 'c']) + self.assertEqual(xlist(iter(['a', 'b', 'c'])), ['a', 'b', 'c']) + self.assertEqual(xlist(gen()), ['a', 'b', 'c']) + + self.assertRaises(TypeError, xlist, 42) + self.assertRaises(SystemError, xlist, NULL) + + def test_sequence_tuple(self): + xtuple = _testcapi.sequence_tuple + self.assertEqual(xtuple(['a', 'b', 'c']), ('a', 'b', 'c')) + self.assertEqual(xtuple(('a', 'b', 'c')), ('a', 'b', 'c')) + self.assertEqual(xtuple(iter(['a', 'b', 'c'])), ('a', 'b', 'c')) + self.assertEqual(xtuple(gen()), ('a', 'b', 'c')) + + self.assertRaises(TypeError, xtuple, 42) + self.assertRaises(SystemError, xtuple, NULL) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_dict.py b/Lib/test/test_capi/test_dict.py new file mode 100644 index 00000000000000..9da6efd695ecab --- /dev/null +++ b/Lib/test/test_capi/test_dict.py @@ -0,0 +1,413 @@ +import unittest +import sys +from collections import OrderedDict, UserDict +from types import MappingProxyType +from test import support +from test.support import import_helper +import _testcapi + + +NULL = None + +class DictSubclass(dict): + def __getitem__(self, key): + raise RuntimeError('do not get evil') + def __setitem__(self, key, value): + raise RuntimeError('do not set evil') + def __delitem__(self, key): + raise RuntimeError('do not del evil') + +def gen(): + yield 'a' + 
yield 'b' + yield 'c' + + +class CAPITest(unittest.TestCase): + + def test_dict_check(self): + check = _testcapi.dict_check + self.assertTrue(check({1: 2})) + self.assertTrue(check(OrderedDict({1: 2}))) + self.assertFalse(check(UserDict({1: 2}))) + self.assertFalse(check([1, 2])) + self.assertFalse(check(object())) + #self.assertFalse(check(NULL)) + + def test_dict_checkexact(self): + check = _testcapi.dict_checkexact + self.assertTrue(check({1: 2})) + self.assertFalse(check(OrderedDict({1: 2}))) + self.assertFalse(check(UserDict({1: 2}))) + self.assertFalse(check([1, 2])) + self.assertFalse(check(object())) + #self.assertFalse(check(NULL)) + + def test_dict_new(self): + dict_new = _testcapi.dict_new + dct = dict_new() + self.assertEqual(dct, {}) + self.assertIs(type(dct), dict) + dct2 = dict_new() + self.assertIsNot(dct2, dct) + + def test_dictproxy_new(self): + dictproxy_new = _testcapi.dictproxy_new + for dct in {1: 2}, OrderedDict({1: 2}), UserDict({1: 2}): + proxy = dictproxy_new(dct) + self.assertIs(type(proxy), MappingProxyType) + self.assertEqual(proxy, dct) + with self.assertRaises(TypeError): + proxy[1] = 3 + self.assertEqual(proxy[1], 2) + dct[1] = 4 + self.assertEqual(proxy[1], 4) + + self.assertRaises(TypeError, dictproxy_new, []) + self.assertRaises(TypeError, dictproxy_new, 42) + # CRASHES dictproxy_new(NULL) + + def test_dict_copy(self): + copy = _testcapi.dict_copy + for dct in {1: 2}, OrderedDict({1: 2}): + dct_copy = copy(dct) + self.assertIs(type(dct_copy), dict) + self.assertEqual(dct_copy, dct) + + self.assertRaises(SystemError, copy, UserDict()) + self.assertRaises(SystemError, copy, []) + self.assertRaises(SystemError, copy, 42) + self.assertRaises(SystemError, copy, NULL) + + def test_dict_clear(self): + clear = _testcapi.dict_clear + dct = {1: 2} + clear(dct) + self.assertEqual(dct, {}) + + # NOTE: It is not safe to call it with OrderedDict. + + # Has no effect for non-dicts. + dct = UserDict({1: 2}) + clear(dct) + self.assertEqual(dct, {1: 2}) + lst = [1, 2] + clear(lst) + self.assertEqual(lst, [1, 2]) + clear(object()) + + # CRASHES? 
clear(NULL) + + def test_dict_size(self): + size = _testcapi.dict_size + self.assertEqual(size({1: 2}), 1) + self.assertEqual(size(OrderedDict({1: 2})), 1) + + self.assertRaises(SystemError, size, UserDict()) + self.assertRaises(SystemError, size, []) + self.assertRaises(SystemError, size, 42) + self.assertRaises(SystemError, size, object()) + self.assertRaises(SystemError, size, NULL) + + def test_dict_getitem(self): + getitem = _testcapi.dict_getitem + dct = {'a': 1, '\U0001f40d': 2} + self.assertEqual(getitem(dct, 'a'), 1) + self.assertIs(getitem(dct, 'b'), KeyError) + self.assertEqual(getitem(dct, '\U0001f40d'), 2) + + dct2 = DictSubclass(dct) + self.assertEqual(getitem(dct2, 'a'), 1) + self.assertIs(getitem(dct2, 'b'), KeyError) + + self.assertIs(getitem({}, []), KeyError) # unhashable + self.assertIs(getitem(42, 'a'), KeyError) + self.assertIs(getitem([1], 0), KeyError) + # CRASHES getitem({}, NULL) + # CRASHES getitem(NULL, 'a') + + def test_dict_getitemstring(self): + getitemstring = _testcapi.dict_getitemstring + dct = {'a': 1, '\U0001f40d': 2} + self.assertEqual(getitemstring(dct, b'a'), 1) + self.assertIs(getitemstring(dct, b'b'), KeyError) + self.assertEqual(getitemstring(dct, '\U0001f40d'.encode()), 2) + + dct2 = DictSubclass(dct) + self.assertEqual(getitemstring(dct2, b'a'), 1) + self.assertIs(getitemstring(dct2, b'b'), KeyError) + + self.assertIs(getitemstring({}, b'\xff'), KeyError) + self.assertIs(getitemstring(42, b'a'), KeyError) + self.assertIs(getitemstring([], b'a'), KeyError) + # CRASHES getitemstring({}, NULL) + # CRASHES getitemstring(NULL, b'a') + + def test_dict_getitemref(self): + getitem = _testcapi.dict_getitemref + dct = {'a': 1, '\U0001f40d': 2} + self.assertEqual(getitem(dct, 'a'), 1) + self.assertIs(getitem(dct, 'b'), KeyError) + self.assertEqual(getitem(dct, '\U0001f40d'), 2) + + dct2 = DictSubclass(dct) + self.assertEqual(getitem(dct2, 'a'), 1) + self.assertIs(getitem(dct2, 'b'), KeyError) + + self.assertRaises(SystemError, getitem, 42, 'a') + self.assertRaises(TypeError, getitem, {}, []) # unhashable + self.assertRaises(SystemError, getitem, [], 1) + self.assertRaises(SystemError, getitem, [], 'a') + # CRASHES getitem({}, NULL) + # CRASHES getitem(NULL, 'a') + + def test_dict_getitemstringref(self): + getitemstring = _testcapi.dict_getitemstringref + dct = {'a': 1, '\U0001f40d': 2} + self.assertEqual(getitemstring(dct, b'a'), 1) + self.assertIs(getitemstring(dct, b'b'), KeyError) + self.assertEqual(getitemstring(dct, '\U0001f40d'.encode()), 2) + + dct2 = DictSubclass(dct) + self.assertEqual(getitemstring(dct2, b'a'), 1) + self.assertIs(getitemstring(dct2, b'b'), KeyError) + + self.assertRaises(SystemError, getitemstring, 42, b'a') + self.assertRaises(UnicodeDecodeError, getitemstring, {}, b'\xff') + self.assertRaises(SystemError, getitemstring, [], b'a') + # CRASHES getitemstring({}, NULL) + # CRASHES getitemstring(NULL, b'a') + + def test_dict_getitemwitherror(self): + getitem = _testcapi.dict_getitemwitherror + dct = {'a': 1, '\U0001f40d': 2} + self.assertEqual(getitem(dct, 'a'), 1) + self.assertIs(getitem(dct, 'b'), KeyError) + self.assertEqual(getitem(dct, '\U0001f40d'), 2) + + dct2 = DictSubclass(dct) + self.assertEqual(getitem(dct2, 'a'), 1) + self.assertIs(getitem(dct2, 'b'), KeyError) + + self.assertRaises(SystemError, getitem, 42, 'a') + self.assertRaises(TypeError, getitem, {}, []) # unhashable + self.assertRaises(SystemError, getitem, [], 1) + self.assertRaises(SystemError, getitem, [], 'a') + # CRASHES getitem({}, NULL) + # CRASHES 
getitem(NULL, 'a') + + def test_dict_contains(self): + contains = _testcapi.dict_contains + dct = {'a': 1, '\U0001f40d': 2} + self.assertTrue(contains(dct, 'a')) + self.assertFalse(contains(dct, 'b')) + self.assertTrue(contains(dct, '\U0001f40d')) + + dct2 = DictSubclass(dct) + self.assertTrue(contains(dct2, 'a')) + self.assertFalse(contains(dct2, 'b')) + + self.assertRaises(TypeError, contains, {}, []) # unhashable + # CRASHES contains({}, NULL) + # CRASHES contains(UserDict(), 'a') + # CRASHES contains(42, 'a') + # CRASHES contains(NULL, 'a') + + def test_dict_setitem(self): + setitem = _testcapi.dict_setitem + dct = {} + setitem(dct, 'a', 5) + self.assertEqual(dct, {'a': 5}) + setitem(dct, '\U0001f40d', 8) + self.assertEqual(dct, {'a': 5, '\U0001f40d': 8}) + + dct2 = DictSubclass() + setitem(dct2, 'a', 5) + self.assertEqual(dct2, {'a': 5}) + + self.assertRaises(TypeError, setitem, {}, [], 5) # unhashable + self.assertRaises(SystemError, setitem, UserDict(), 'a', 5) + self.assertRaises(SystemError, setitem, [1], 0, 5) + self.assertRaises(SystemError, setitem, 42, 'a', 5) + # CRASHES setitem({}, NULL, 5) + # CRASHES setitem({}, 'a', NULL) + # CRASHES setitem(NULL, 'a', 5) + + def test_dict_setitemstring(self): + setitemstring = _testcapi.dict_setitemstring + dct = {} + setitemstring(dct, b'a', 5) + self.assertEqual(dct, {'a': 5}) + setitemstring(dct, '\U0001f40d'.encode(), 8) + self.assertEqual(dct, {'a': 5, '\U0001f40d': 8}) + + dct2 = DictSubclass() + setitemstring(dct2, b'a', 5) + self.assertEqual(dct2, {'a': 5}) + + self.assertRaises(UnicodeDecodeError, setitemstring, {}, b'\xff', 5) + self.assertRaises(SystemError, setitemstring, UserDict(), b'a', 5) + self.assertRaises(SystemError, setitemstring, 42, b'a', 5) + # CRASHES setitemstring({}, NULL, 5) + # CRASHES setitemstring({}, b'a', NULL) + # CRASHES setitemstring(NULL, b'a', 5) + + def test_dict_delitem(self): + delitem = _testcapi.dict_delitem + dct = {'a': 1, 'c': 2, '\U0001f40d': 3} + delitem(dct, 'a') + self.assertEqual(dct, {'c': 2, '\U0001f40d': 3}) + self.assertRaises(KeyError, delitem, dct, 'b') + delitem(dct, '\U0001f40d') + self.assertEqual(dct, {'c': 2}) + + dct2 = DictSubclass({'a': 1, 'c': 2}) + delitem(dct2, 'a') + self.assertEqual(dct2, {'c': 2}) + self.assertRaises(KeyError, delitem, dct2, 'b') + + self.assertRaises(TypeError, delitem, {}, []) # unhashable + self.assertRaises(SystemError, delitem, UserDict({'a': 1}), 'a') + self.assertRaises(SystemError, delitem, [1], 0) + self.assertRaises(SystemError, delitem, 42, 'a') + # CRASHES delitem({}, NULL) + # CRASHES delitem(NULL, 'a') + + def test_dict_delitemstring(self): + delitemstring = _testcapi.dict_delitemstring + dct = {'a': 1, 'c': 2, '\U0001f40d': 3} + delitemstring(dct, b'a') + self.assertEqual(dct, {'c': 2, '\U0001f40d': 3}) + self.assertRaises(KeyError, delitemstring, dct, b'b') + delitemstring(dct, '\U0001f40d'.encode()) + self.assertEqual(dct, {'c': 2}) + + dct2 = DictSubclass({'a': 1, 'c': 2}) + delitemstring(dct2, b'a') + self.assertEqual(dct2, {'c': 2}) + self.assertRaises(KeyError, delitemstring, dct2, b'b') + + self.assertRaises(UnicodeDecodeError, delitemstring, {}, b'\xff') + self.assertRaises(SystemError, delitemstring, UserDict({'a': 1}), b'a') + self.assertRaises(SystemError, delitemstring, 42, b'a') + # CRASHES delitemstring({}, NULL) + # CRASHES delitemstring(NULL, b'a') + + def test_dict_setdefault(self): + setdefault = _testcapi.dict_setdefault + dct = {} + self.assertEqual(setdefault(dct, 'a', 5), 5) + self.assertEqual(dct, {'a': 5}) + 
self.assertEqual(setdefault(dct, 'a', 8), 5) + self.assertEqual(dct, {'a': 5}) + + dct2 = DictSubclass() + self.assertEqual(setdefault(dct2, 'a', 5), 5) + self.assertEqual(dct2, {'a': 5}) + self.assertEqual(setdefault(dct2, 'a', 8), 5) + self.assertEqual(dct2, {'a': 5}) + + self.assertRaises(TypeError, setdefault, {}, [], 5) # unhashable + self.assertRaises(SystemError, setdefault, UserDict(), 'a', 5) + self.assertRaises(SystemError, setdefault, [1], 0, 5) + self.assertRaises(SystemError, setdefault, 42, 'a', 5) + # CRASHES setdefault({}, NULL, 5) + # CRASHES setdefault({}, 'a', NULL) + # CRASHES setdefault(NULL, 'a', 5) + + def test_mapping_keys_valuesitems(self): + class BadMapping(dict): + def keys(self): + return None + def values(self): + return None + def items(self): + return None + dict_obj = {'foo': 1, 'bar': 2, 'spam': 3} + for mapping in [dict_obj, DictSubclass(dict_obj), BadMapping(dict_obj)]: + self.assertListEqual(_testcapi.dict_keys(mapping), + list(dict_obj.keys())) + self.assertListEqual(_testcapi.dict_values(mapping), + list(dict_obj.values())) + self.assertListEqual(_testcapi.dict_items(mapping), + list(dict_obj.items())) + + def test_dict_keys_valuesitems_bad_arg(self): + for mapping in UserDict(), [], object(): + self.assertRaises(SystemError, _testcapi.dict_keys, mapping) + self.assertRaises(SystemError, _testcapi.dict_values, mapping) + self.assertRaises(SystemError, _testcapi.dict_items, mapping) + + def test_dict_next(self): + dict_next = _testcapi.dict_next + self.assertIsNone(dict_next({}, 0)) + dct = {'a': 1, 'b': 2, 'c': 3} + pos = 0 + pairs = [] + while True: + res = dict_next(dct, pos) + if res is None: + break + rc, pos, key, value = res + self.assertEqual(rc, 1) + pairs.append((key, value)) + self.assertEqual(pairs, list(dct.items())) + + # CRASHES dict_next(NULL, 0) + + def test_dict_update(self): + update = _testcapi.dict_update + for cls1 in dict, DictSubclass: + for cls2 in dict, DictSubclass, UserDict: + dct = cls1({'a': 1, 'b': 2}) + update(dct, cls2({'b': 3, 'c': 4})) + self.assertEqual(dct, {'a': 1, 'b': 3, 'c': 4}) + + self.assertRaises(AttributeError, update, {}, []) + self.assertRaises(AttributeError, update, {}, 42) + self.assertRaises(SystemError, update, UserDict(), {}) + self.assertRaises(SystemError, update, 42, {}) + self.assertRaises(SystemError, update, {}, NULL) + self.assertRaises(SystemError, update, NULL, {}) + + def test_dict_merge(self): + merge = _testcapi.dict_merge + for cls1 in dict, DictSubclass: + for cls2 in dict, DictSubclass, UserDict: + dct = cls1({'a': 1, 'b': 2}) + merge(dct, cls2({'b': 3, 'c': 4}), 0) + self.assertEqual(dct, {'a': 1, 'b': 2, 'c': 4}) + dct = cls1({'a': 1, 'b': 2}) + merge(dct, cls2({'b': 3, 'c': 4}), 1) + self.assertEqual(dct, {'a': 1, 'b': 3, 'c': 4}) + + self.assertRaises(AttributeError, merge, {}, [], 0) + self.assertRaises(AttributeError, merge, {}, 42, 0) + self.assertRaises(SystemError, merge, UserDict(), {}, 0) + self.assertRaises(SystemError, merge, 42, {}, 0) + self.assertRaises(SystemError, merge, {}, NULL, 0) + self.assertRaises(SystemError, merge, NULL, {}, 0) + + def test_dict_mergefromseq2(self): + mergefromseq2 = _testcapi.dict_mergefromseq2 + for cls1 in dict, DictSubclass: + for cls2 in list, iter: + dct = cls1({'a': 1, 'b': 2}) + mergefromseq2(dct, cls2([('b', 3), ('c', 4)]), 0) + self.assertEqual(dct, {'a': 1, 'b': 2, 'c': 4}) + dct = cls1({'a': 1, 'b': 2}) + mergefromseq2(dct, cls2([('b', 3), ('c', 4)]), 1) + self.assertEqual(dct, {'a': 1, 'b': 3, 'c': 4}) + + 
self.assertRaises(ValueError, mergefromseq2, {}, [(1,)], 0) + self.assertRaises(ValueError, mergefromseq2, {}, [(1, 2, 3)], 0) + self.assertRaises(TypeError, mergefromseq2, {}, [1], 0) + self.assertRaises(TypeError, mergefromseq2, {}, 42, 0) + # CRASHES mergefromseq2(UserDict(), [], 0) + # CRASHES mergefromseq2(42, [], 0) + # CRASHES mergefromseq2({}, NULL, 0) + # CRASHES mergefromseq2(NULL, {}, 0) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_mem.py b/Lib/test/test_capi/test_mem.py index a9ff410cb93ab8..527000875b7241 100644 --- a/Lib/test/test_capi/test_mem.py +++ b/Lib/test/test_capi/test_mem.py @@ -8,8 +8,10 @@ from test.support.script_helper import assert_python_failure, assert_python_ok -# Skip this test if the _testcapi module isn't available. +# Skip this test if the _testcapi and _testinternalcapi extensions are not +# available. _testcapi = import_helper.import_module('_testcapi') +_testinternalcapi = import_helper.import_module('_testinternalcapi') @requires_subprocess() class PyMemDebugTests(unittest.TestCase): @@ -84,16 +86,13 @@ def test_pyobject_malloc_without_gil(self): def check_pyobject_is_freed(self, func_name): code = textwrap.dedent(f''' - import gc, os, sys, _testcapi + import gc, os, sys, _testinternalcapi # Disable the GC to avoid crash on GC collection gc.disable() - try: - _testcapi.{func_name}() - # Exit immediately to avoid a crash while deallocating - # the invalid object - os._exit(0) - except _testcapi.error: - os._exit(1) + _testinternalcapi.{func_name}() + # Exit immediately to avoid a crash while deallocating + # the invalid object + os._exit(0) ''') assert_python_ok( '-c', code, diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index 4e519fa73c50cc..c81212202d9ef2 100644 --- a/Lib/test/test_capi/test_misc.py +++ b/Lib/test/test_capi/test_misc.py @@ -51,6 +51,8 @@ import _testinternalcapi +NULL = None + def decode_stderr(err): return err.decode('utf-8', 'replace').replace('\r', '') @@ -85,9 +87,15 @@ def test_instancemethod(self): @support.requires_subprocess() def test_no_FatalError_infinite_loop(self): - run_result, _cmd_line = run_python_until_end( - '-c', 'import _testcapi; _testcapi.crash_no_current_thread()', - ) + code = textwrap.dedent(""" + import _testcapi + from test import support + + with support.SuppressCrashReport(): + _testcapi.crash_no_current_thread() + """) + + run_result, _cmd_line = run_python_until_end('-c', code) _rc, out, err = run_result self.assertEqual(out, b'') # This used to cause an infinite loop. 
@@ -293,137 +301,6 @@ def test_getitem_with_error(self): def test_buildvalue_N(self): _testcapi.test_buildvalue_N() - def test_mapping_keys_values_items(self): - class Mapping1(dict): - def keys(self): - return list(super().keys()) - def values(self): - return list(super().values()) - def items(self): - return list(super().items()) - class Mapping2(dict): - def keys(self): - return tuple(super().keys()) - def values(self): - return tuple(super().values()) - def items(self): - return tuple(super().items()) - dict_obj = {'foo': 1, 'bar': 2, 'spam': 3} - - for mapping in [{}, OrderedDict(), Mapping1(), Mapping2(), - dict_obj, OrderedDict(dict_obj), - Mapping1(dict_obj), Mapping2(dict_obj)]: - self.assertListEqual(_testcapi.get_mapping_keys(mapping), - list(mapping.keys())) - self.assertListEqual(_testcapi.get_mapping_values(mapping), - list(mapping.values())) - self.assertListEqual(_testcapi.get_mapping_items(mapping), - list(mapping.items())) - - def test_mapping_keys_values_items_bad_arg(self): - self.assertRaises(AttributeError, _testcapi.get_mapping_keys, None) - self.assertRaises(AttributeError, _testcapi.get_mapping_values, None) - self.assertRaises(AttributeError, _testcapi.get_mapping_items, None) - - class BadMapping: - def keys(self): - return None - def values(self): - return None - def items(self): - return None - bad_mapping = BadMapping() - self.assertRaises(TypeError, _testcapi.get_mapping_keys, bad_mapping) - self.assertRaises(TypeError, _testcapi.get_mapping_values, bad_mapping) - self.assertRaises(TypeError, _testcapi.get_mapping_items, bad_mapping) - - def test_mapping_has_key(self): - dct = {'a': 1} - self.assertTrue(_testcapi.mapping_has_key(dct, 'a')) - self.assertFalse(_testcapi.mapping_has_key(dct, 'b')) - - class SubDict(dict): - pass - - dct2 = SubDict({'a': 1}) - self.assertTrue(_testcapi.mapping_has_key(dct2, 'a')) - self.assertFalse(_testcapi.mapping_has_key(dct2, 'b')) - - def test_sequence_set_slice(self): - # Correct case: - data = [1, 2, 3, 4, 5] - data_copy = data.copy() - - _testcapi.sequence_set_slice(data, 1, 3, [8, 9]) - data_copy[1:3] = [8, 9] - self.assertEqual(data, data_copy) - self.assertEqual(data, [1, 8, 9, 4, 5]) - - # Custom class: - class Custom: - def __setitem__(self, index, value): - self.index = index - self.value = value - - c = Custom() - _testcapi.sequence_set_slice(c, 0, 5, 'abc') - self.assertEqual(c.index, slice(0, 5)) - self.assertEqual(c.value, 'abc') - - # Immutable sequences must raise: - bad_seq1 = (1, 2, 3, 4) - with self.assertRaises(TypeError): - _testcapi.sequence_set_slice(bad_seq1, 1, 3, (8, 9)) - self.assertEqual(bad_seq1, (1, 2, 3, 4)) - - bad_seq2 = 'abcd' - with self.assertRaises(TypeError): - _testcapi.sequence_set_slice(bad_seq2, 1, 3, 'xy') - self.assertEqual(bad_seq2, 'abcd') - - # Not a sequence: - with self.assertRaises(TypeError): - _testcapi.sequence_set_slice(None, 1, 3, 'xy') - - def test_sequence_del_slice(self): - # Correct case: - data = [1, 2, 3, 4, 5] - data_copy = data.copy() - - _testcapi.sequence_del_slice(data, 1, 3) - del data_copy[1:3] - self.assertEqual(data, data_copy) - self.assertEqual(data, [1, 4, 5]) - - # Custom class: - class Custom: - def __delitem__(self, index): - self.index = index - - c = Custom() - _testcapi.sequence_del_slice(c, 0, 5) - self.assertEqual(c.index, slice(0, 5)) - - # Immutable sequences must raise: - bad_seq1 = (1, 2, 3, 4) - with self.assertRaises(TypeError): - _testcapi.sequence_del_slice(bad_seq1, 1, 3) - self.assertEqual(bad_seq1, (1, 2, 3, 4)) - - bad_seq2 = 'abcd' - 
with self.assertRaises(TypeError): - _testcapi.sequence_del_slice(bad_seq2, 1, 3) - self.assertEqual(bad_seq2, 'abcd') - - # Not a sequence: - with self.assertRaises(TypeError): - _testcapi.sequence_del_slice(None, 1, 3) - - mapping = {1: 'a', 2: 'b', 3: 'c'} - with self.assertRaises(KeyError): - _testcapi.sequence_del_slice(mapping, 1, 3) - self.assertEqual(mapping, {1: 'a', 2: 'b', 3: 'c'}) - @unittest.skipUnless(hasattr(_testcapi, 'negative_refcount'), 'need _testcapi.negative_refcount') def test_negative_refcount(self): @@ -734,7 +611,7 @@ class Base(metaclass=metaclass): # Class creation from C with warnings_helper.check_warnings( - ('.*custom tp_new.*in Python 3.14.*', DeprecationWarning), + ('.* _testcapi.Subclass .* custom tp_new.*in Python 3.14.*', DeprecationWarning), ): sub = _testcapi.make_type_with_base(Base) self.assertTrue(issubclass(sub, Base)) @@ -1115,6 +992,46 @@ class Data(_testcapi.ObjExtraData): del d.extra self.assertIsNone(d.extra) + def test_sys_getobject(self): + getobject = _testcapi.sys_getobject + + self.assertIs(getobject(b'stdout'), sys.stdout) + with support.swap_attr(sys, '\U0001f40d', 42): + self.assertEqual(getobject('\U0001f40d'.encode()), 42) + + self.assertIs(getobject(b'nonexisting'), AttributeError) + self.assertIs(getobject(b'\xff'), AttributeError) + # CRASHES getobject(NULL) + + def test_sys_setobject(self): + setobject = _testcapi.sys_setobject + + value = ['value'] + value2 = ['value2'] + try: + self.assertEqual(setobject(b'newattr', value), 0) + self.assertIs(sys.newattr, value) + self.assertEqual(setobject(b'newattr', value2), 0) + self.assertIs(sys.newattr, value2) + self.assertEqual(setobject(b'newattr', NULL), 0) + self.assertFalse(hasattr(sys, 'newattr')) + self.assertEqual(setobject(b'newattr', NULL), 0) + finally: + with contextlib.suppress(AttributeError): + del sys.newattr + try: + self.assertEqual(setobject('\U0001f40d'.encode(), value), 0) + self.assertIs(getattr(sys, '\U0001f40d'), value) + self.assertEqual(setobject('\U0001f40d'.encode(), NULL), 0) + self.assertFalse(hasattr(sys, '\U0001f40d')) + finally: + with contextlib.suppress(AttributeError): + delattr(sys, '\U0001f40d') + + with self.assertRaises(UnicodeDecodeError): + setobject(b'\xff', value) + # CRASHES setobject(NULL, value) + @requires_limited_api class TestHeapTypeRelative(unittest.TestCase): @@ -2368,12 +2285,16 @@ def clear_executors(func): class TestOptimizerAPI(unittest.TestCase): def test_get_set_optimizer(self): - self.assertEqual(_testinternalcapi.get_optimizer(), None) + old = _testinternalcapi.get_optimizer() opt = _testinternalcapi.get_counter_optimizer() - _testinternalcapi.set_optimizer(opt) - self.assertEqual(_testinternalcapi.get_optimizer(), opt) - _testinternalcapi.set_optimizer(None) - self.assertEqual(_testinternalcapi.get_optimizer(), None) + try: + _testinternalcapi.set_optimizer(opt) + self.assertEqual(_testinternalcapi.get_optimizer(), opt) + _testinternalcapi.set_optimizer(None) + self.assertEqual(_testinternalcapi.get_optimizer(), None) + finally: + _testinternalcapi.set_optimizer(old) + def test_counter_optimizer(self): # Generate a new function at each call @@ -2426,7 +2347,7 @@ def get_first_executor(func): co_code = code.co_code JUMP_BACKWARD = opcode.opmap["JUMP_BACKWARD"] for i in range(0, len(co_code), 2): - if co_code[i] == JUMP_BACKWARD or 1: + if co_code[i] == JUMP_BACKWARD: try: return _testinternalcapi.get_executor(code, i) except ValueError: @@ -2454,36 +2375,40 @@ def testfunc(x): def test_extended_arg(self): "Check EXTENDED_ARG 
handling in superblock creation" - def many_vars(): - # 260 vars, so z9 should have index 259 - a0 = a1 = a2 = a3 = a4 = a5 = a6 = a7 = a8 = a9 = 42 - b0 = b1 = b2 = b3 = b4 = b5 = b6 = b7 = b8 = b9 = 42 - c0 = c1 = c2 = c3 = c4 = c5 = c6 = c7 = c8 = c9 = 42 - d0 = d1 = d2 = d3 = d4 = d5 = d6 = d7 = d8 = d9 = 42 - e0 = e1 = e2 = e3 = e4 = e5 = e6 = e7 = e8 = e9 = 42 - f0 = f1 = f2 = f3 = f4 = f5 = f6 = f7 = f8 = f9 = 42 - g0 = g1 = g2 = g3 = g4 = g5 = g6 = g7 = g8 = g9 = 42 - h0 = h1 = h2 = h3 = h4 = h5 = h6 = h7 = h8 = h9 = 42 - i0 = i1 = i2 = i3 = i4 = i5 = i6 = i7 = i8 = i9 = 42 - j0 = j1 = j2 = j3 = j4 = j5 = j6 = j7 = j8 = j9 = 42 - k0 = k1 = k2 = k3 = k4 = k5 = k6 = k7 = k8 = k9 = 42 - l0 = l1 = l2 = l3 = l4 = l5 = l6 = l7 = l8 = l9 = 42 - m0 = m1 = m2 = m3 = m4 = m5 = m6 = m7 = m8 = m9 = 42 - n0 = n1 = n2 = n3 = n4 = n5 = n6 = n7 = n8 = n9 = 42 - o0 = o1 = o2 = o3 = o4 = o5 = o6 = o7 = o8 = o9 = 42 - p0 = p1 = p2 = p3 = p4 = p5 = p6 = p7 = p8 = p9 = 42 - q0 = q1 = q2 = q3 = q4 = q5 = q6 = q7 = q8 = q9 = 42 - r0 = r1 = r2 = r3 = r4 = r5 = r6 = r7 = r8 = r9 = 42 - s0 = s1 = s2 = s3 = s4 = s5 = s6 = s7 = s8 = s9 = 42 - t0 = t1 = t2 = t3 = t4 = t5 = t6 = t7 = t8 = t9 = 42 - u0 = u1 = u2 = u3 = u4 = u5 = u6 = u7 = u8 = u9 = 42 - v0 = v1 = v2 = v3 = v4 = v5 = v6 = v7 = v8 = v9 = 42 - w0 = w1 = w2 = w3 = w4 = w5 = w6 = w7 = w8 = w9 = 42 - x0 = x1 = x2 = x3 = x4 = x5 = x6 = x7 = x8 = x9 = 42 - y0 = y1 = y2 = y3 = y4 = y5 = y6 = y7 = y8 = y9 = 42 - z0 = z1 = z2 = z3 = z4 = z5 = z6 = z7 = z8 = z9 = 42 - while z9 > 0: - z9 = z9 - 1 + ns = {} + exec(textwrap.dedent(""" + def many_vars(): + # 260 vars, so z9 should have index 259 + a0 = a1 = a2 = a3 = a4 = a5 = a6 = a7 = a8 = a9 = 42 + b0 = b1 = b2 = b3 = b4 = b5 = b6 = b7 = b8 = b9 = 42 + c0 = c1 = c2 = c3 = c4 = c5 = c6 = c7 = c8 = c9 = 42 + d0 = d1 = d2 = d3 = d4 = d5 = d6 = d7 = d8 = d9 = 42 + e0 = e1 = e2 = e3 = e4 = e5 = e6 = e7 = e8 = e9 = 42 + f0 = f1 = f2 = f3 = f4 = f5 = f6 = f7 = f8 = f9 = 42 + g0 = g1 = g2 = g3 = g4 = g5 = g6 = g7 = g8 = g9 = 42 + h0 = h1 = h2 = h3 = h4 = h5 = h6 = h7 = h8 = h9 = 42 + i0 = i1 = i2 = i3 = i4 = i5 = i6 = i7 = i8 = i9 = 42 + j0 = j1 = j2 = j3 = j4 = j5 = j6 = j7 = j8 = j9 = 42 + k0 = k1 = k2 = k3 = k4 = k5 = k6 = k7 = k8 = k9 = 42 + l0 = l1 = l2 = l3 = l4 = l5 = l6 = l7 = l8 = l9 = 42 + m0 = m1 = m2 = m3 = m4 = m5 = m6 = m7 = m8 = m9 = 42 + n0 = n1 = n2 = n3 = n4 = n5 = n6 = n7 = n8 = n9 = 42 + o0 = o1 = o2 = o3 = o4 = o5 = o6 = o7 = o8 = o9 = 42 + p0 = p1 = p2 = p3 = p4 = p5 = p6 = p7 = p8 = p9 = 42 + q0 = q1 = q2 = q3 = q4 = q5 = q6 = q7 = q8 = q9 = 42 + r0 = r1 = r2 = r3 = r4 = r5 = r6 = r7 = r8 = r9 = 42 + s0 = s1 = s2 = s3 = s4 = s5 = s6 = s7 = s8 = s9 = 42 + t0 = t1 = t2 = t3 = t4 = t5 = t6 = t7 = t8 = t9 = 42 + u0 = u1 = u2 = u3 = u4 = u5 = u6 = u7 = u8 = u9 = 42 + v0 = v1 = v2 = v3 = v4 = v5 = v6 = v7 = v8 = v9 = 42 + w0 = w1 = w2 = w3 = w4 = w5 = w6 = w7 = w8 = w9 = 42 + x0 = x1 = x2 = x3 = x4 = x5 = x6 = x7 = x8 = x9 = 42 + y0 = y1 = y2 = y3 = y4 = y5 = y6 = y7 = y8 = y9 = 42 + z0 = z1 = z2 = z3 = z4 = z5 = z6 = z7 = z8 = z9 = 42 + while z9 > 0: + z9 = z9 - 1 + """), ns, ns) + many_vars = ns["many_vars"] opt = _testinternalcapi.get_uop_optimizer() with temporary_optimizer(opt): @@ -2693,5 +2618,6 @@ def testfunc(it): with self.assertRaises(StopIteration): next(it) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_watchers.py b/Lib/test/test_capi/test_watchers.py index 93f6ef752d0663..10b76e163bfb21 100644 --- a/Lib/test/test_capi/test_watchers.py +++ 
b/Lib/test/test_capi/test_watchers.py @@ -294,6 +294,18 @@ class C2: pass C2.hmm = "baz" self.assert_events([C1, [C2]]) + def test_all_watchers(self): + class C: pass + with ExitStack() as stack: + last_wid = -1 + # don't make assumptions about how many watchers are already + # registered, just go until we reach the max ID + while last_wid < self.TYPE_MAX_WATCHERS - 1: + last_wid = stack.enter_context(self.watcher()) + self.watch(last_wid, C) + C.foo = "bar" + self.assert_events([C]) + def test_watch_non_type(self): with self.watcher() as wid: with self.assertRaisesRegex(ValueError, r"Cannot watch non-type"): diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py index 894e0ca67deabc..1531aad4f1f779 100644 --- a/Lib/test/test_class.py +++ b/Lib/test/test_class.py @@ -455,8 +455,8 @@ def __init__(self): self.attr = 1 a = A() - self.assertEqual(_testcapi.hasattr_string(a, "attr"), True) - self.assertEqual(_testcapi.hasattr_string(a, "noattr"), False) + self.assertEqual(_testcapi.object_hasattrstring(a, b"attr"), 1) + self.assertEqual(_testcapi.object_hasattrstring(a, b"noattr"), 0) self.assertIsNone(sys.exception()) def testDel(self): @@ -641,6 +641,14 @@ class A: class B: y = 0 __slots__ = ('z',) + class C: + __slots__ = ("y",) + + def __setattr__(self, name, value) -> None: + if name == "z": + super().__setattr__("y", 1) + else: + super().__setattr__(name, value) error_msg = "'A' object has no attribute 'x'" with self.assertRaisesRegex(AttributeError, error_msg): @@ -653,8 +661,16 @@ class B: B().x with self.assertRaisesRegex(AttributeError, error_msg): del B().x - with self.assertRaisesRegex(AttributeError, error_msg): + with self.assertRaisesRegex( + AttributeError, + "'B' object has no attribute 'x' and no __dict__ for setting new attributes" + ): B().x = 0 + with self.assertRaisesRegex( + AttributeError, + "'C' object has no attribute 'x'" + ): + C().x = 0 error_msg = "'B' object attribute 'y' is read-only" with self.assertRaisesRegex(AttributeError, error_msg): diff --git a/Lib/test/test_clinic.py b/Lib/test/test_clinic.py index e925ecca1b9c5d..a649b5fe2201c8 100644 --- a/Lib/test/test_clinic.py +++ b/Lib/test/test_clinic.py @@ -2,13 +2,16 @@ # Copyright 2012-2013 by Larry Hastings. # Licensed to the PSF under a contributor agreement. +from functools import partial from test import support, test_tools from test.support import os_helper +from test.support.os_helper import TESTFN, unlink from textwrap import dedent from unittest import TestCase -import collections +import contextlib import inspect import os.path +import re import sys import unittest @@ -18,97 +21,43 @@ from clinic import DSLParser -class _ParserBase(TestCase): +def _make_clinic(*, filename='clinic_tests'): + clang = clinic.CLanguage(None) + c = clinic.Clinic(clang, filename=filename) + c.block_parser = clinic.BlockParser('', clang) + return c + + +def _expect_failure(tc, parser, code, errmsg, *, filename=None, lineno=None): + """Helper for the parser tests. 
+ + tc: unittest.TestCase; passed self in the wrapper + parser: the clinic parser used for this test case + code: a str with input text (clinic code) + errmsg: the expected error message + filename: str, optional filename + lineno: int, optional line number + """ + code = dedent(code).strip() + errmsg = re.escape(errmsg) + with tc.assertRaisesRegex(clinic.ClinicError, errmsg) as cm: + parser(code) + if filename is not None: + tc.assertEqual(cm.exception.filename, filename) + if lineno is not None: + tc.assertEqual(cm.exception.lineno, lineno) + return cm.exception + + +class ClinicWholeFileTest(TestCase): maxDiff = None - def expect_parser_failure(self, parser, _input): - with support.captured_stdout() as stdout: - with self.assertRaises(SystemExit): - parser(_input) - return stdout.getvalue() - - def parse_function_should_fail(self, _input): - return self.expect_parser_failure(self.parse_function, _input) - - -class FakeConverter: - def __init__(self, name, args): - self.name = name - self.args = args - - -class FakeConverterFactory: - def __init__(self, name): - self.name = name - - def __call__(self, name, default, **kwargs): - return FakeConverter(self.name, kwargs) - - -class FakeConvertersDict: - def __init__(self): - self.used_converters = {} - - def get(self, name, default): - return self.used_converters.setdefault(name, FakeConverterFactory(name)) - -c = clinic.Clinic(language='C', filename = "file") - -class FakeClinic: - def __init__(self): - self.converters = FakeConvertersDict() - self.legacy_converters = FakeConvertersDict() - self.language = clinic.CLanguage(None) - self.filename = None - self.destination_buffers = {} - self.block_parser = clinic.BlockParser('', self.language) - self.modules = collections.OrderedDict() - self.classes = collections.OrderedDict() - clinic.clinic = self - self.name = "FakeClinic" - self.line_prefix = self.line_suffix = '' - self.destinations = {} - self.add_destination("block", "buffer") - self.add_destination("file", "buffer") - self.add_destination("suppress", "suppress") - d = self.destinations.get - self.field_destinations = collections.OrderedDict(( - ('docstring_prototype', d('suppress')), - ('docstring_definition', d('block')), - ('methoddef_define', d('block')), - ('impl_prototype', d('block')), - ('parser_prototype', d('suppress')), - ('parser_definition', d('block')), - ('impl_definition', d('block')), - )) - - def get_destination(self, name): - d = self.destinations.get(name) - if not d: - sys.exit("Destination does not exist: " + repr(name)) - return d - - def add_destination(self, name, type, *args): - if name in self.destinations: - sys.exit("Destination already exists: " + repr(name)) - self.destinations[name] = clinic.Destination(name, type, self, *args) - - def is_directive(self, name): - return name == "module" - - def directive(self, name, args): - self.called_directives[name] = args - - _module_and_class = clinic.Clinic._module_and_class - - -class ClinicWholeFileTest(_ParserBase): - def setUp(self): - self.clinic = clinic.Clinic(clinic.CLanguage(None), filename="test.c") + def expect_failure(self, raw, errmsg, *, filename=None, lineno=None): + _expect_failure(self, self.clinic.parse, raw, errmsg, + filename=filename, lineno=lineno) - def expect_failure(self, raw): - _input = dedent(raw).strip() - return self.expect_parser_failure(self.clinic.parse, _input) + def setUp(self): + self.clinic = _make_clinic(filename="test.c") def test_eol(self): # regression test: @@ -133,12 +82,11 @@ def test_mangled_marker_line(self): [clinic 
start generated code]*/ /*[clinic end generated code: foo]*/ """ - msg = ( - 'Error in file "test.c" on line 3:\n' - "Mangled Argument Clinic marker line: '/*[clinic end generated code: foo]*/'\n" + err = ( + "Mangled Argument Clinic marker line: " + "'/*[clinic end generated code: foo]*/'" ) - out = self.expect_failure(raw) - self.assertEqual(out, msg) + self.expect_failure(raw, err, filename="test.c", lineno=3) def test_checksum_mismatch(self): raw = """ @@ -146,38 +94,28 @@ def test_checksum_mismatch(self): [clinic start generated code]*/ /*[clinic end generated code: output=0123456789abcdef input=fedcba9876543210]*/ """ - msg = ( - 'Error in file "test.c" on line 3:\n' - 'Checksum mismatch!\n' - 'Expected: 0123456789abcdef\n' - 'Computed: da39a3ee5e6b4b0d\n' - ) - out = self.expect_failure(raw) - self.assertIn(msg, out) + err = ("Checksum mismatch! " + "Expected '0123456789abcdef', computed 'da39a3ee5e6b4b0d'") + self.expect_failure(raw, err, filename="test.c", lineno=3) def test_garbage_after_stop_line(self): raw = """ /*[clinic input] [clinic start generated code]*/foobarfoobar! """ - msg = ( - 'Error in file "test.c" on line 2:\n' - "Garbage after stop line: 'foobarfoobar!'\n" - ) - out = self.expect_failure(raw) - self.assertEqual(out, msg) + err = "Garbage after stop line: 'foobarfoobar!'" + self.expect_failure(raw, err, filename="test.c", lineno=2) def test_whitespace_before_stop_line(self): raw = """ /*[clinic input] [clinic start generated code]*/ """ - msg = ( - 'Error in file "test.c" on line 2:\n' - "Whitespace is not allowed before the stop line: ' [clinic start generated code]*/'\n" + err = ( + "Whitespace is not allowed before the stop line: " + "' [clinic start generated code]*/'" ) - out = self.expect_failure(raw) - self.assertEqual(out, msg) + self.expect_failure(raw, err, filename="test.c", lineno=2) def test_parse_with_body_prefix(self): clang = clinic.CLanguage(None) @@ -207,12 +145,8 @@ def test_cpp_monitor_fail_nested_block_comment(self): */ */ """ - msg = ( - 'Error in file "test.c" on line 2:\n' - 'Nested block comment!\n' - ) - out = self.expect_failure(raw) - self.assertEqual(out, msg) + err = 'Nested block comment!' + self.expect_failure(raw, err, filename="test.c", lineno=2) def test_cpp_monitor_fail_invalid_format_noarg(self): raw = """ @@ -220,12 +154,8 @@ def test_cpp_monitor_fail_invalid_format_noarg(self): a() #endif """ - msg = ( - 'Error in file "test.c" on line 1:\n' - 'Invalid format for #if line: no argument!\n' - ) - out = self.expect_failure(raw) - self.assertEqual(out, msg) + err = 'Invalid format for #if line: no argument!' + self.expect_failure(raw, err, filename="test.c", lineno=1) def test_cpp_monitor_fail_invalid_format_toomanyargs(self): raw = """ @@ -233,21 +163,476 @@ def test_cpp_monitor_fail_invalid_format_toomanyargs(self): a() #endif """ - msg = ( - 'Error in file "test.c" on line 1:\n' - 'Invalid format for #ifdef line: should be exactly one argument!\n' - ) - out = self.expect_failure(raw) - self.assertEqual(out, msg) + err = 'Invalid format for #ifdef line: should be exactly one argument!' + self.expect_failure(raw, err, filename="test.c", lineno=1) def test_cpp_monitor_fail_no_matching_if(self): raw = '#else' - msg = ( - 'Error in file "test.c" on line 1:\n' - '#else without matching #if / #ifdef / #ifndef!\n' + err = '#else without matching #if / #ifdef / #ifndef!' 
+ self.expect_failure(raw, err, filename="test.c", lineno=1) + + def test_directive_output_unknown_preset(self): + raw = """ + /*[clinic input] + output preset nosuchpreset + [clinic start generated code]*/ + """ + err = "Unknown preset 'nosuchpreset'" + self.expect_failure(raw, err) + + def test_directive_output_cant_pop(self): + raw = """ + /*[clinic input] + output pop + [clinic start generated code]*/ + """ + err = "Can't 'output pop', stack is empty" + self.expect_failure(raw, err) + + def test_directive_output_print(self): + raw = dedent(""" + /*[clinic input] + output print 'I told you once.' + [clinic start generated code]*/ + """) + out = self.clinic.parse(raw) + # The generated output will differ for every run, but we can check that + # it starts with the clinic block, we check that it contains all the + # expected fields, and we check that it contains the checksum line. + self.assertTrue(out.startswith(dedent(""" + /*[clinic input] + output print 'I told you once.' + [clinic start generated code]*/ + """))) + fields = { + "cpp_endif", + "cpp_if", + "docstring_definition", + "docstring_prototype", + "impl_definition", + "impl_prototype", + "methoddef_define", + "methoddef_ifndef", + "parser_definition", + "parser_prototype", + } + for field in fields: + with self.subTest(field=field): + self.assertIn(field, out) + last_line = out.rstrip().split("\n")[-1] + self.assertTrue( + last_line.startswith("/*[clinic end generated code: output=") + ) + + def test_directive_wrong_arg_number(self): + raw = dedent(""" + /*[clinic input] + preserve foo bar baz eggs spam ham mushrooms + [clinic start generated code]*/ + """) + err = "takes 1 positional argument but 8 were given" + self.expect_failure(raw, err) + + def test_unknown_destination_command(self): + raw = """ + /*[clinic input] + destination buffer nosuchcommand + [clinic start generated code]*/ + """ + err = "unknown destination command 'nosuchcommand'" + self.expect_failure(raw, err) + + def test_no_access_to_members_in_converter_init(self): + raw = """ + /*[python input] + class Custom_converter(CConverter): + converter = "some_c_function" + def converter_init(self): + self.function.noaccess + [python start generated code]*/ + /*[clinic input] + module test + test.fn + a: Custom + [clinic start generated code]*/ + """ + err = ( + "accessing self.function inside converter_init is disallowed!" 
+ ) + self.expect_failure(raw, err) + + @staticmethod + @contextlib.contextmanager + def _clinic_version(new_version): + """Helper for test_version_*() tests""" + _saved = clinic.version + clinic.version = new_version + try: + yield + finally: + clinic.version = _saved + + def test_version_directive(self): + dataset = ( + # (clinic version, required version) + ('3', '2'), # required version < clinic version + ('3.1', '3.0'), # required version < clinic version + ('1.2b0', '1.2a7'), # required version < clinic version + ('5', '5'), # required version == clinic version + ('6.1', '6.1'), # required version == clinic version + ('1.2b3', '1.2b3'), # required version == clinic version + ) + for clinic_version, required_version in dataset: + with self.subTest(clinic_version=clinic_version, + required_version=required_version): + with self._clinic_version(clinic_version): + block = dedent(f""" + /*[clinic input] + version {required_version} + [clinic start generated code]*/ + """) + self.clinic.parse(block) + + def test_version_directive_insufficient_version(self): + with self._clinic_version('4'): + err = ( + "Insufficient Clinic version!\n" + " Version: 4\n" + " Required: 5" + ) + block = """ + /*[clinic input] + version 5 + [clinic start generated code]*/ + """ + self.expect_failure(block, err) + + def test_version_directive_illegal_char(self): + err = "Illegal character 'v' in version string 'v5'" + block = """ + /*[clinic input] + version v5 + [clinic start generated code]*/ + """ + self.expect_failure(block, err) + + def test_version_directive_unsupported_string(self): + err = "Unsupported version string: '.-'" + block = """ + /*[clinic input] + version .- + [clinic start generated code]*/ + """ + self.expect_failure(block, err) + + def test_clone_mismatch(self): + err = "'kind' of function and cloned function don't match!" + block = """ + /*[clinic input] + module m + @classmethod + m.f1 + a: object + [clinic start generated code]*/ + /*[clinic input] + @staticmethod + m.f2 = m.f1 + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=9) + + def test_badly_formed_return_annotation(self): + err = "Badly formed annotation for 'm.f': 'Custom'" + block = """ + /*[python input] + class Custom_return_converter(CReturnConverter): + def __init__(self): + raise ValueError("abc") + [python start generated code]*/ + /*[clinic input] + module m + m.f -> Custom + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=8) + + def test_module_already_got_one(self): + err = "Already defined module 'm'!" + block = """ + /*[clinic input] + module m + module m + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=3) + + def test_destination_already_got_one(self): + err = "Destination already exists: 'test'" + block = """ + /*[clinic input] + destination test new buffer + destination test new buffer + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=3) + + def test_destination_does_not_exist(self): + err = "Destination does not exist: '/dev/null'" + block = """ + /*[clinic input] + output everything /dev/null + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=2) + + def test_class_already_got_one(self): + err = "Already defined class 'C'!" 
+ block = """ + /*[clinic input] + class C "" "" + class C "" "" + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=3) + + def test_cant_nest_module_inside_class(self): + err = "Can't nest a module inside a class!" + block = """ + /*[clinic input] + class C "" "" + module C.m + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=3) + + def test_dest_buffer_not_empty_at_eof(self): + expected_warning = ("Destination buffer 'buffer' not empty at " + "end of file, emptying.") + expected_generated = dedent(""" + /*[clinic input] + output everything buffer + fn + a: object + / + [clinic start generated code]*/ + /*[clinic end generated code: output=da39a3ee5e6b4b0d input=1c4668687f5fd002]*/ + + /*[clinic input] + dump buffer + [clinic start generated code]*/ + + PyDoc_VAR(fn__doc__); + + PyDoc_STRVAR(fn__doc__, + "fn($module, a, /)\\n" + "--\\n" + "\\n"); + + #define FN_METHODDEF \\ + {"fn", (PyCFunction)fn, METH_O, fn__doc__}, + + static PyObject * + fn(PyObject *module, PyObject *a) + /*[clinic end generated code: output=be6798b148ab4e53 input=524ce2e021e4eba6]*/ + """) + block = dedent(""" + /*[clinic input] + output everything buffer + fn + a: object + / + [clinic start generated code]*/ + """) + with support.captured_stdout() as stdout: + generated = self.clinic.parse(block) + self.assertIn(expected_warning, stdout.getvalue()) + self.assertEqual(generated, expected_generated) + + def test_dest_clear(self): + err = "Can't clear destination 'file': it's not of type 'buffer'" + block = """ + /*[clinic input] + destination file clear + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=2) + + def test_directive_set_misuse(self): + err = "unknown variable 'ets'" + block = """ + /*[clinic input] + set ets tse + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=2) + + def test_directive_set_prefix(self): + block = dedent(""" + /*[clinic input] + set line_prefix '// ' + output everything suppress + output docstring_prototype buffer + fn + a: object + / + [clinic start generated code]*/ + /* We need to dump the buffer. + * If not, Argument Clinic will emit a warning */ + /*[clinic input] + dump buffer + [clinic start generated code]*/ + """) + generated = self.clinic.parse(block) + expected_docstring_prototype = "// PyDoc_VAR(fn__doc__);" + self.assertIn(expected_docstring_prototype, generated) + + def test_directive_set_suffix(self): + block = dedent(""" + /*[clinic input] + set line_suffix ' // test' + output everything suppress + output docstring_prototype buffer + fn + a: object + / + [clinic start generated code]*/ + /* We need to dump the buffer. + * If not, Argument Clinic will emit a warning */ + /*[clinic input] + dump buffer + [clinic start generated code]*/ + """) + generated = self.clinic.parse(block) + expected_docstring_prototype = "PyDoc_VAR(fn__doc__); // test" + self.assertIn(expected_docstring_prototype, generated) + + def test_directive_set_prefix_and_suffix(self): + block = dedent(""" + /*[clinic input] + set line_prefix '{block comment start} ' + set line_suffix ' {block comment end}' + output everything suppress + output docstring_prototype buffer + fn + a: object + / + [clinic start generated code]*/ + /* We need to dump the buffer. 
+ * If not, Argument Clinic will emit a warning */ + /*[clinic input] + dump buffer + [clinic start generated code]*/ + """) + generated = self.clinic.parse(block) + expected_docstring_prototype = "/* PyDoc_VAR(fn__doc__); */" + self.assertIn(expected_docstring_prototype, generated) + + def test_directive_printout(self): + block = dedent(""" + /*[clinic input] + output everything buffer + printout test + [clinic start generated code]*/ + """) + expected = dedent(""" + /*[clinic input] + output everything buffer + printout test + [clinic start generated code]*/ + test + /*[clinic end generated code: output=4e1243bd22c66e76 input=898f1a32965d44ca]*/ + """) + generated = self.clinic.parse(block) + self.assertEqual(generated, expected) + + def test_directive_preserve_twice(self): + err = "Can't have 'preserve' twice in one block!" + block = """ + /*[clinic input] + preserve + preserve + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=3) + + def test_directive_preserve_input(self): + err = "'preserve' only works for blocks that don't produce any output!" + block = """ + /*[clinic input] + preserve + fn + a: object + / + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=6) + + def test_directive_preserve_output(self): + block = dedent(""" + /*[clinic input] + output everything buffer + preserve + [clinic start generated code]*/ + // Preserve this + /*[clinic end generated code: output=eaa49677ae4c1f7d input=559b5db18fddae6a]*/ + /*[clinic input] + dump buffer + [clinic start generated code]*/ + /*[clinic end generated code: output=da39a3ee5e6b4b0d input=524ce2e021e4eba6]*/ + """) + generated = self.clinic.parse(block) + self.assertEqual(generated, block) + + def test_directive_output_invalid_command(self): + err = dedent(""" + Invalid command or destination name 'cmd'. 
Must be one of: + - 'preset' + - 'push' + - 'pop' + - 'print' + - 'everything' + - 'cpp_if' + - 'docstring_prototype' + - 'docstring_definition' + - 'methoddef_define' + - 'impl_prototype' + - 'parser_prototype' + - 'parser_definition' + - 'cpp_endif' + - 'methoddef_ifndef' + - 'impl_definition' + """).strip() + block = """ + /*[clinic input] + output cmd buffer + [clinic start generated code]*/ + """ + self.expect_failure(block, err, lineno=2) + + +class ParseFileUnitTest(TestCase): + def expect_parsing_failure( + self, *, filename, expected_error, verify=True, output=None + ): + errmsg = re.escape(dedent(expected_error).strip()) + with self.assertRaisesRegex(clinic.ClinicError, errmsg): + clinic.parse_file(filename) + + def test_parse_file_no_extension(self) -> None: + self.expect_parsing_failure( + filename="foo", + expected_error="Can't extract file type for file 'foo'" ) - out = self.expect_failure(raw) - self.assertEqual(out, msg) + + def test_parse_file_strange_extension(self) -> None: + filenames_to_errors = { + "foo.rs": "Can't identify file type for file 'foo.rs'", + "foo.hs": "Can't identify file type for file 'foo.hs'", + "foo.js": "Can't identify file type for file 'foo.js'", + } + for filename, errmsg in filenames_to_errors.items(): + with self.subTest(filename=filename): + self.expect_parsing_failure(filename=filename, expected_error=errmsg) class ClinicGroupPermuterTest(TestCase): @@ -426,14 +811,34 @@ def test_clinic_1(self): """) -class ClinicParserTest(_ParserBase): +class ClinicParserTest(TestCase): + + def parse(self, text): + c = _make_clinic() + parser = DSLParser(c) + block = clinic.Block(text) + parser.parse(block) + return block + + def parse_function(self, text, signatures_in_block=2, function_index=1): + block = self.parse(text) + s = block.signatures + self.assertEqual(len(s), signatures_in_block) + assert isinstance(s[0], clinic.Module) + assert isinstance(s[function_index], clinic.Function) + return s[function_index] + + def expect_failure(self, block, err, *, filename=None, lineno=None): + return _expect_failure(self, self.parse_function, block, err, + filename=filename, lineno=lineno) + def checkDocstring(self, fn, expected): self.assertTrue(hasattr(fn, "docstring")) - self.assertEqual(fn.docstring.strip(), - dedent(expected).strip()) + self.assertEqual(dedent(expected).strip(), + fn.docstring.strip()) def test_trivial(self): - parser = DSLParser(FakeClinic()) + parser = DSLParser(_make_clinic()) block = clinic.Block(""" module os os.access @@ -486,7 +891,7 @@ def test_param_with_continuations(self): p = function.parameters['follow_symlinks'] self.assertEqual(True, p.default) - def test_param_default_expression(self): + def test_param_default_expr_named_constant(self): function = self.parse_function(""" module os os.access @@ -496,17 +901,62 @@ def test_param_default_expression(self): self.assertEqual(sys.maxsize, p.default) self.assertEqual("MAXSIZE", p.converter.c_default) - expected_msg = ( - "Error on line 0:\n" - "When you specify a named constant ('sys.maxsize') as your default value,\n" - "you MUST specify a valid c_default.\n" + err = ( + "When you specify a named constant ('sys.maxsize') as your default value, " + "you MUST specify a valid c_default." 
) - out = self.parse_function_should_fail(""" + block = """ module os os.access follow_symlinks: int = sys.maxsize - """) - self.assertEqual(out, expected_msg) + """ + self.expect_failure(block, err, lineno=2) + + def test_param_with_bizarre_default_fails_correctly(self): + template = """ + module os + os.access + follow_symlinks: int = {default} + """ + err = "Unsupported expression as default value" + for bad_default_value in ( + "{1, 2, 3}", + "3 if bool() else 4", + "[x for x in range(42)]" + ): + with self.subTest(bad_default=bad_default_value): + block = template.format(default=bad_default_value) + self.expect_failure(block, err, lineno=2) + + def test_unspecified_not_allowed_as_default_value(self): + block = """ + module os + os.access + follow_symlinks: int(c_default='MAXSIZE') = unspecified + """ + err = "'unspecified' is not a legal default value!" + exc = self.expect_failure(block, err, lineno=2) + self.assertNotIn('Malformed expression given as default value', str(exc)) + + def test_malformed_expression_as_default_value(self): + block = """ + module os + os.access + follow_symlinks: int(c_default='MAXSIZE') = 1/0 + """ + err = "Malformed expression given as default value" + self.expect_failure(block, err, lineno=2) + + def test_param_default_expr_binop(self): + err = ( + "When you specify an expression ('a + b') as your default value, " + "you MUST specify a valid c_default." + ) + block = """ + fn + follow_symlinks: int = a + b + """ + self.expect_failure(block, err, lineno=1) def test_param_no_docstring(self): function = self.parse_function(""" @@ -515,23 +965,22 @@ def test_param_no_docstring(self): follow_symlinks: bool = True something_else: str = '' """) - p = function.parameters['follow_symlinks'] self.assertEqual(3, len(function.parameters)) conv = function.parameters['something_else'].converter self.assertIsInstance(conv, clinic.str_converter) def test_param_default_parameters_out_of_order(self): - expected_msg = ( - "Error on line 0:\n" - "Can't have a parameter without a default ('something_else')\n" - "after a parameter with a default!\n" + err = ( + "Can't have a parameter without a default ('something_else') " + "after a parameter with a default!" ) - out = self.parse_function_should_fail(""" + block = """ module os os.access follow_symlinks: bool = True - something_else: str""") - self.assertEqual(out, expected_msg) + something_else: str + """ + self.expect_failure(block, err, lineno=3) def disabled_test_converter_arguments(self): function = self.parse_function(""" @@ -549,8 +998,12 @@ def test_function_docstring(self): path: str Path to be examined + Ensure that multiple lines are indented correctly. Perform a stat system call on the given path. + + Ensure that multiple lines are indented correctly. + Ensure that multiple lines are indented correctly. """) self.checkDocstring(function, """ stat($module, /, path) @@ -560,6 +1013,32 @@ def test_function_docstring(self): path Path to be examined + Ensure that multiple lines are indented correctly. + + Ensure that multiple lines are indented correctly. + Ensure that multiple lines are indented correctly. 
+ """) + + def test_docstring_trailing_whitespace(self): + function = self.parse_function( + "module t\n" + "t.s\n" + " a: object\n" + " Param docstring with trailing whitespace \n" + "Func docstring summary with trailing whitespace \n" + " \n" + "Func docstring body with trailing whitespace \n" + ) + self.checkDocstring(function, """ + s($module, /, a) + -- + + Func docstring summary with trailing whitespace + + a + Param docstring with trailing whitespace + + Func docstring body with trailing whitespace """) def test_explicit_parameters_in_docstring(self): @@ -586,6 +1065,38 @@ def test_explicit_parameters_in_docstring(self): Okay, we're done here. """) + def test_docstring_with_comments(self): + function = self.parse_function(dedent(""" + module foo + foo.bar + x: int + # We're about to have + # the documentation for x. + Documentation for x. + # We've just had + # the documentation for x. + y: int + + # We're about to have + # the documentation for foo. + This is the documentation for foo. + # We've just had + # the documentation for foo. + + Okay, we're done here. + """)) + self.checkDocstring(function, """ + bar($module, /, x, y) + -- + + This is the documentation for foo. + + x + Documentation for x. + + Okay, we're done here. + """) + def test_parser_regression_special_character_in_parameter_column_of_docstring_first_line(self): function = self.parse_function(dedent(""" module os @@ -607,6 +1118,43 @@ def test_c_name(self): """) self.assertEqual("os_stat_fn", function.c_basename) + def test_base_invalid_syntax(self): + block = """ + module os + os.stat + invalid syntax: int = 42 + """ + err = dedent(r""" + Function 'stat' has an invalid parameter declaration: + \s+'invalid syntax: int = 42' + """).strip() + with self.assertRaisesRegex(clinic.ClinicError, err): + self.parse_function(block) + + def test_param_default_invalid_syntax(self): + block = """ + module os + os.stat + x: int = invalid syntax + """ + err = r"Syntax error: 'x = invalid syntax\n'" + self.expect_failure(block, err, lineno=2) + + def test_cloning_nonexistent_function_correctly_fails(self): + block = """ + cloned = fooooooooooooooooo + This is trying to clone a nonexistent function!! + """ + err = "Couldn't find existing function 'fooooooooooooooooo'!" 
+ with support.captured_stderr() as stderr: + self.expect_failure(block, err, lineno=0) + expected_debug_print = dedent("""\ + cls=None, module=, existing='fooooooooooooooooo' + (cls or module).functions=[] + """) + stderr = stderr.getvalue() + self.assertIn(expected_debug_print, stderr) + def test_return_converter(self): function = self.parse_function(""" module os @@ -614,6 +1162,30 @@ def test_return_converter(self): """) self.assertIsInstance(function.return_converter, clinic.int_return_converter) + def test_return_converter_invalid_syntax(self): + block = """ + module os + os.stat -> invalid syntax + """ + err = "Badly formed annotation for 'os.stat': 'invalid syntax'" + self.expect_failure(block, err) + + def test_legacy_converter_disallowed_in_return_annotation(self): + block = """ + module os + os.stat -> "s" + """ + err = "Legacy converter 's' not allowed as a return converter" + self.expect_failure(block, err) + + def test_unknown_return_converter(self): + block = """ + module os + os.stat -> fooooooooooooooooooooooo + """ + err = "No available return converter called 'fooooooooooooooooooooooo'" + self.expect_failure(block, err) + def test_star(self): function = self.parse_function(""" module os @@ -757,19 +1329,12 @@ def test_nested_groups(self): Attributes for the character. """) - def parse_function_should_fail(self, s): - with support.captured_stdout() as stdout: - with self.assertRaises(SystemExit): - self.parse_function(s) - return stdout.getvalue() - def test_disallowed_grouping__two_top_groups_on_left(self): - expected_msg = ( - 'Error on line 0:\n' - 'Function two_top_groups_on_left has an unsupported group ' - 'configuration. (Unexpected state 2.b)\n' + err = ( + "Function 'two_top_groups_on_left' has an unsupported group " + "configuration. (Unexpected state 2.b)" ) - out = self.parse_function_should_fail(""" + block = """ module foo foo.two_top_groups_on_left [ @@ -779,11 +1344,11 @@ def test_disallowed_grouping__two_top_groups_on_left(self): group2 : int ] param: int - """) - self.assertEqual(out, expected_msg) + """ + self.expect_failure(block, err, lineno=5) def test_disallowed_grouping__two_top_groups_on_right(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.two_top_groups_on_right param: int @@ -793,15 +1358,15 @@ def test_disallowed_grouping__two_top_groups_on_right(self): [ group2 : int ] - """) - msg = ( - "Function two_top_groups_on_right has an unsupported group " + """ + err = ( + "Function 'two_top_groups_on_right' has an unsupported group " "configuration. (Unexpected state 6.b)" ) - self.assertIn(msg, out) + self.expect_failure(block, err) def test_disallowed_grouping__parameter_after_group_on_right(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.parameter_after_group_on_right param: int @@ -811,15 +1376,15 @@ def test_disallowed_grouping__parameter_after_group_on_right(self): ] group2 : int ] - """) - msg = ( + """ + err = ( "Function parameter_after_group_on_right has an unsupported group " "configuration. 
(Unexpected state 6.a)" ) - self.assertIn(msg, out) + self.expect_failure(block, err) def test_disallowed_grouping__group_after_parameter_on_left(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.group_after_parameter_on_left [ @@ -829,15 +1394,15 @@ def test_disallowed_grouping__group_after_parameter_on_left(self): ] ] param: int - """) - msg = ( - "Function group_after_parameter_on_left has an unsupported group " + """ + err = ( + "Function 'group_after_parameter_on_left' has an unsupported group " "configuration. (Unexpected state 2.b)" ) - self.assertIn(msg, out) + self.expect_failure(block, err) def test_disallowed_grouping__empty_group_on_left(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.empty_group [ @@ -846,15 +1411,15 @@ def test_disallowed_grouping__empty_group_on_left(self): group2 : int ] param: int - """) - msg = ( - "Function empty_group has an empty group.\n" + """ + err = ( + "Function 'empty_group' has an empty group. " "All groups must contain at least one parameter." ) - self.assertIn(msg, out) + self.expect_failure(block, err) def test_disallowed_grouping__empty_group_on_right(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.empty_group param: int @@ -863,24 +1428,24 @@ def test_disallowed_grouping__empty_group_on_right(self): ] group2 : int ] - """) - msg = ( - "Function empty_group has an empty group.\n" + """ + err = ( + "Function 'empty_group' has an empty group. " "All groups must contain at least one parameter." ) - self.assertIn(msg, out) + self.expect_failure(block, err) def test_disallowed_grouping__no_matching_bracket(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.empty_group param: int ] group2: int ] - """) - msg = "Function empty_group has a ] without a matching [." - self.assertIn(msg, out) + """ + err = "Function 'empty_group' has a ']' without a matching '['" + self.expect_failure(block, err) def test_no_parameters(self): function = self.parse_function(""" @@ -909,31 +1474,32 @@ class foo.Bar "unused" "notneeded" self.assertEqual(1, len(function.parameters)) def test_illegal_module_line(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.bar => int / - """) - msg = "Illegal function name: foo.bar => int" - self.assertIn(msg, out) + """ + err = "Illegal function name: 'foo.bar => int'" + self.expect_failure(block, err) def test_illegal_c_basename(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.bar as 935 / - """) - msg = "Illegal C basename: 935" - self.assertIn(msg, out) + """ + err = "Illegal C basename: '935'" + self.expect_failure(block, err) def test_single_star(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.bar * * - """) - self.assertIn("Function bar uses '*' more than once.", out) + """ + err = "Function 'bar' uses '*' more than once." + self.expect_failure(block, err) def test_parameters_required_after_star(self): dataset = ( @@ -942,39 +1508,120 @@ def test_parameters_required_after_star(self): "module foo\nfoo.bar\n this: int\n *", "module foo\nfoo.bar\n this: int\n *\nDocstring.", ) - msg = "Function bar specifies '*' without any parameters afterwards." + err = "Function 'foo.bar' specifies '*' without any parameters afterwards." 
for block in dataset: with self.subTest(block=block): - out = self.parse_function_should_fail(block) - self.assertIn(msg, out) + self.expect_failure(block, err) + + def test_depr_star_invalid_format_1(self): + block = """ + module foo + foo.bar + this: int + * [from 3] + Docstring. + """ + err = ( + "Function 'foo.bar': expected format '* [from major.minor]' " + "where 'major' and 'minor' are integers; got '3'" + ) + self.expect_failure(block, err, lineno=3) + + def test_depr_star_invalid_format_2(self): + block = """ + module foo + foo.bar + this: int + * [from a.b] + Docstring. + """ + err = ( + "Function 'foo.bar': expected format '* [from major.minor]' " + "where 'major' and 'minor' are integers; got 'a.b'" + ) + self.expect_failure(block, err, lineno=3) + + def test_depr_star_invalid_format_3(self): + block = """ + module foo + foo.bar + this: int + * [from 1.2.3] + Docstring. + """ + err = ( + "Function 'foo.bar': expected format '* [from major.minor]' " + "where 'major' and 'minor' are integers; got '1.2.3'" + ) + self.expect_failure(block, err, lineno=3) + + def test_parameters_required_after_depr_star(self): + block = """ + module foo + foo.bar + this: int + * [from 3.14] + Docstring. + """ + err = ( + "Function 'foo.bar' specifies '* [from ...]' without " + "any parameters afterwards" + ) + self.expect_failure(block, err, lineno=4) + + def test_depr_star_must_come_before_star(self): + block = """ + module foo + foo.bar + this: int + * + * [from 3.14] + Docstring. + """ + err = "Function 'foo.bar': '* [from ...]' must come before '*'" + self.expect_failure(block, err, lineno=4) + + def test_depr_star_duplicate(self): + block = """ + module foo + foo.bar + a: int + * [from 3.14] + b: int + * [from 3.14] + c: int + Docstring. + """ + err = "Function 'foo.bar' uses '[from ...]' more than once" + self.expect_failure(block, err, lineno=5) def test_single_slash(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.bar / / - """) - msg = ( - "Function bar has an unsupported group configuration. " + """ + err = ( + "Function 'bar' has an unsupported group configuration. " "(Unexpected state 0.d)" ) - self.assertIn(msg, out) + self.expect_failure(block, err) def test_double_slash(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.bar a: int / b: int / - """) - msg = "Function bar uses '/' more than once." - self.assertIn(msg, out) + """ + err = "Function 'bar' uses '/' more than once." + self.expect_failure(block, err) def test_mix_star_and_slash(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.bar x: int @@ -982,38 +1629,35 @@ def test_mix_star_and_slash(self): * z: int / - """) - msg = ( - "Function bar mixes keyword-only and positional-only parameters, " + """ + err = ( + "Function 'bar' mixes keyword-only and positional-only parameters, " "which is unsupported." ) - self.assertIn(msg, out) + self.expect_failure(block, err) def test_parameters_not_permitted_after_slash_for_now(self): - out = self.parse_function_should_fail(""" + block = """ module foo foo.bar / x: int - """) - msg = ( - "Function bar has an unsupported group configuration. " + """ + err = ( + "Function 'bar' has an unsupported group configuration. 
" "(Unexpected state 0.d)" ) - self.assertIn(msg, out) + self.expect_failure(block, err) def test_parameters_no_more_than_one_vararg(self): - expected_msg = ( - "Error on line 0:\n" - "Too many var args\n" - ) - out = self.parse_function_should_fail(""" + err = "Too many var args" + block = """ module foo foo.bar *vararg1: object *vararg2: object - """) - self.assertEqual(out, expected_msg) + """ + self.expect_failure(block, err, lineno=0) def test_function_not_at_column_0(self): function = self.parse_function(""" @@ -1035,9 +1679,86 @@ def test_function_not_at_column_0(self): Nested docstring here, goeth. """) + def test_docstring_only_summary(self): + function = self.parse_function(""" + module m + m.f + summary + """) + self.checkDocstring(function, """ + f($module, /) + -- + + summary + """) + + def test_docstring_empty_lines(self): + function = self.parse_function(""" + module m + m.f + + + """) + self.checkDocstring(function, """ + f($module, /) + -- + """) + + def test_docstring_explicit_params_placement(self): + function = self.parse_function(""" + module m + m.f + a: int + Param docstring for 'a' will be included + b: int + c: int + Param docstring for 'c' will be included + This is the summary line. + + We'll now place the params section here: + {parameters} + And now for something completely different! + (Note the added newline) + """) + self.checkDocstring(function, """ + f($module, /, a, b, c) + -- + + This is the summary line. + + We'll now place the params section here: + a + Param docstring for 'a' will be included + c + Param docstring for 'c' will be included + + And now for something completely different! + (Note the added newline) + """) + + def test_indent_stack_no_tabs(self): + block = """ + module foo + foo.bar + *vararg1: object + \t*vararg2: object + """ + err = ("Tab characters are illegal in the Clinic DSL: " + r"'\t*vararg2: object'") + self.expect_failure(block, err) + + def test_indent_stack_illegal_outdent(self): + block = """ + module foo + foo.bar + a: object + b: object + """ + err = "Illegal outdent" + self.expect_failure(block, err) + def test_directive(self): - c = FakeClinic() - parser = DSLParser(c) + parser = DSLParser(_make_clinic()) parser.flag = False parser.directives['setflag'] = lambda : setattr(parser, 'flag', True) block = clinic.Block("setflag") @@ -1051,10 +1772,7 @@ def test_legacy_converters(self): self.assertIsInstance(conv, clinic.str_converter) def test_legacy_converters_non_string_constant_annotation(self): - expected_failure_message = ( - "Error on line 0:\n" - "Annotations must be either a name, a function call, or a string.\n" - ) + err = "Annotations must be either a name, a function call, or a string" dataset = ( 'module os\nos.access\n path: 42', 'module os\nos.access\n path: 42.42', @@ -1063,14 +1781,10 @@ def test_legacy_converters_non_string_constant_annotation(self): ) for block in dataset: with self.subTest(block=block): - out = self.parse_function_should_fail(block) - self.assertEqual(out, expected_failure_message) + self.expect_failure(block, err, lineno=2) def test_other_bizarre_things_in_annotations_fail(self): - expected_failure_message = ( - "Error on line 0:\n" - "Annotations must be either a name, a function call, or a string.\n" - ) + err = "Annotations must be either a name, a function call, or a string" dataset = ( 'module os\nos.access\n path: {"some": "dictionary"}', 'module os\nos.access\n path: ["list", "of", "strings"]', @@ -1078,30 +1792,24 @@ def test_other_bizarre_things_in_annotations_fail(self): ) for 
block in dataset: with self.subTest(block=block): - out = self.parse_function_should_fail(block) - self.assertEqual(out, expected_failure_message) + self.expect_failure(block, err, lineno=2) def test_kwarg_splats_disallowed_in_function_call_annotations(self): - expected_error_msg = ( - "Error on line 0:\n" - "Cannot use a kwarg splat in a function-call annotation\n" - ) + err = "Cannot use a kwarg splat in a function-call annotation" dataset = ( 'module fo\nfo.barbaz\n o: bool(**{None: "bang!"})', 'module fo\nfo.barbaz -> bool(**{None: "bang!"})', 'module fo\nfo.barbaz -> bool(**{"bang": 42})', 'module fo\nfo.barbaz\n o: bool(**{"bang": None})', ) - for fn in dataset: - with self.subTest(fn=fn): - out = self.parse_function_should_fail(fn) - self.assertEqual(out, expected_error_msg) + for block in dataset: + with self.subTest(block=block): + self.expect_failure(block, err) def test_self_param_placement(self): - expected_error_msg = ( - "Error on line 0:\n" + err = ( "A 'self' parameter, if specified, must be the very first thing " - "in the parameter block.\n" + "in the parameter block." ) block = """ module foo @@ -1109,27 +1817,21 @@ def test_self_param_placement(self): a: int self: self(type="PyObject *") """ - out = self.parse_function_should_fail(block) - self.assertEqual(out, expected_error_msg) + self.expect_failure(block, err, lineno=3) def test_self_param_cannot_be_optional(self): - expected_error_msg = ( - "Error on line 0:\n" - "A 'self' parameter cannot be marked optional.\n" - ) + err = "A 'self' parameter cannot be marked optional." block = """ module foo foo.func self: self(type="PyObject *") = None """ - out = self.parse_function_should_fail(block) - self.assertEqual(out, expected_error_msg) + self.expect_failure(block, err, lineno=2) def test_defining_class_param_placement(self): - expected_error_msg = ( - "Error on line 0:\n" + err = ( "A 'defining_class' parameter, if specified, must either be the " - "first thing in the parameter block, or come just after 'self'.\n" + "first thing in the parameter block, or come just after 'self'." ) block = """ module foo @@ -1138,21 +1840,16 @@ def test_defining_class_param_placement(self): a: int cls: defining_class """ - out = self.parse_function_should_fail(block) - self.assertEqual(out, expected_error_msg) + self.expect_failure(block, err, lineno=4) def test_defining_class_param_cannot_be_optional(self): - expected_error_msg = ( - "Error on line 0:\n" - "A 'defining_class' parameter cannot be marked optional.\n" - ) + err = "A 'defining_class' parameter cannot be marked optional." block = """ module foo foo.func cls: defining_class(type="PyObject *") = None """ - out = self.parse_function_should_fail(block) - self.assertEqual(out, expected_error_msg) + self.expect_failure(block, err, lineno=2) def test_slot_methods_cannot_access_defining_class(self): block = """ @@ -1162,34 +1859,38 @@ class Foo "" "" cls: defining_class a: object """ - msg = "Slot methods cannot access their defining class." - with self.assertRaisesRegex(ValueError, msg): + err = "Slot methods cannot access their defining class." + with self.assertRaisesRegex(ValueError, err): self.parse_function(block) def test_new_must_be_a_class_method(self): - expected_error_msg = ( - "Error on line 0:\n" - "__new__ must be a class method!\n" - ) - out = self.parse_function_should_fail(""" + err = "__new__ must be a class method!" 
+ block = """ module foo class Foo "" "" Foo.__new__ - """) - self.assertEqual(out, expected_error_msg) + """ + self.expect_failure(block, err, lineno=2) def test_init_must_be_a_normal_method(self): - expected_error_msg = ( - "Error on line 0:\n" - "__init__ must be a normal method, not a class or static method!\n" - ) - out = self.parse_function_should_fail(""" + err = "__init__ must be a normal method, not a class or static method!" + block = """ module foo class Foo "" "" @classmethod Foo.__init__ - """) - self.assertEqual(out, expected_error_msg) + """ + self.expect_failure(block, err, lineno=3) + + def test_duplicate_coexist(self): + err = "Called @coexist twice" + block = """ + module m + @coexist + @coexist + m.fn + """ + self.expect_failure(block, err, lineno=2) def test_unused_param(self): block = self.parse(""" @@ -1229,68 +1930,497 @@ def test_unused_param(self): parser_decl = p.simple_declaration(in_parser=True) self.assertNotIn("Py_UNUSED", parser_decl) - def parse(self, text): - c = FakeClinic() - parser = DSLParser(c) - block = clinic.Block(text) - parser.parse(block) - return block - - def parse_function(self, text, signatures_in_block=2, function_index=1): - block = self.parse(text) - s = block.signatures - self.assertEqual(len(s), signatures_in_block) - assert isinstance(s[0], clinic.Module) - assert isinstance(s[function_index], clinic.Function) - return s[function_index] - def test_scaffolding(self): # test repr on special values self.assertEqual(repr(clinic.unspecified), '') self.assertEqual(repr(clinic.NULL), '') # test that fail fails - expected = ( - 'Error in file "clown.txt" on line 69:\n' - 'The igloos are melting!\n' - ) with support.captured_stdout() as stdout: - with self.assertRaises(SystemExit): - clinic.fail('The igloos are melting!', - filename='clown.txt', line_number=69) - actual = stdout.getvalue() - self.assertEqual(actual, expected) + errmsg = 'The igloos are melting' + with self.assertRaisesRegex(clinic.ClinicError, errmsg) as cm: + clinic.fail(errmsg, filename='clown.txt', line_number=69) + exc = cm.exception + self.assertEqual(exc.filename, 'clown.txt') + self.assertEqual(exc.lineno, 69) + self.assertEqual(stdout.getvalue(), "") + + def test_non_ascii_character_in_docstring(self): + block = """ + module test + test.fn + a: int + á param docstring + docstring fü bár baß + """ + with support.captured_stdout() as stdout: + self.parse(block) + # The line numbers are off; this is a known limitation. + expected = dedent("""\ + Warning in file 'clinic_tests' on line 0: + Non-ascii characters are not allowed in docstrings: 'á' + + Warning in file 'clinic_tests' on line 0: + Non-ascii characters are not allowed in docstrings: 'ü', 'á', 'ß' + + """) + self.assertEqual(stdout.getvalue(), expected) + + def test_illegal_c_identifier(self): + err = "Illegal C identifier: 17a" + block = """ + module test + test.fn + a as 17a: int + """ + self.expect_failure(block, err, lineno=2) + + def test_cannot_convert_special_method(self): + err = "__len__ is a special method and cannot be converted" + block = """ + class T "" "" + T.__len__ + """ + self.expect_failure(block, err, lineno=1) + + def test_cannot_specify_pydefault_without_default(self): + err = "You can't specify py_default without specifying a default value!" + block = """ + fn + a: object(py_default='NULL') + """ + self.expect_failure(block, err, lineno=1) + + def test_vararg_cannot_take_default_value(self): + err = "Vararg can't take a default value!" 
+ block = """ + fn + *args: object = None + """ + self.expect_failure(block, err, lineno=1) + + def test_default_is_not_of_correct_type(self): + err = ("int_converter: default value 2.5 for field 'a' " + "is not of type 'int'") + block = """ + fn + a: int = 2.5 + """ + self.expect_failure(block, err, lineno=1) + + def test_invalid_legacy_converter(self): + err = "'fhi' is not a valid legacy converter" + block = """ + fn + a: 'fhi' + """ + self.expect_failure(block, err, lineno=1) + + def test_parent_class_or_module_does_not_exist(self): + err = "Parent class or module 'z' does not exist" + block = """ + module m + z.func + """ + self.expect_failure(block, err, lineno=1) + + def test_duplicate_param_name(self): + err = "You can't have two parameters named 'a'" + block = """ + module m + m.func + a: int + a: float + """ + self.expect_failure(block, err, lineno=3) + + def test_param_requires_custom_c_name(self): + err = "Parameter 'module' requires a custom C name" + block = """ + module m + m.func + module: int + """ + self.expect_failure(block, err, lineno=2) + + def test_state_func_docstring_assert_no_group(self): + err = "Function 'func' has a ']' without a matching '['" + block = """ + module m + m.func + ] + docstring + """ + self.expect_failure(block, err, lineno=2) + + def test_state_func_docstring_no_summary(self): + err = "Docstring for 'm.func' does not have a summary line!" + block = """ + module m + m.func + docstring1 + docstring2 + """ + self.expect_failure(block, err, lineno=0) + + def test_state_func_docstring_only_one_param_template(self): + err = "You may not specify {parameters} more than once in a docstring!" + block = """ + module m + m.func + docstring summary + + these are the params: + {parameters} + these are the params again: + {parameters} + """ + self.expect_failure(block, err, lineno=0) class ClinicExternalTest(TestCase): maxDiff = None + def run_clinic(self, *args): + with ( + support.captured_stdout() as out, + support.captured_stderr() as err, + self.assertRaises(SystemExit) as cm + ): + clinic.main(args) + return out.getvalue(), err.getvalue(), cm.exception.code + + def expect_success(self, *args): + out, err, code = self.run_clinic(*args) + if code != 0: + self.fail("\n".join([f"Unexpected failure: {args=}", out, err])) + self.assertEqual(err, "") + return out + + def expect_failure(self, *args): + out, err, code = self.run_clinic(*args) + self.assertNotEqual(code, 0, f"Unexpected success: {args=}") + return out, err + def test_external(self): CLINIC_TEST = 'clinic.test.c' - # bpo-42398: Test that the destination file is left unchanged if the - # content does not change. Moreover, check also that the file - # modification time does not change in this case. source = support.findfile(CLINIC_TEST) with open(source, 'r', encoding='utf-8') as f: orig_contents = f.read() - with os_helper.temp_dir() as tmp_dir: - testfile = os.path.join(tmp_dir, CLINIC_TEST) - with open(testfile, 'w', encoding='utf-8') as f: - f.write(orig_contents) - old_mtime_ns = os.stat(testfile).st_mtime_ns - - clinic.parse_file(testfile) + # Run clinic CLI and verify that it does not complain. 
+ self.addCleanup(unlink, TESTFN) + out = self.expect_success("-f", "-o", TESTFN, source) + self.assertEqual(out, "") - with open(testfile, 'r', encoding='utf-8') as f: - new_contents = f.read() - new_mtime_ns = os.stat(testfile).st_mtime_ns + with open(TESTFN, 'r', encoding='utf-8') as f: + new_contents = f.read() self.assertEqual(new_contents, orig_contents) + + def test_no_change(self): + # bpo-42398: Test that the destination file is left unchanged if the + # content does not change. Moreover, check also that the file + # modification time does not change in this case. + code = dedent(""" + /*[clinic input] + [clinic start generated code]*/ + /*[clinic end generated code: output=da39a3ee5e6b4b0d input=da39a3ee5e6b4b0d]*/ + """) + with os_helper.temp_dir() as tmp_dir: + fn = os.path.join(tmp_dir, "test.c") + with open(fn, "w", encoding="utf-8") as f: + f.write(code) + pre_mtime = os.stat(fn).st_mtime_ns + self.expect_success(fn) + post_mtime = os.stat(fn).st_mtime_ns # Don't change the file modification time # if the content does not change - self.assertEqual(new_mtime_ns, old_mtime_ns) + self.assertEqual(pre_mtime, post_mtime) + + def test_cli_force(self): + invalid_input = dedent(""" + /*[clinic input] + output preset block + module test + test.fn + a: int + [clinic start generated code]*/ + + const char *hand_edited = "output block is overwritten"; + /*[clinic end generated code: output=bogus input=bogus]*/ + """) + fail_msg = ( + "Checksum mismatch! Expected 'bogus', computed '2ed19'. " + "Suggested fix: remove all generated code including the end marker, " + "or use the '-f' option.\n" + ) + with os_helper.temp_dir() as tmp_dir: + fn = os.path.join(tmp_dir, "test.c") + with open(fn, "w", encoding="utf-8") as f: + f.write(invalid_input) + # First, run the CLI without -f and expect failure. + # Note, we cannot check the entire fail msg, because the path to + # the tmp file will change for every run. + _, err = self.expect_failure(fn) + self.assertTrue(err.endswith(fail_msg), + f"{err!r} does not end with {fail_msg!r}") + # Then, force regeneration; success expected. + out = self.expect_success("-f", fn) + self.assertEqual(out, "") + # Verify by checking the checksum. 
+ checksum = ( + "/*[clinic end generated code: " + "output=2124c291eb067d76 input=9543a8d2da235301]*/\n" + ) + with open(fn, 'r', encoding='utf-8') as f: + generated = f.read() + self.assertTrue(generated.endswith(checksum)) + + def test_cli_make(self): + c_code = dedent(""" + /*[clinic input] + [clinic start generated code]*/ + """) + py_code = "pass" + c_files = "file1.c", "file2.c" + py_files = "file1.py", "file2.py" + + def create_files(files, srcdir, code): + for fn in files: + path = os.path.join(srcdir, fn) + with open(path, "w", encoding="utf-8") as f: + f.write(code) + + with os_helper.temp_dir() as tmp_dir: + # add some folders, some C files and a Python file + create_files(c_files, tmp_dir, c_code) + create_files(py_files, tmp_dir, py_code) + + # create C files in externals/ dir + ext_path = os.path.join(tmp_dir, "externals") + with os_helper.temp_dir(path=ext_path) as externals: + create_files(c_files, externals, c_code) + + # run clinic in verbose mode with --make on tmpdir + out = self.expect_success("-v", "--make", "--srcdir", tmp_dir) + + # expect verbose mode to only mention the C files in tmp_dir + for filename in c_files: + with self.subTest(filename=filename): + path = os.path.join(tmp_dir, filename) + self.assertIn(path, out) + for filename in py_files: + with self.subTest(filename=filename): + path = os.path.join(tmp_dir, filename) + self.assertNotIn(path, out) + # don't expect C files from the externals dir + for filename in c_files: + with self.subTest(filename=filename): + path = os.path.join(ext_path, filename) + self.assertNotIn(path, out) + + def test_cli_make_exclude(self): + code = dedent(""" + /*[clinic input] + [clinic start generated code]*/ + """) + with os_helper.temp_dir(quiet=False) as tmp_dir: + # add some folders, some C files and a Python file + for fn in "file1.c", "file2.c", "file3.c", "file4.c": + path = os.path.join(tmp_dir, fn) + with open(path, "w", encoding="utf-8") as f: + f.write(code) + + # Run clinic in verbose mode with --make on tmpdir. + # Exclude file2.c and file3.c. + out = self.expect_success( + "-v", "--make", "--srcdir", tmp_dir, + "--exclude", os.path.join(tmp_dir, "file2.c"), + # The added ./ should be normalised away. + "--exclude", os.path.join(tmp_dir, "./file3.c"), + # Relative paths should also work. 
+ "--exclude", "file4.c" + ) + + # expect verbose mode to only mention the C files in tmp_dir + self.assertIn("file1.c", out) + self.assertNotIn("file2.c", out) + self.assertNotIn("file3.c", out) + self.assertNotIn("file4.c", out) + + def test_cli_verbose(self): + with os_helper.temp_dir() as tmp_dir: + fn = os.path.join(tmp_dir, "test.c") + with open(fn, "w", encoding="utf-8") as f: + f.write("") + out = self.expect_success("-v", fn) + self.assertEqual(out.strip(), fn) + + def test_cli_help(self): + out = self.expect_success("-h") + self.assertIn("usage: clinic.py", out) + + def test_cli_converters(self): + prelude = dedent(""" + Legacy converters: + B C D L O S U Y Z Z# + b c d f h i l p s s# s* u u# w* y y# y* z z# z* + + Converters: + """) + expected_converters = ( + "bool", + "byte", + "char", + "defining_class", + "double", + "fildes", + "float", + "int", + "long", + "long_long", + "object", + "Py_buffer", + "Py_complex", + "Py_ssize_t", + "Py_UNICODE", + "PyByteArrayObject", + "PyBytesObject", + "self", + "short", + "size_t", + "slice_index", + "str", + "unicode", + "unsigned_char", + "unsigned_int", + "unsigned_long", + "unsigned_long_long", + "unsigned_short", + ) + finale = dedent(""" + Return converters: + bool() + double() + float() + init() + int() + long() + Py_ssize_t() + size_t() + unsigned_int() + unsigned_long() + + All converters also accept (c_default=None, py_default=None, annotation=None). + All return converters also accept (py_default=None). + """) + out = self.expect_success("--converters") + # We cannot simply compare the output, because the repr of the *accept* + # param may change (it's a set, thus unordered). So, let's compare the + # start and end of the expected output, and then assert that the + # converters appear lined up in alphabetical order. 
+ self.assertTrue(out.startswith(prelude), out) + self.assertTrue(out.endswith(finale), out) + + out = out.removeprefix(prelude) + out = out.removesuffix(finale) + lines = out.split("\n") + for converter, line in zip(expected_converters, lines): + line = line.lstrip() + with self.subTest(converter=converter): + self.assertTrue( + line.startswith(converter), + f"expected converter {converter!r}, got {line!r}" + ) + + def test_cli_fail_converters_and_filename(self): + _, err = self.expect_failure("--converters", "test.c") + msg = "can't specify --converters and a filename at the same time" + self.assertIn(msg, err) + + def test_cli_fail_no_filename(self): + _, err = self.expect_failure() + self.assertIn("no input files", err) + + def test_cli_fail_output_and_multiple_files(self): + _, err = self.expect_failure("-o", "out.c", "input.c", "moreinput.c") + msg = "error: can't use -o with multiple filenames" + self.assertIn(msg, err) + + def test_cli_fail_filename_or_output_and_make(self): + msg = "can't use -o or filenames with --make" + for opts in ("-o", "out.c"), ("filename.c",): + with self.subTest(opts=opts): + _, err = self.expect_failure("--make", *opts) + self.assertIn(msg, err) + + def test_cli_fail_make_without_srcdir(self): + _, err = self.expect_failure("--make", "--srcdir", "") + msg = "error: --srcdir must not be empty with --make" + self.assertIn(msg, err) + + def test_file_dest(self): + block = dedent(""" + /*[clinic input] + destination test new file {path}.h + output everything test + func + a: object + / + [clinic start generated code]*/ + """) + expected_checksum_line = ( + "/*[clinic end generated code: " + "output=da39a3ee5e6b4b0d input=b602ab8e173ac3bd]*/\n" + ) + expected_output = dedent("""\ + /*[clinic input] + preserve + [clinic start generated code]*/ + + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + # include "pycore_gc.h" // PyGC_Head + # include "pycore_runtime.h" // _Py_ID() + #endif + + + PyDoc_VAR(func__doc__); + + PyDoc_STRVAR(func__doc__, + "func($module, a, /)\\n" + "--\\n" + "\\n"); + + #define FUNC_METHODDEF \\ + {"func", (PyCFunction)func, METH_O, func__doc__}, + static PyObject * + func(PyObject *module, PyObject *a) + /*[clinic end generated code: output=56c09670e89a0d9a input=a9049054013a1b77]*/ + """) + with os_helper.temp_dir() as tmp_dir: + in_fn = os.path.join(tmp_dir, "test.c") + out_fn = os.path.join(tmp_dir, "test.c.h") + with open(in_fn, "w", encoding="utf-8") as f: + f.write(block) + with open(out_fn, "w", encoding="utf-8") as f: + f.write("") # Write an empty output file! + # Clinic should complain about the empty output file. + _, err = self.expect_failure(in_fn) + expected_err = (f"Modified destination file {out_fn!r}; " + "not overwriting!") + self.assertIn(expected_err, err) + # Run clinic again, this time with the -f option. + _ = self.expect_success("-f", in_fn) + # Read back the generated output. + with open(in_fn, encoding="utf-8") as f: + data = f.read() + expected_block = f"{block}{expected_checksum_line}" + self.assertEqual(data, expected_block) + with open(out_fn, encoding="utf-8") as f: + data = f.read() + self.assertEqual(data, expected_output) try: import _testclinic as ac_tester @@ -1302,6 +2432,19 @@ class ClinicFunctionalTest(unittest.TestCase): locals().update((name, getattr(ac_tester, name)) for name in dir(ac_tester) if name.startswith('test_')) + def check_depr_star(self, pnames, fn, *args, **kwds): + regex = ( + fr"Passing( more than)?( [0-9]+)? positional argument(s)? 
to " + fr"{fn.__name__}\(\) is deprecated. Parameter(s)? {pnames} will " + fr"become( a)? keyword-only parameter(s)? in Python 3\.14" + ) + with self.assertWarnsRegex(DeprecationWarning, regex) as cm: + # Record the line number, so we're sure we've got the correct stack + # level on the deprecation warning. + _, lineno = fn(*args, **kwds), sys._getframe().f_lineno + self.assertEqual(cm.filename, __file__) + self.assertEqual(cm.lineno, lineno) + def test_objects_converter(self): with self.assertRaises(TypeError): ac_tester.objects_converter() @@ -1746,8 +2889,11 @@ def test_gh_99233_refcount(self): self.assertEqual(arg_refcount_origin, arg_refcount_after) def test_gh_99240_double_free(self): - expected_error = r'gh_99240_double_free\(\) argument 2 must be encoded string without null bytes, not str' - with self.assertRaisesRegex(TypeError, expected_error): + err = re.escape( + "gh_99240_double_free() argument 2 must be encoded string " + "without null bytes, not str" + ) + with self.assertRaisesRegex(TypeError, err): ac_tester.gh_99240_double_free('a', '\0b') def test_cloned_func_exception_message(self): @@ -1763,6 +2909,95 @@ def test_cloned_func_with_converter_exception_message(self): func = getattr(ac_tester, name) self.assertEqual(func(), name) + def test_depr_star_new(self): + regex = re.escape( + "Passing positional arguments to _testclinic.DeprStarNew() is " + "deprecated. Parameter 'a' will become a keyword-only parameter " + "in Python 3.14." + ) + with self.assertWarnsRegex(DeprecationWarning, regex) as cm: + ac_tester.DeprStarNew(None) + self.assertEqual(cm.filename, __file__) + + def test_depr_star_init(self): + regex = re.escape( + "Passing positional arguments to _testclinic.DeprStarInit() is " + "deprecated. Parameter 'a' will become a keyword-only parameter " + "in Python 3.14." 
+ ) + with self.assertWarnsRegex(DeprecationWarning, regex) as cm: + ac_tester.DeprStarInit(None) + self.assertEqual(cm.filename, __file__) + + def test_depr_star_pos0_len1(self): + fn = ac_tester.depr_star_pos0_len1 + fn(a=None) + self.check_depr_star("'a'", fn, "a") + + def test_depr_star_pos0_len2(self): + fn = ac_tester.depr_star_pos0_len2 + fn(a=0, b=0) + check = partial(self.check_depr_star, "'a' and 'b'", fn) + check("a", b=0) + check("a", "b") + + def test_depr_star_pos0_len3_with_kwd(self): + fn = ac_tester.depr_star_pos0_len3_with_kwd + fn(a=0, b=0, c=0, d=0) + check = partial(self.check_depr_star, "'a', 'b' and 'c'", fn) + check("a", b=0, c=0, d=0) + check("a", "b", c=0, d=0) + check("a", "b", "c", d=0) + + def test_depr_star_pos1_len1_opt(self): + fn = ac_tester.depr_star_pos1_len1_opt + fn(a=0, b=0) + fn("a", b=0) + fn(a=0) # b is optional + check = partial(self.check_depr_star, "'b'", fn) + check("a", "b") + + def test_depr_star_pos1_len1(self): + fn = ac_tester.depr_star_pos1_len1 + fn(a=0, b=0) + fn("a", b=0) + check = partial(self.check_depr_star, "'b'", fn) + check("a", "b") + + def test_depr_star_pos1_len2_with_kwd(self): + fn = ac_tester.depr_star_pos1_len2_with_kwd + fn(a=0, b=0, c=0, d=0), + fn("a", b=0, c=0, d=0), + check = partial(self.check_depr_star, "'b' and 'c'", fn) + check("a", "b", c=0, d=0), + check("a", "b", "c", d=0), + + def test_depr_star_pos2_len1(self): + fn = ac_tester.depr_star_pos2_len1 + fn(a=0, b=0, c=0) + fn("a", b=0, c=0) + fn("a", "b", c=0) + check = partial(self.check_depr_star, "'c'", fn) + check("a", "b", "c") + + def test_depr_star_pos2_len2(self): + fn = ac_tester.depr_star_pos2_len2 + fn(a=0, b=0, c=0, d=0) + fn("a", b=0, c=0, d=0) + fn("a", "b", c=0, d=0) + check = partial(self.check_depr_star, "'c' and 'd'", fn) + check("a", "b", "c", d=0) + check("a", "b", "c", "d") + + def test_depr_star_pos2_len2_with_kwd(self): + fn = ac_tester.depr_star_pos2_len2_with_kwd + fn(a=0, b=0, c=0, d=0, e=0) + fn("a", b=0, c=0, d=0, e=0) + fn("a", "b", c=0, d=0, e=0) + check = partial(self.check_depr_star, "'c' and 'd'", fn) + check("a", "b", "c", d=0, e=0) + check("a", "b", "c", "d", e=0) + class PermutationTests(unittest.TestCase): """Test permutation support functions.""" @@ -2034,5 +3269,95 @@ def test_suffix_all_lines(self): self.assertEqual(out, expected) +class ClinicReprTests(unittest.TestCase): + def test_Block_repr(self): + block = clinic.Block("foo") + expected_repr = "" + self.assertEqual(repr(block), expected_repr) + + block2 = clinic.Block("bar", "baz", [], "eggs", "spam") + expected_repr_2 = "" + self.assertEqual(repr(block2), expected_repr_2) + + block3 = clinic.Block( + input="longboi_" * 100, + dsl_name="wow_so_long", + signatures=[], + output="very_long_" * 100, + indent="" + ) + expected_repr_3 = ( + "" + ) + self.assertEqual(repr(block3), expected_repr_3) + + def test_Destination_repr(self): + c = _make_clinic() + + destination = clinic.Destination( + "foo", type="file", clinic=c, args=("eggs",) + ) + self.assertEqual( + repr(destination), "" + ) + + destination2 = clinic.Destination("bar", type="buffer", clinic=c) + self.assertEqual(repr(destination2), "") + + def test_Module_repr(self): + module = clinic.Module("foo", _make_clinic()) + self.assertRegex(repr(module), r"") + + def test_Class_repr(self): + cls = clinic.Class("foo", _make_clinic(), None, 'some_typedef', 'some_type_object') + self.assertRegex(repr(cls), r"") + + def test_FunctionKind_repr(self): + self.assertEqual( + repr(clinic.FunctionKind.INVALID), "" + ) + 
self.assertEqual( + repr(clinic.FunctionKind.CLASS_METHOD), "" + ) + + def test_Function_and_Parameter_reprs(self): + function = clinic.Function( + name='foo', + module=_make_clinic(), + cls=None, + c_basename=None, + full_name='foofoo', + return_converter=clinic.init_return_converter(), + kind=clinic.FunctionKind.METHOD_INIT, + coexist=False + ) + self.assertEqual(repr(function), "") + + converter = clinic.self_converter('bar', 'bar', function) + parameter = clinic.Parameter( + "bar", + kind=inspect.Parameter.POSITIONAL_OR_KEYWORD, + function=function, + converter=converter + ) + self.assertEqual(repr(parameter), "") + + def test_Monitor_repr(self): + monitor = clinic.cpp.Monitor() + self.assertRegex(repr(monitor), r"") + + monitor.line_number = 42 + monitor.stack.append(("token1", "condition1")) + self.assertRegex( + repr(monitor), r"" + ) + + monitor.stack.append(("token2", "condition2")) + self.assertRegex( + repr(monitor), + r"" + ) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index 94298003063593..e88b7c8572d9e8 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -717,11 +717,11 @@ def test_xdev(self): # Memory allocator debug hooks try: - import _testcapi + import _testinternalcapi except ImportError: pass else: - code = "import _testcapi; print(_testcapi.pymem_getallocatorsname())" + code = "import _testinternalcapi; print(_testinternalcapi.pymem_getallocatorsname())" with support.SuppressCrashReport(): out = self.run_xdev("-c", code, check_exitcode=False) if support.with_pymalloc(): @@ -783,7 +783,7 @@ def test_warnings_filter_precedence(self): self.assertEqual(out, expected_filters) def check_pythonmalloc(self, env_var, name): - code = 'import _testcapi; print(_testcapi.pymem_getallocatorsname())' + code = 'import _testinternalcapi; print(_testinternalcapi.pymem_getallocatorsname())' env = dict(os.environ) env.pop('PYTHONDEVMODE', None) if env_var is not None: diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index 85ce0a4b39d854..770184c5ef9a91 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -11,10 +11,9 @@ import warnings from test import support from test.support import (script_helper, requires_debug_ranges, - requires_specialization) + requires_specialization, C_RECURSION_LIMIT) from test.support.os_helper import FakePath - class TestSpecifics(unittest.TestCase): def compile_single(self, source): @@ -112,7 +111,7 @@ def __getitem__(self, key): @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI") def test_extended_arg(self): - repeat = 2000 + repeat = int(C_RECURSION_LIMIT * 0.9) longexpr = 'x = x or ' + '-x' * repeat g = {} code = textwrap.dedent(''' @@ -558,16 +557,12 @@ def test_yet_more_evil_still_undecodable(self): @support.cpython_only @unittest.skipIf(support.is_wasi, "exhausts limited stack on WASI") def test_compiler_recursion_limit(self): - # Expected limit is sys.getrecursionlimit() * the scaling factor - # in symtable.c (currently 3) - # We expect to fail *at* that limit, because we use up some of - # the stack depth limit in the test suite code - # So we check the expected limit and 75% of that - # XXX (ncoghlan): duplicating the scaling factor here is a little - # ugly. Perhaps it should be exposed somewhere... 
- fail_depth = sys.getrecursionlimit() * 3 - crash_depth = sys.getrecursionlimit() * 300 - success_depth = int(fail_depth * 0.75) + # Expected limit is C_RECURSION_LIMIT * 2 + # Duplicating the limit here is a little ugly. + # Perhaps it should be exposed somewhere... + fail_depth = C_RECURSION_LIMIT * 2 + 1 + crash_depth = C_RECURSION_LIMIT * 100 + success_depth = int(C_RECURSION_LIMIT * 1.8) def check_limit(prefix, repeated, mode="single"): expect_ok = prefix + repeated * success_depth diff --git a/Lib/test/test_compiler_assemble.py b/Lib/test/test_compiler_assemble.py index 6df72cbc54666b..5696433e529d0a 100644 --- a/Lib/test/test_compiler_assemble.py +++ b/Lib/test/test_compiler_assemble.py @@ -94,12 +94,12 @@ def inner(): instructions = [ ('RESUME', 0,), - ('PUSH_NULL', 0, 1), ('LOAD_CLOSURE', 0, 1), ('BUILD_TUPLE', 1, 1), ('LOAD_CONST', 1, 1), ('MAKE_FUNCTION', 0, 2), ('SET_FUNCTION_ATTRIBUTE', 8, 2), + ('PUSH_NULL', 0, 1), ('CALL', 0, 2), # (lambda: x)() ('LOAD_CONST', 2, 2), # 2 ('BINARY_OP', 6, 2), # % diff --git a/Lib/test/test_compiler_codegen.py b/Lib/test/test_compiler_codegen.py index d99bb8c6cd472d..6d7731ddba02c5 100644 --- a/Lib/test/test_compiler_codegen.py +++ b/Lib/test/test_compiler_codegen.py @@ -41,8 +41,8 @@ def test_for_loop(self): loop_lbl := self.Label(), ('FOR_ITER', exit_lbl := self.Label(), 1), ('STORE_NAME', 1, 1), - ('PUSH_NULL', None, 2), ('LOAD_NAME', 2, 2), + ('PUSH_NULL', None, 2), ('LOAD_NAME', 1, 2), ('CALL', 1, 2), ('POP_TOP', None), diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py index ecc5a43dad43da..dbc7dfcc24bf07 100644 --- a/Lib/test/test_contextlib.py +++ b/Lib/test/test_contextlib.py @@ -1085,7 +1085,7 @@ def first(): class TestExitStack(TestBaseExitStack, unittest.TestCase): exit_stack = ExitStack callback_error_internal_frames = [ - ('__exit__', 'raise exc_details[1]'), + ('__exit__', 'raise exc'), ('__exit__', 'if cb(*exc_details):'), ] diff --git a/Lib/test/test_contextlib_async.py b/Lib/test/test_contextlib_async.py index bb72ae74e5845f..fa89c23f8edd91 100644 --- a/Lib/test/test_contextlib_async.py +++ b/Lib/test/test_contextlib_async.py @@ -557,7 +557,7 @@ def __exit__(self, *exc_details): ('__exit__', 'return self.run_coroutine(self.__aexit__(*exc_details))'), ('run_coroutine', 'raise exc'), ('run_coroutine', 'raise exc'), - ('__aexit__', 'raise exc_details[1]'), + ('__aexit__', 'raise exc'), ('__aexit__', 'cb_suppress = cb(*exc_details)'), ] diff --git a/Lib/test/test_ctypes/test_as_parameter.py b/Lib/test/test_ctypes/test_as_parameter.py index 39f70e864757d5..a1a8745e737fa2 100644 --- a/Lib/test/test_ctypes/test_as_parameter.py +++ b/Lib/test/test_ctypes/test_as_parameter.py @@ -192,7 +192,7 @@ class S8I(Structure): (9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9)) def test_recursive_as_param(self): - class A(object): + class A: pass a = A() @@ -201,7 +201,7 @@ class A(object): c_int.from_param(a) -class AsParamWrapper(object): +class AsParamWrapper: def __init__(self, param): self._as_parameter_ = param @@ -209,7 +209,7 @@ class AsParamWrapperTestCase(BasicWrapTestCase): wrap = AsParamWrapper -class AsParamPropertyWrapper(object): +class AsParamPropertyWrapper: def __init__(self, param): self._param = param diff --git a/Lib/test/test_ctypes/test_callbacks.py b/Lib/test/test_ctypes/test_callbacks.py index 037677e37ab34a..6fe3e119672409 100644 --- a/Lib/test/test_ctypes/test_callbacks.py +++ b/Lib/test/test_ctypes/test_callbacks.py @@ -120,7 +120,7 @@ def test_unsupported_restype_2(self): def test_issue_7959(self): proto 
= self.functype.__func__(None) - class X(object): + class X: def func(self): pass def __init__(self): self.v = proto(self.func) diff --git a/Lib/test/test_ctypes/test_functions.py b/Lib/test/test_ctypes/test_functions.py index 9cf680f16620ac..08eecbc9ea4442 100644 --- a/Lib/test/test_ctypes/test_functions.py +++ b/Lib/test/test_ctypes/test_functions.py @@ -4,8 +4,8 @@ import unittest from ctypes import (CDLL, Structure, Array, CFUNCTYPE, byref, POINTER, pointer, ArgumentError, - c_char, c_wchar, c_byte, c_char_p, - c_short, c_int, c_long, c_longlong, + c_char, c_wchar, c_byte, c_char_p, c_wchar_p, + c_short, c_int, c_long, c_longlong, c_void_p, c_float, c_double, c_longdouble) from _ctypes import _Pointer, _SimpleCData @@ -92,6 +92,54 @@ def test_wchar_parm(self): "argument 2: TypeError: one character unicode string " "expected") + def test_c_char_p_parm(self): + """Test the error message when converting an incompatible type to c_char_p.""" + proto = CFUNCTYPE(c_int, c_char_p) + def callback(*args): + return 0 + + callback = proto(callback) + self.assertEqual(callback(b"abc"), 0) + + with self.assertRaises(ArgumentError) as cm: + callback(10) + + self.assertEqual(str(cm.exception), + "argument 1: TypeError: 'int' object cannot be " + "interpreted as ctypes.c_char_p") + + def test_c_wchar_p_parm(self): + """Test the error message when converting an incompatible type to c_wchar_p.""" + proto = CFUNCTYPE(c_int, c_wchar_p) + def callback(*args): + return 0 + + callback = proto(callback) + self.assertEqual(callback("abc"), 0) + + with self.assertRaises(ArgumentError) as cm: + callback(10) + + self.assertEqual(str(cm.exception), + "argument 1: TypeError: 'int' object cannot be " + "interpreted as ctypes.c_wchar_p") + + def test_c_void_p_parm(self): + """Test the error message when converting an incompatible type to c_void_p.""" + proto = CFUNCTYPE(c_int, c_void_p) + def callback(*args): + return 0 + + callback = proto(callback) + self.assertEqual(callback(5), 0) + + with self.assertRaises(ArgumentError) as cm: + callback(2.5) + + self.assertEqual(str(cm.exception), + "argument 1: TypeError: 'float' object cannot be " + "interpreted as ctypes.c_void_p") + def test_wchar_result(self): f = dll._testfunc_i_bhilfd f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double] diff --git a/Lib/test/test_ctypes/test_keeprefs.py b/Lib/test/test_ctypes/test_keeprefs.py index 23b03b64b4a716..c6fe1de62eae7c 100644 --- a/Lib/test/test_ctypes/test_keeprefs.py +++ b/Lib/test/test_ctypes/test_keeprefs.py @@ -98,6 +98,33 @@ def test_p_cint(self): x = pointer(i) self.assertEqual(x._objects, {'1': i}) + def test_pp_ownership(self): + d = c_int(123) + n = c_int(456) + + p = pointer(d) + pp = pointer(p) + + self.assertIs(pp._objects['1'], p) + self.assertIs(pp._objects['0']['1'], d) + + pp.contents.contents = n + + self.assertIs(pp._objects['1'], p) + self.assertIs(pp._objects['0']['1'], n) + + self.assertIs(p._objects['1'], n) + self.assertEqual(len(p._objects), 1) + + del d + del p + + self.assertIs(pp._objects['0']['1'], n) + self.assertEqual(len(pp._objects), 2) + + del n + + self.assertEqual(len(pp._objects), 2) class PointerToStructure(unittest.TestCase): def test(self): diff --git a/Lib/test/test_ctypes/test_numbers.py b/Lib/test/test_ctypes/test_numbers.py index fd318f9a18e533..29108a28ec16e1 100644 --- a/Lib/test/test_ctypes/test_numbers.py +++ b/Lib/test/test_ctypes/test_numbers.py @@ -86,7 +86,7 @@ def test_byref(self): def test_floats(self): # c_float and c_double can be created from # Python int and 
float - class FloatLike(object): + class FloatLike: def __float__(self): return 2.0 f = FloatLike() @@ -97,15 +97,15 @@ def __float__(self): self.assertEqual(t(f).value, 2.0) def test_integers(self): - class FloatLike(object): + class FloatLike: def __float__(self): return 2.0 f = FloatLike() - class IntLike(object): + class IntLike: def __int__(self): return 2 d = IntLike() - class IndexLike(object): + class IndexLike: def __index__(self): return 2 i = IndexLike() diff --git a/Lib/test/test_ctypes/test_parameters.py b/Lib/test/test_ctypes/test_parameters.py index 40979212a627d8..d1eeee6b0306fe 100644 --- a/Lib/test/test_ctypes/test_parameters.py +++ b/Lib/test/test_ctypes/test_parameters.py @@ -157,7 +157,7 @@ def test_noctypes_argtype(self): # TypeError: has no from_param method self.assertRaises(TypeError, setattr, func, "argtypes", (object,)) - class Adapter(object): + class Adapter: def from_param(cls, obj): return None @@ -165,7 +165,7 @@ def from_param(cls, obj): self.assertEqual(func(None), None) self.assertEqual(func(object()), None) - class Adapter(object): + class Adapter: def from_param(cls, obj): return obj @@ -174,7 +174,7 @@ def from_param(cls, obj): self.assertRaises(ArgumentError, func, object()) self.assertEqual(func(c_void_p(42)), 42) - class Adapter(object): + class Adapter: def from_param(cls, obj): raise ValueError(obj) diff --git a/Lib/test/test_dataclasses.py b/Lib/test/test_dataclasses.py index 6669f1c57e2e78..bd8d82438414e6 100644 --- a/Lib/test/test_dataclasses.py +++ b/Lib/test/test_dataclasses.py @@ -134,7 +134,7 @@ class C: # Non-defaults following defaults. with self.assertRaisesRegex(TypeError, "non-default argument 'y' follows " - "default argument"): + "default argument 'x'"): @dataclass class C: x: int = 0 @@ -143,7 +143,7 @@ class C: # A derived class adds a non-default field after a default one. with self.assertRaisesRegex(TypeError, "non-default argument 'y' follows " - "default argument"): + "default argument 'x'"): @dataclass class B: x: int = 0 @@ -156,7 +156,7 @@ class C(B): # a field which didn't use to have a default. with self.assertRaisesRegex(TypeError, "non-default argument 'y' follows " - "default argument"): + "default argument 'x'"): @dataclass class B: x: int @@ -4521,7 +4521,7 @@ class A: # Make sure we still check for non-kwarg non-defaults not following # defaults. 
- err_regex = "non-default argument 'z' follows default argument" + err_regex = "non-default argument 'z' follows default argument 'a'" with self.assertRaisesRegex(TypeError, err_regex): @dataclass class A: diff --git a/Lib/test/test_dbm.py b/Lib/test/test_dbm.py index f21eebc6530c76..e3924d8ec8b5c1 100644 --- a/Lib/test/test_dbm.py +++ b/Lib/test/test_dbm.py @@ -155,6 +155,21 @@ def test_keys(self): self.assertNotIn(b'xxx', d) self.assertRaises(KeyError, lambda: d[b'xxx']) + def test_clear(self): + with dbm.open(_fname, 'c') as d: + self.assertEqual(d.keys(), []) + a = [(b'a', b'b'), (b'12345678910', b'019237410982340912840198242')] + for k, v in a: + d[k] = v + for k, _ in a: + self.assertIn(k, d) + self.assertEqual(len(d), len(a)) + + d.clear() + self.assertEqual(len(d), 0) + for k, _ in a: + self.assertNotIn(k, d) + def setUp(self): self.addCleanup(setattr, dbm, '_defaultmod', dbm._defaultmod) dbm._defaultmod = self.module diff --git a/Lib/test/test_dbm_gnu.py b/Lib/test/test_dbm_gnu.py index 73602cab5180fc..e20addf1f04f1b 100644 --- a/Lib/test/test_dbm_gnu.py +++ b/Lib/test/test_dbm_gnu.py @@ -192,6 +192,20 @@ def test_open_with_bytes_path(self): def test_open_with_pathlib_bytes_path(self): gdbm.open(FakePath(os.fsencode(filename)), "c").close() + def test_clear(self): + kvs = [('foo', 'bar'), ('1234', '5678')] + with gdbm.open(filename, 'c') as db: + for k, v in kvs: + db[k] = v + self.assertIn(k, db) + self.assertEqual(len(db), len(kvs)) + + db.clear() + for k, v in kvs: + self.assertNotIn(k, db) + self.assertEqual(len(db), 0) + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_dbm_ndbm.py b/Lib/test/test_dbm_ndbm.py index 8f37e3cc624e2e..e0f31c9a9a337d 100644 --- a/Lib/test/test_dbm_ndbm.py +++ b/Lib/test/test_dbm_ndbm.py @@ -147,6 +147,19 @@ def test_bool_on_closed_db_raises(self): db['a'] = 'b' self.assertRaises(dbm.ndbm.error, bool, db) + def test_clear(self): + kvs = [('foo', 'bar'), ('1234', '5678')] + with dbm.ndbm.open(self.filename, 'c') as db: + for k, v in kvs: + db[k] = v + self.assertIn(k, db) + self.assertEqual(len(db), len(kvs)) + + db.clear() + for k, v in kvs: + self.assertNotIn(k, db) + self.assertEqual(len(db), 0) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index db67f37608f1f2..fc66a309788ac1 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -5701,6 +5701,36 @@ def test_c_disallow_instantiation(self): ContextManager = type(C.localcontext()) check_disallow_instantiation(self, ContextManager) + def test_c_signaldict_segfault(self): + # See gh-106263 for details. 
+ SignalDict = type(C.Context().flags) + sd = SignalDict() + err_msg = "invalid signal dict" + + with self.assertRaisesRegex(ValueError, err_msg): + len(sd) + + with self.assertRaisesRegex(ValueError, err_msg): + iter(sd) + + with self.assertRaisesRegex(ValueError, err_msg): + repr(sd) + + with self.assertRaisesRegex(ValueError, err_msg): + sd[C.InvalidOperation] = True + + with self.assertRaisesRegex(ValueError, err_msg): + sd[C.InvalidOperation] + + with self.assertRaisesRegex(ValueError, err_msg): + sd == C.Context().flags + + with self.assertRaisesRegex(ValueError, err_msg): + C.Context().flags == sd + + with self.assertRaisesRegex(ValueError, err_msg): + sd.copy() + @requires_docstrings @requires_cdecimal class SignatureTest(unittest.TestCase): diff --git a/Lib/test/test_descrtut.py b/Lib/test/test_descrtut.py index 7796031ed0602f..13e3ea41bdb76c 100644 --- a/Lib/test/test_descrtut.py +++ b/Lib/test/test_descrtut.py @@ -139,7 +139,7 @@ def merge(self, other): >>> a.x1 = 1 Traceback (most recent call last): File "", line 1, in ? - AttributeError: 'defaultdict2' object has no attribute 'x1' + AttributeError: 'defaultdict2' object has no attribute 'x1' and no __dict__ for setting new attributes >>> """ diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index 79638340059f65..fbc6ce8282de3c 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -8,7 +8,7 @@ import unittest import weakref from test import support -from test.support import import_helper +from test.support import import_helper, C_RECURSION_LIMIT class DictTest(unittest.TestCase): @@ -596,7 +596,7 @@ def __repr__(self): def test_repr_deep(self): d = {} - for i in range(sys.getrecursionlimit() + 100): + for i in range(C_RECURSION_LIMIT + 1): d = {1: d} self.assertRaises(RecursionError, repr, d) diff --git a/Lib/test/test_dictviews.py b/Lib/test/test_dictviews.py index 924f4a6829e19c..2bd9d6eef8cfc6 100644 --- a/Lib/test/test_dictviews.py +++ b/Lib/test/test_dictviews.py @@ -3,6 +3,7 @@ import pickle import sys import unittest +from test.support import C_RECURSION_LIMIT class DictSetTest(unittest.TestCase): @@ -279,7 +280,7 @@ def test_recursive_repr(self): def test_deeply_nested_repr(self): d = {} - for i in range(sys.getrecursionlimit() + 100): + for i in range(C_RECURSION_LIMIT//2 + 100): d = {42: d.values()} self.assertRaises(RecursionError, repr, d) diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index 8597b8f14ac058..f49c60a01a54ea 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -102,7 +102,7 @@ def _f(a): dis_f = """\ %3d RESUME 0 -%3d LOAD_GLOBAL 1 (NULL + print) +%3d LOAD_GLOBAL 1 (print + NULL) LOAD_FAST 0 (a) CALL 1 POP_TOP @@ -131,7 +131,7 @@ def bug708901(): dis_bug708901 = """\ %3d RESUME 0 -%3d LOAD_GLOBAL 1 (NULL + range) +%3d LOAD_GLOBAL 1 (range + NULL) LOAD_CONST 1 (1) %3d LOAD_CONST 2 (10) @@ -236,7 +236,7 @@ def wrap_func_w_kwargs(): dis_kw_names = """\ %3d RESUME 0 -%3d LOAD_GLOBAL 1 (NULL + func_w_kwargs) +%3d LOAD_GLOBAL 1 (func_w_kwargs + NULL) LOAD_CONST 1 (1) LOAD_CONST 2 (2) LOAD_CONST 3 (5) @@ -345,8 +345,8 @@ def wrap_func_w_kwargs(): LOAD_CONST 1 ('x') STORE_SUBSCR - 3 PUSH_NULL - LOAD_NAME 3 (fun) + 3 LOAD_NAME 3 (fun) + PUSH_NULL LOAD_CONST 0 (1) CALL 1 LOAD_NAME 2 (__annotations__) @@ -355,8 +355,8 @@ def wrap_func_w_kwargs(): 4 LOAD_CONST 0 (1) LOAD_NAME 4 (lst) - PUSH_NULL LOAD_NAME 3 (fun) + PUSH_NULL LOAD_CONST 3 (0) CALL 1 STORE_SUBSCR @@ -615,14 +615,14 @@ def _tryfinallyconst(b): %3d LOAD_FAST 0 (a) -%3d PUSH_NULL - LOAD_FAST 1 (b) +%3d 
LOAD_FAST 1 (b) + PUSH_NULL CALL 0 POP_TOP RETURN_VALUE >> PUSH_EXC_INFO - PUSH_NULL LOAD_FAST 1 (b) + PUSH_NULL CALL 0 POP_TOP RERAISE 0 @@ -644,14 +644,14 @@ def _tryfinallyconst(b): %3d NOP -%3d PUSH_NULL - LOAD_FAST 0 (b) +%3d LOAD_FAST 0 (b) + PUSH_NULL CALL 0 POP_TOP RETURN_CONST 1 (1) PUSH_EXC_INFO - PUSH_NULL LOAD_FAST 0 (b) + PUSH_NULL CALL 0 POP_TOP RERAISE 0 @@ -710,7 +710,7 @@ def foo(x): %3d RESUME 0 -%3d LOAD_GLOBAL 1 (NULL + list) +%3d LOAD_GLOBAL 1 (list + NULL) LOAD_FAST 0 (x) BUILD_TUPLE 1 LOAD_CONST 1 ( at 0x..., file "%s", line %d>) @@ -792,7 +792,7 @@ def loop_test(): >> FOR_ITER_LIST 14 (to 48) STORE_FAST 0 (i) -%3d LOAD_GLOBAL_MODULE 1 (NULL + load_test) +%3d LOAD_GLOBAL_MODULE 1 (load_test + NULL) LOAD_FAST 0 (i) CALL_PY_WITH_DEFAULTS 1 POP_TOP @@ -1230,8 +1230,8 @@ def test_call_specialize(self): call_quicken = """\ 0 RESUME 0 - 1 PUSH_NULL - LOAD_NAME 0 (str) + 1 LOAD_NAME 0 (str) + PUSH_NULL LOAD_CONST 0 (1) CALL_NO_KW_STR_1 1 RETURN_VALUE @@ -1244,10 +1244,14 @@ def test_call_specialize(self): @cpython_only @requires_specialization def test_loop_quicken(self): + import _testinternalcapi # Loop can trigger a quicken where the loop is located self.code_quicken(loop_test, 1) got = self.get_disassembly(loop_test, adaptive=True) - self.do_disassembly_compare(got, dis_loop_test_quickened_code) + expected = dis_loop_test_quickened_code + if _testinternalcapi.get_optimizer(): + expected = expected.replace("JUMP_BACKWARD ", "ENTER_EXECUTOR") + self.do_disassembly_compare(got, expected) @cpython_only def test_extended_arg_quick(self): @@ -1625,7 +1629,7 @@ def _prepare_test_cases(): Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=177, arg=8, argval=8, argrepr='closure', offset=18, start_offset=18, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=177, arg=1, argval=1, argrepr='defaults', offset=20, start_offset=20, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='f', argrepr='f', offset=22, start_offset=22, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='NULL + print', offset=24, start_offset=24, starts_line=7, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print + NULL', offset=24, start_offset=24, starts_line=7, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=0, argval='a', argrepr='a', offset=34, start_offset=34, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=1, argval='b', argrepr='b', offset=36, start_offset=36, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval='', argrepr="''", offset=38, start_offset=38, starts_line=None, is_jump_target=False, positions=None), @@ -1655,7 +1659,7 @@ def _prepare_test_cases(): Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=177, arg=8, argval=8, argrepr='closure', offset=24, start_offset=24, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='SET_FUNCTION_ATTRIBUTE', opcode=177, arg=1, argval=1, argrepr='defaults', offset=26, start_offset=26, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='inner', argrepr='inner', offset=28, start_offset=28, starts_line=None, is_jump_target=False, 
positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='NULL + print', offset=30, start_offset=30, starts_line=5, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=5, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=3, argval='a', argrepr='a', offset=40, start_offset=40, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=4, argval='b', argrepr='b', offset=42, start_offset=42, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=0, argval='c', argrepr='c', offset=44, start_offset=44, starts_line=None, is_jump_target=False, positions=None), @@ -1669,7 +1673,7 @@ def _prepare_test_cases(): expected_opinfo_inner = [ Instruction(opname='COPY_FREE_VARS', opcode=149, arg=4, argval=4, argrepr='', offset=0, start_offset=0, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='RESUME', opcode=151, arg=0, argval=0, argrepr='', offset=2, start_offset=2, starts_line=3, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='NULL + print', offset=4, start_offset=4, starts_line=4, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print + NULL', offset=4, start_offset=4, starts_line=4, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=2, argval='a', argrepr='a', offset=14, start_offset=14, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=3, argval='b', argrepr='b', offset=16, start_offset=16, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_DEREF', opcode=137, arg=4, argval='c', argrepr='c', offset=18, start_offset=18, starts_line=None, is_jump_target=False, positions=None), @@ -1682,13 +1686,13 @@ def _prepare_test_cases(): expected_opinfo_jumpy = [ Instruction(opname='RESUME', opcode=151, arg=0, argval=0, argrepr='', offset=0, start_offset=0, starts_line=1, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='range', argrepr='NULL + range', offset=2, start_offset=2, starts_line=3, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='range', argrepr='range + NULL', offset=2, start_offset=2, starts_line=3, is_jump_target=False, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=10, argrepr='10', offset=12, start_offset=12, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=14, start_offset=14, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='GET_ITER', opcode=68, arg=None, argval=None, argrepr='', offset=22, start_offset=22, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='FOR_ITER', opcode=93, arg=28, argval=84, argrepr='to 84', offset=24, start_offset=24, starts_line=None, is_jump_target=True, positions=None), Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=28, start_offset=28, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', 
offset=30, start_offset=30, starts_line=4, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=4, is_jump_target=False, positions=None), Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=40, start_offset=40, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=42, start_offset=42, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=50, start_offset=50, starts_line=None, is_jump_target=False, positions=None), @@ -1705,14 +1709,14 @@ def _prepare_test_cases(): Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=80, start_offset=80, starts_line=8, is_jump_target=True, positions=None), Instruction(opname='JUMP_FORWARD', opcode=110, arg=12, argval=108, argrepr='to 108', offset=82, start_offset=82, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='END_FOR', opcode=4, arg=None, argval=None, argrepr='', offset=84, start_offset=84, starts_line=3, is_jump_target=True, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=86, start_offset=86, starts_line=10, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='print + NULL', offset=86, start_offset=86, starts_line=10, is_jump_target=False, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=96, start_offset=96, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=98, start_offset=98, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=106, start_offset=106, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='LOAD_FAST_CHECK', opcode=127, arg=0, argval='i', argrepr='i', offset=108, start_offset=108, starts_line=11, is_jump_target=True, positions=None), Instruction(opname='TO_BOOL', opcode=6, arg=None, argval=None, argrepr='', offset=110, start_offset=110, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=37, argval=194, argrepr='to 194', offset=118, start_offset=118, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=120, start_offset=120, starts_line=12, is_jump_target=True, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='print + NULL', offset=120, start_offset=120, starts_line=12, is_jump_target=True, positions=None), Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=130, start_offset=130, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=132, start_offset=132, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=140, start_offset=140, starts_line=None, is_jump_target=False, positions=None), @@ -1734,7 +1738,7 @@ def _prepare_test_cases(): 
Instruction(opname='TO_BOOL', opcode=6, arg=None, argval=None, argrepr='', offset=180, start_offset=180, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=2, argval=194, argrepr='to 194', offset=188, start_offset=188, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='JUMP_BACKWARD', opcode=140, arg=37, argval=120, argrepr='to 120', offset=190, start_offset=190, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=194, start_offset=194, starts_line=19, is_jump_target=True, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='print + NULL', offset=194, start_offset=194, starts_line=19, is_jump_target=True, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=204, start_offset=204, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=206, start_offset=206, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=214, start_offset=214, starts_line=None, is_jump_target=False, positions=None), @@ -1746,7 +1750,7 @@ def _prepare_test_cases(): Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=228, start_offset=228, starts_line=25, is_jump_target=False, positions=None), Instruction(opname='BEFORE_WITH', opcode=53, arg=None, argval=None, argrepr='', offset=230, start_offset=230, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=232, start_offset=232, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=234, start_offset=234, starts_line=26, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='print + NULL', offset=234, start_offset=234, starts_line=26, is_jump_target=False, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Never reach this', argrepr="'Never reach this'", offset=244, start_offset=244, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=246, start_offset=246, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=254, start_offset=254, starts_line=None, is_jump_target=False, positions=None), @@ -1755,7 +1759,7 @@ def _prepare_test_cases(): Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=260, start_offset=260, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=2, argval=2, argrepr='', offset=262, start_offset=262, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=270, start_offset=270, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=272, start_offset=272, starts_line=28, is_jump_target=True, 
positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='print + NULL', offset=272, start_offset=272, starts_line=28, is_jump_target=True, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=282, start_offset=282, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=284, start_offset=284, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=292, start_offset=292, starts_line=None, is_jump_target=False, positions=None), @@ -1778,7 +1782,7 @@ def _prepare_test_cases(): Instruction(opname='CHECK_EXC_MATCH', opcode=36, arg=None, argval=None, argrepr='', offset=342, start_offset=342, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=15, argval=376, argrepr='to 376', offset=344, start_offset=344, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=346, start_offset=346, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=348, start_offset=348, starts_line=23, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='print + NULL', offset=348, start_offset=348, starts_line=23, is_jump_target=False, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=358, start_offset=358, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=360, start_offset=360, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=368, start_offset=368, starts_line=None, is_jump_target=False, positions=None), @@ -1789,7 +1793,7 @@ def _prepare_test_cases(): Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=380, start_offset=380, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='RERAISE', opcode=119, arg=1, argval=1, argrepr='', offset=382, start_offset=382, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='PUSH_EXC_INFO', opcode=35, arg=None, argval=None, argrepr='', offset=384, start_offset=384, starts_line=None, is_jump_target=False, positions=None), - Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='NULL + print', offset=386, start_offset=386, starts_line=28, is_jump_target=False, positions=None), + Instruction(opname='LOAD_GLOBAL', opcode=116, arg=3, argval='print', argrepr='print + NULL', offset=386, start_offset=386, starts_line=28, is_jump_target=False, positions=None), Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=396, start_offset=396, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='CALL', opcode=171, arg=1, argval=1, argrepr='', offset=398, start_offset=398, starts_line=None, is_jump_target=False, positions=None), Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=406, start_offset=406, 
starts_line=None, is_jump_target=False, positions=None), diff --git a/Lib/test/test_dtrace.py b/Lib/test/test_dtrace.py index 4b971deacc1a5c..e1adf8e9748506 100644 --- a/Lib/test/test_dtrace.py +++ b/Lib/test/test_dtrace.py @@ -3,6 +3,7 @@ import re import subprocess import sys +import sysconfig import types import unittest @@ -173,6 +174,87 @@ class SystemTapOptimizedTests(TraceTests, unittest.TestCase): backend = SystemTapBackend() optimize_python = 2 +class CheckDtraceProbes(unittest.TestCase): + @classmethod + def setUpClass(cls): + if sysconfig.get_config_var('WITH_DTRACE'): + readelf_major_version, readelf_minor_version = cls.get_readelf_version() + if support.verbose: + print(f"readelf version: {readelf_major_version}.{readelf_minor_version}") + else: + raise unittest.SkipTest("CPython must be configured with the --with-dtrace option.") + + + @staticmethod + def get_readelf_version(): + try: + cmd = ["readelf", "--version"] + proc = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + ) + with proc: + version, stderr = proc.communicate() + + if proc.returncode: + raise Exception( + f"Command {' '.join(cmd)!r} failed " + f"with exit code {proc.returncode}: " + f"stdout={version!r} stderr={stderr!r}" + ) + except OSError: + raise unittest.SkipTest("Couldn't find readelf on the path") + + # Regex to parse: + # 'GNU readelf (GNU Binutils) 2.40.0\n' -> 2.40 + match = re.search(r"^(?:GNU) readelf.*?\b(\d+)\.(\d+)", version) + if match is None: + raise unittest.SkipTest(f"Unable to parse readelf version: {version}") + + return int(match.group(1)), int(match.group(2)) + + def get_readelf_output(self): + command = ["readelf", "-n", sys.executable] + stdout, _ = subprocess.Popen( + command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + ).communicate() + return stdout + + def test_check_probes(self): + readelf_output = self.get_readelf_output() + + available_probe_names = [ + "Name: import__find__load__done", + "Name: import__find__load__start", + "Name: audit", + "Name: gc__start", + "Name: gc__done", + ] + + for probe_name in available_probe_names: + with self.subTest(probe_name=probe_name): + self.assertIn(probe_name, readelf_output) + + @unittest.expectedFailure + def test_missing_probes(self): + readelf_output = self.get_readelf_output() + + # Missing probes will be added in the future. + missing_probe_names = [ + "Name: function__entry", + "Name: function__return", + "Name: line", + ] + + for probe_name in missing_probe_names: + with self.subTest(probe_name=probe_name): + self.assertIn(probe_name, readelf_output) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_email/data/msg_47.txt b/Lib/test/test_email/data/msg_47.txt new file mode 100644 index 00000000000000..bb48b47d96baf8 --- /dev/null +++ b/Lib/test/test_email/data/msg_47.txt @@ -0,0 +1,14 @@ +Date: 01 Jan 2001 00:01+0000 +From: arthur@example.example +MIME-Version: 1.0 +Content-Type: multipart/mixed; boundary=foo + +--foo +Content-Type: text/plain +bar + +--foo +Content-Type: text/html +
baz
+ +--foo-- \ No newline at end of file diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py index 5238944d6b4788..cdb6ef1275e520 100644 --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -3319,90 +3319,32 @@ def test_getaddresses(self): [('Al Person', 'aperson@dom.ain'), ('Bud Person', 'bperson@dom.ain')]) - def test_getaddresses_parsing_errors(self): - """Test for parsing errors from CVE-2023-27043""" - eq = self.assertEqual - eq(utils.getaddresses(['alice@example.org(']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org)']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org<']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org>']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org@']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org,']), - [('', 'alice@example.org'), ('', 'bob@example.com')]) - eq(utils.getaddresses(['alice@example.org;']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org:']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org.']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org"']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org[']), - [('', '')]) - eq(utils.getaddresses(['alice@example.org]']), - [('', '')]) - - def test_parseaddr_parsing_errors(self): - """Test for parsing errors from CVE-2023-27043""" - eq = self.assertEqual - eq(utils.parseaddr(['alice@example.org(']), - ('', '')) - eq(utils.parseaddr(['alice@example.org)']), - ('', '')) - eq(utils.parseaddr(['alice@example.org<']), - ('', '')) - eq(utils.parseaddr(['alice@example.org>']), - ('', '')) - eq(utils.parseaddr(['alice@example.org@']), - ('', '')) - eq(utils.parseaddr(['alice@example.org,']), - ('', '')) - eq(utils.parseaddr(['alice@example.org;']), - ('', '')) - eq(utils.parseaddr(['alice@example.org:']), - ('', '')) - eq(utils.parseaddr(['alice@example.org.']), - ('', '')) - eq(utils.parseaddr(['alice@example.org"']), - ('', '')) - eq(utils.parseaddr(['alice@example.org[']), - ('', '')) - eq(utils.parseaddr(['alice@example.org]']), - ('', '')) + def test_getaddresses_comma_in_name(self): + """GH-106669 regression test.""" + self.assertEqual( + utils.getaddresses( + [ + '"Bud, Person" ', + 'aperson@dom.ain (Al Person)', + '"Mariusz Felisiak" ', + ] + ), + [ + ('Bud, Person', 'bperson@dom.ain'), + ('Al Person', 'aperson@dom.ain'), + ('Mariusz Felisiak', 'to@example.com'), + ], + ) def test_getaddresses_nasty(self): eq = self.assertEqual eq(utils.getaddresses(['foo: ;']), [('', '')]) - eq(utils.getaddresses(['[]*-- =~$']), [('', '')]) + eq(utils.getaddresses( + ['[]*-- =~$']), + [('', ''), ('', ''), ('', '*--')]) eq(utils.getaddresses( ['foo: ;', '"Jason R. Mastaler" ']), [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')]) - eq(utils.getaddresses( - [r'Pete(A nice \) chap) ']), - [('Pete (A nice ) chap his account his host)', 'pete@silly.test')]) - eq(utils.getaddresses( - ['(Empty list)(start)Undisclosed recipients :(nobody(I know))']), - [('', '')]) - eq(utils.getaddresses( - ['Mary <@machine.tld:mary@example.net>, , jdoe@test . 
example']), - [('Mary', 'mary@example.net'), ('', ''), ('', 'jdoe@test.example')]) - eq(utils.getaddresses( - ['John Doe ']), - [('John Doe (comment)', 'jdoe@machine.example')]) - eq(utils.getaddresses( - ['"Mary Smith: Personal Account" ']), - [('Mary Smith: Personal Account', 'smith@home.example')]) - eq(utils.getaddresses( - ['Undisclosed recipients:;']), - [('', '')]) - eq(utils.getaddresses( - [r', "Giant; \"Big\" Box" ']), - [('', 'boss@nil.test'), ('Giant; "Big" Box', 'bob@example.net')]) def test_getaddresses_embedded_comment(self): """Test proper handling of a nested comment""" @@ -3770,6 +3712,16 @@ def test_bytes_header_parser(self): self.assertIsInstance(msg.get_payload(), str) self.assertIsInstance(msg.get_payload(decode=True), bytes) + def test_header_parser_multipart_is_valid(self): + # Don't flag valid multipart emails as having defects + with openfile('msg_47.txt', encoding="utf-8") as fp: + msgdata = fp.read() + + parser = email.parser.Parser(policy=email.policy.default) + parsed_msg = parser.parsestr(msgdata, headersonly=True) + + self.assertEqual(parsed_msg.defects, []) + def test_bytes_parser_does_not_close_file(self): with openfile('msg_02.txt', 'rb') as fp: email.parser.BytesParser().parse(fp) diff --git a/Lib/test/test_exception_group.py b/Lib/test/test_exception_group.py index 2658e027ff3e2b..a02d54da35e948 100644 --- a/Lib/test/test_exception_group.py +++ b/Lib/test/test_exception_group.py @@ -1,7 +1,7 @@ import collections.abc import types import unittest - +from test.support import C_RECURSION_LIMIT class TestExceptionGroupTypeHierarchy(unittest.TestCase): def test_exception_group_types(self): @@ -460,7 +460,7 @@ def test_basics_split_by_predicate__match(self): class DeepRecursionInSplitAndSubgroup(unittest.TestCase): def make_deep_eg(self): e = TypeError(1) - for i in range(2000): + for i in range(C_RECURSION_LIMIT + 1): e = ExceptionGroup('eg', [e]) return e diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index c4eca0f5b79511..50770f066a5e16 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -2474,6 +2474,74 @@ def _(arg): self.assertTrue(A.t('')) self.assertEqual(A.t(0.0), 0.0) + def test_slotted_class(self): + class Slot: + __slots__ = ('a', 'b') + @functools.singledispatchmethod + def go(self, item, arg): + pass + + @go.register + def _(self, item: int, arg): + return item + arg + + s = Slot() + self.assertEqual(s.go(1, 1), 2) + + def test_classmethod_slotted_class(self): + class Slot: + __slots__ = ('a', 'b') + @functools.singledispatchmethod + @classmethod + def go(cls, item, arg): + pass + + @go.register + @classmethod + def _(cls, item: int, arg): + return item + arg + + s = Slot() + self.assertEqual(s.go(1, 1), 2) + self.assertEqual(Slot.go(1, 1), 2) + + def test_staticmethod_slotted_class(self): + class A: + __slots__ = ['a'] + @functools.singledispatchmethod + @staticmethod + def t(arg): + return arg + @t.register(int) + @staticmethod + def _(arg): + return isinstance(arg, int) + @t.register(str) + @staticmethod + def _(arg): + return isinstance(arg, str) + a = A() + + self.assertTrue(A.t(0)) + self.assertTrue(A.t('')) + self.assertEqual(A.t(0.0), 0.0) + self.assertTrue(a.t(0)) + self.assertTrue(a.t('')) + self.assertEqual(a.t(0.0), 0.0) + + def test_assignment_behavior(self): + # see gh-106448 + class A: + @functools.singledispatchmethod + def t(arg): + return arg + + a = A() + a.t.foo = 'bar' + a2 = A() + with self.assertRaises(AttributeError): + a2.t.foo + def test_classmethod_register(self): class A: 
def __init__(self, arg): diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index ba0e5e8b0f6954..54378fced54699 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -7,7 +7,9 @@ test_tools.skip_if_missing('cases_generator') with test_tools.imports_under_tool('cases_generator'): import generate_cases - from parser import StackEffect + import analysis + import formatting + from parsing import StackEffect class TestEffects(unittest.TestCase): @@ -27,45 +29,28 @@ def test_effect_sizes(self): StackEffect("q", "", "", ""), StackEffect("r", "", "", ""), ] - self.assertEqual(generate_cases.effect_size(x), (1, "")) - self.assertEqual(generate_cases.effect_size(y), (0, "oparg")) - self.assertEqual(generate_cases.effect_size(z), (0, "oparg*2")) + self.assertEqual(formatting.effect_size(x), (1, "")) + self.assertEqual(formatting.effect_size(y), (0, "oparg")) + self.assertEqual(formatting.effect_size(z), (0, "oparg*2")) self.assertEqual( - generate_cases.list_effect_size(input_effects), + formatting.list_effect_size(input_effects), (1, "oparg + oparg*2"), ) self.assertEqual( - generate_cases.list_effect_size(output_effects), + formatting.list_effect_size(output_effects), (2, "oparg*4"), ) self.assertEqual( - generate_cases.list_effect_size(other_effects), + formatting.list_effect_size(other_effects), (2, "(oparg<<1)"), ) - self.assertEqual( - generate_cases.string_effect_size( - generate_cases.list_effect_size(input_effects), - ), "1 + oparg + oparg*2", - ) - self.assertEqual( - generate_cases.string_effect_size( - generate_cases.list_effect_size(output_effects), - ), - "2 + oparg*4", - ) - self.assertEqual( - generate_cases.string_effect_size( - generate_cases.list_effect_size(other_effects), - ), - "2 + (oparg<<1)", - ) - class TestGeneratedCases(unittest.TestCase): def setUp(self) -> None: super().setUp() + self.maxDiff = None self.temp_dir = tempfile.gettempdir() self.temp_input_filename = os.path.join(self.temp_dir, "input.txt") @@ -90,23 +75,17 @@ def tearDown(self) -> None: def run_cases_test(self, input: str, expected: str): with open(self.temp_input_filename, "w+") as temp_input: - temp_input.write(generate_cases.BEGIN_MARKER) + temp_input.write(analysis.BEGIN_MARKER) temp_input.write(input) - temp_input.write(generate_cases.END_MARKER) + temp_input.write(analysis.END_MARKER) temp_input.flush() - a = generate_cases.Analyzer( - [self.temp_input_filename], - self.temp_output_filename, - self.temp_metadata_filename, - self.temp_pymetadata_filename, - self.temp_executor_filename, - ) + a = generate_cases.Generator([self.temp_input_filename]) a.parse() a.analyze() if a.errors: raise RuntimeError(f"Found {a.errors} errors") - a.write_instructions() + a.write_instructions(self.temp_output_filename, False) with open(self.temp_output_filename) as temp_output: lines = temp_output.readlines() @@ -144,7 +123,8 @@ def test_inst_one_pop(self): """ output = """ TARGET(OP) { - PyObject *value = stack_pointer[-1]; + PyObject *value; + value = stack_pointer[-1]; spam(); STACK_SHRINK(1); DISPATCH(); @@ -177,8 +157,9 @@ def test_inst_one_push_one_pop(self): """ output = """ TARGET(OP) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; spam(); stack_pointer[-1] = res; DISPATCH(); @@ -194,9 +175,11 @@ def test_binary_op(self): """ output = """ TARGET(OP) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = 
stack_pointer[-1]; + left = stack_pointer[-2]; spam(); STACK_SHRINK(1); stack_pointer[-1] = res; @@ -213,9 +196,11 @@ def test_overlap(self): """ output = """ TARGET(OP) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *result; + right = stack_pointer[-1]; + left = stack_pointer[-2]; spam(); stack_pointer[-1] = result; DISPATCH(); @@ -239,8 +224,9 @@ def test_predictions_and_eval_breaker(self): } TARGET(OP3) { - PyObject *arg = stack_pointer[-1]; + PyObject *arg; PyObject *res; + arg = stack_pointer[-1]; DEOPT_IF(xxx, OP1); stack_pointer[-1] = res; CHECK_EVAL_BREAKER(); @@ -285,9 +271,11 @@ def test_error_if_pop(self): """ output = """ TARGET(OP) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; if (cond) goto pop_2_label; STACK_SHRINK(1); stack_pointer[-1] = res; @@ -303,7 +291,8 @@ def test_cache_effect(self): """ output = """ TARGET(OP) { - PyObject *value = stack_pointer[-1]; + PyObject *value; + value = stack_pointer[-1]; uint16_t counter = read_u16(&next_instr[0].cache); uint32_t extra = read_u32(&next_instr[1].cache); STACK_SHRINK(1); @@ -342,8 +331,10 @@ def test_macro_instruction(self): """ output = """ TARGET(OP1) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; + right = stack_pointer[-1]; + left = stack_pointer[-2]; uint16_t counter = read_u16(&next_instr[0].cache); op1(left, right); next_instr += 1; @@ -351,38 +342,38 @@ def test_macro_instruction(self): } TARGET(OP) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; - PyObject *_tmp_3 = stack_pointer[-3]; + static_assert(INLINE_CACHE_ENTRIES_OP == 5, "incorrect cache size"); + PyObject *right; + PyObject *left; + PyObject *arg2; + PyObject *res; + // OP1 + right = stack_pointer[-1]; + left = stack_pointer[-2]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; uint16_t counter = read_u16(&next_instr[0].cache); op1(left, right); - _tmp_2 = left; - _tmp_1 = right; } + // OP2 + arg2 = stack_pointer[-3]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; - PyObject *arg2 = _tmp_3; - PyObject *res; uint32_t extra = read_u32(&next_instr[3].cache); res = op2(arg2, left, right); - _tmp_3 = res; } - next_instr += 5; - static_assert(INLINE_CACHE_ENTRIES_OP == 5, "incorrect cache size"); STACK_SHRINK(2); - stack_pointer[-1] = _tmp_3; + stack_pointer[-1] = res; + next_instr += 5; DISPATCH(); } TARGET(OP3) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; - PyObject *arg2 = stack_pointer[-3]; + PyObject *right; + PyObject *left; + PyObject *arg2; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; + arg2 = stack_pointer[-3]; res = op3(arg2, left, right); STACK_SHRINK(2); stack_pointer[-1] = res; @@ -400,9 +391,12 @@ def test_array_input(self): """ output = """ TARGET(OP) { - PyObject *above = stack_pointer[-1]; - PyObject **values = (stack_pointer - (1 + oparg*2)); - PyObject *below = stack_pointer[-(2 + oparg*2)]; + PyObject *above; + PyObject **values; + PyObject *below; + above = stack_pointer[-1]; + values = stack_pointer - 1 - oparg*2; + below = stack_pointer[-2 - oparg*2]; spam(); STACK_SHRINK(oparg*2); STACK_SHRINK(2); @@ -420,12 +414,13 @@ def test_array_output(self): output = """ TARGET(OP) { PyObject *below; - PyObject **values = stack_pointer - (2) + 1; + 
PyObject **values; PyObject *above; + values = stack_pointer - 1; spam(values, oparg); STACK_GROW(oparg*3); + stack_pointer[-2 - oparg*3] = below; stack_pointer[-1] = above; - stack_pointer[-(2 + oparg*3)] = below; DISPATCH(); } """ @@ -439,8 +434,9 @@ def test_array_input_output(self): """ output = """ TARGET(OP) { - PyObject **values = (stack_pointer - oparg); + PyObject **values; PyObject *above; + values = stack_pointer - oparg; spam(values, oparg); STACK_GROW(1); stack_pointer[-1] = above; @@ -457,8 +453,10 @@ def test_array_error_if(self): """ output = """ TARGET(OP) { - PyObject **values = (stack_pointer - oparg); - PyObject *extra = stack_pointer[-(1 + oparg)]; + PyObject **values; + PyObject *extra; + values = stack_pointer - oparg; + extra = stack_pointer[-1 - oparg]; if (oparg == 0) { STACK_SHRINK(oparg); goto pop_1_somewhere; } STACK_SHRINK(oparg); STACK_SHRINK(1); @@ -475,18 +473,21 @@ def test_cond_effect(self): """ output = """ TARGET(OP) { - PyObject *cc = stack_pointer[-1]; - PyObject *input = ((oparg & 1) == 1) ? stack_pointer[-(1 + (((oparg & 1) == 1) ? 1 : 0))] : NULL; - PyObject *aa = stack_pointer[-(2 + (((oparg & 1) == 1) ? 1 : 0))]; + PyObject *cc; + PyObject *input = NULL; + PyObject *aa; PyObject *xx; PyObject *output = NULL; PyObject *zz; + cc = stack_pointer[-1]; + if ((oparg & 1) == 1) { input = stack_pointer[-1 - ((oparg & 1) == 1 ? 1 : 0)]; } + aa = stack_pointer[-2 - ((oparg & 1) == 1 ? 1 : 0)]; output = spam(oparg, input); STACK_SHRINK((((oparg & 1) == 1) ? 1 : 0)); STACK_GROW(((oparg & 2) ? 1 : 0)); + stack_pointer[-2 - (oparg & 2 ? 1 : 0)] = xx; + if (oparg & 2) { stack_pointer[-1 - (oparg & 2 ? 1 : 0)] = output; } stack_pointer[-1] = zz; - if (oparg & 2) { stack_pointer[-(1 + ((oparg & 2) ? 1 : 0))] = output; } - stack_pointer[-(2 + ((oparg & 2) ? 1 : 0))] = xx; DISPATCH(); } """ @@ -504,29 +505,28 @@ def test_macro_cond_effect(self): """ output = """ TARGET(M) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; - PyObject *_tmp_3 = stack_pointer[-3]; + PyObject *right; + PyObject *middle; + PyObject *left; + PyObject *deep; + PyObject *extra = NULL; + PyObject *res; + // A + right = stack_pointer[-1]; + middle = stack_pointer[-2]; + left = stack_pointer[-3]; { - PyObject *right = _tmp_1; - PyObject *middle = _tmp_2; - PyObject *left = _tmp_3; # Body of A } + // B { - PyObject *deep; - PyObject *extra = NULL; - PyObject *res; # Body of B - _tmp_3 = deep; - if (oparg) { _tmp_2 = extra; } - _tmp_1 = res; } STACK_SHRINK(1); STACK_GROW((oparg ? 1 : 0)); - stack_pointer[-1] = _tmp_1; - if (oparg) { stack_pointer[-2] = _tmp_2; } - stack_pointer[-3] = _tmp_3; + stack_pointer[-2 - (oparg ? 1 : 0)] = deep; + if (oparg) { stack_pointer[-1 - (oparg ? 
1 : 0)] = extra; } + stack_pointer[-1] = res; DISPATCH(); } """ diff --git a/Lib/test/test_gettext.py b/Lib/test/test_gettext.py index 1608d1b18e98fb..8430fc234d00ee 100644 --- a/Lib/test/test_gettext.py +++ b/Lib/test/test_gettext.py @@ -320,6 +320,8 @@ def test_plural_forms1(self): eq(x, 'Hay %s fichero') x = gettext.ngettext('There is %s file', 'There are %s files', 2) eq(x, 'Hay %s ficheros') + x = gettext.gettext('There is %s file') + eq(x, 'Hay %s fichero') def test_plural_context_forms1(self): eq = self.assertEqual @@ -329,6 +331,8 @@ def test_plural_context_forms1(self): x = gettext.npgettext('With context', 'There is %s file', 'There are %s files', 2) eq(x, 'Hay %s ficheros (context)') + x = gettext.pgettext('With context', 'There is %s file') + eq(x, 'Hay %s fichero (context)') def test_plural_forms2(self): eq = self.assertEqual @@ -338,6 +342,8 @@ def test_plural_forms2(self): eq(x, 'Hay %s fichero') x = t.ngettext('There is %s file', 'There are %s files', 2) eq(x, 'Hay %s ficheros') + x = t.gettext('There is %s file') + eq(x, 'Hay %s fichero') def test_plural_context_forms2(self): eq = self.assertEqual @@ -349,6 +355,8 @@ def test_plural_context_forms2(self): x = t.npgettext('With context', 'There is %s file', 'There are %s files', 2) eq(x, 'Hay %s ficheros (context)') + x = gettext.pgettext('With context', 'There is %s file') + eq(x, 'Hay %s fichero (context)') # Examples from http://www.gnu.org/software/gettext/manual/gettext.html diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index ec8ccf0bd78d37..051711bfd1fe24 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -97,7 +97,6 @@ def require_frozen(module, *, skip=True): def require_pure_python(module, *, skip=False): _require_loader(module, SourceFileLoader, skip) - def remove_files(name): for f in (name + ".py", name + ".pyc", @@ -147,19 +146,35 @@ def _ready_to_import(name=None, source=""): del sys.modules[name] -def requires_subinterpreters(meth): - """Decorator to skip a test if subinterpreters are not supported.""" - return unittest.skipIf(_interpreters is None, - 'subinterpreters required')(meth) +if _testsinglephase is not None: + def restore__testsinglephase(*, _orig=_testsinglephase): + # We started with the module imported and want to restore + # it to its nominal state. + sys.modules.pop('_testsinglephase', None) + _orig._clear_globals() + _testinternalcapi.clear_extension('_testsinglephase', _orig.__file__) + import _testsinglephase def requires_singlephase_init(meth): """Decorator to skip if single-phase init modules are not supported.""" + if not isinstance(meth, type): + def meth(self, _meth=meth): + try: + return _meth(self) + finally: + restore__testsinglephase() meth = cpython_only(meth) return unittest.skipIf(_testsinglephase is None, 'test requires _testsinglephase module')(meth) +def requires_subinterpreters(meth): + """Decorator to skip a test if subinterpreters are not supported.""" + return unittest.skipIf(_interpreters is None, + 'subinterpreters required')(meth) + + class ModuleSnapshot(types.SimpleNamespace): """A representation of a module for testing. @@ -1962,6 +1977,20 @@ def test_isolated_config(self): with self.subTest(f'{module}: strict, fresh'): self.check_compatible_fresh(module, strict=True, isolated=True) + @requires_subinterpreters + @requires_singlephase_init + def test_disallowed_reimport(self): + # See https://github.com/python/cpython/issues/104621. 
+ script = textwrap.dedent(''' + import _testsinglephase + print(_testsinglephase) + ''') + interpid = _interpreters.create() + with self.assertRaises(_interpreters.RunFailedError): + _interpreters.run_string(interpid, script) + with self.assertRaises(_interpreters.RunFailedError): + _interpreters.run_string(interpid, script) + class TestSinglePhaseSnapshot(ModuleSnapshot): @@ -2017,6 +2046,10 @@ def setUpClass(cls): # Start fresh. cls.clean_up() + @classmethod + def tearDownClass(cls): + restore__testsinglephase() + def tearDown(self): # Clean up the module. self.clean_up() @@ -2093,7 +2126,7 @@ def clean_up(): _interpreters.run_string(interpid, textwrap.dedent(f''' name = {self.NAME!r} if name in sys.modules: - sys.modules[name]._clear_globals() + sys.modules.pop(name)._clear_globals() _testinternalcapi.clear_extension(name, {self.FILE!r}) ''')) _interpreters.destroy(interpid) @@ -2522,6 +2555,12 @@ def test_basic_multiple_interpreters_main_no_reset(self): def test_basic_multiple_interpreters_deleted_no_reset(self): # without resetting; already loaded in a deleted interpreter + if hasattr(sys, 'getobjects'): + # It's a Py_TRACE_REFS build. + # This test breaks interpreter isolation a little, + # which causes problems on Py_TRACE_REF builds. + raise unittest.SkipTest('crashes on Py_TRACE_REFS builds') + # At this point: # * alive in 0 interpreters # * module def may or may not be loaded already diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py index 33a593f3591d68..07c48eac5b48b0 100644 --- a/Lib/test/test_inspect.py +++ b/Lib/test/test_inspect.py @@ -13,14 +13,18 @@ import _pickle import pickle import shutil +import stat import sys +import time import types +import tempfile import textwrap import unicodedata import unittest import unittest.mock import warnings + try: from concurrent.futures import ThreadPoolExecutor except ImportError: @@ -135,6 +139,14 @@ def gen_coroutine_function_example(self): yield return 'spam' +def meth_noargs(): pass +def meth_o(object, /): pass +def meth_self_noargs(self, /): pass +def meth_self_o(self, object, /): pass +def meth_type_noargs(type, /): pass +def meth_type_o(type, object, /): pass + + class TestPredicates(IsTestBase): def test_excluding_predicates(self): @@ -963,6 +975,36 @@ def test_nested_class_definition_inside_function(self): self.assertSourceEqual(mod2.cls213, 218, 222) self.assertSourceEqual(mod2.cls213().func219(), 220, 221) + def test_class_with_method_from_other_module(self): + with tempfile.TemporaryDirectory() as tempdir: + with open(os.path.join(tempdir, 'inspect_actual%spy' % os.extsep), + 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(""" + import inspect_other + class A: + def f(self): + pass + class A: + def f(self): + pass # correct one + A.f = inspect_other.A.f + """)) + + with open(os.path.join(tempdir, 'inspect_other%spy' % os.extsep), + 'w', encoding='utf-8') as f: + f.write(textwrap.dedent(""" + class A: + def f(self): + pass + """)) + + with DirsOnSysPath(tempdir): + import inspect_actual + self.assertIn("correct", inspect.getsource(inspect_actual.A)) + # Remove the module from sys.modules to force it to be reloaded. + # This is necessary when the test is run multiple times. 
+ sys.modules.pop("inspect_actual") + @unittest.skipIf( support.is_emscripten or support.is_wasi, "socket.accept is broken" @@ -1142,6 +1184,39 @@ def test_getfullargspec_builtin_func_no_signature(self): with self.assertRaises(TypeError): inspect.getfullargspec(builtin) + cls = _testcapi.DocStringNoSignatureTest + obj = _testcapi.DocStringNoSignatureTest() + for builtin, template in [ + (_testcapi.docstring_no_signature_noargs, meth_noargs), + (_testcapi.docstring_no_signature_o, meth_o), + (cls.meth_noargs, meth_self_noargs), + (cls.meth_o, meth_self_o), + (obj.meth_noargs, meth_self_noargs), + (obj.meth_o, meth_self_o), + (cls.meth_noargs_class, meth_type_noargs), + (cls.meth_o_class, meth_type_o), + (cls.meth_noargs_static, meth_noargs), + (cls.meth_o_static, meth_o), + (cls.meth_noargs_coexist, meth_self_noargs), + (cls.meth_o_coexist, meth_self_o), + + (time.time, meth_noargs), + (stat.S_IMODE, meth_o), + (str.lower, meth_self_noargs), + (''.lower, meth_self_noargs), + (set.add, meth_self_o), + (set().add, meth_self_o), + (set.__contains__, meth_self_o), + (set().__contains__, meth_self_o), + (datetime.datetime.__dict__['utcnow'], meth_type_noargs), + (datetime.datetime.utcnow, meth_type_noargs), + (dict.__dict__['__class_getitem__'], meth_type_o), + (dict.__class_getitem__, meth_type_o), + ]: + with self.subTest(builtin): + self.assertEqual(inspect.getfullargspec(builtin), + inspect.getfullargspec(template)) + def test_getfullargspec_definition_order_preserved_on_kwonly(self): for fn in signatures_with_lexicographic_keyword_only_parameters(): signature = inspect.getfullargspec(fn) @@ -2857,6 +2932,39 @@ def test_signature_on_builtins_no_signature(self): 'no signature found for builtin'): inspect.signature(str) + cls = _testcapi.DocStringNoSignatureTest + obj = _testcapi.DocStringNoSignatureTest() + for builtin, template in [ + (_testcapi.docstring_no_signature_noargs, meth_noargs), + (_testcapi.docstring_no_signature_o, meth_o), + (cls.meth_noargs, meth_self_noargs), + (cls.meth_o, meth_self_o), + (obj.meth_noargs, meth_noargs), + (obj.meth_o, meth_o), + (cls.meth_noargs_class, meth_noargs), + (cls.meth_o_class, meth_o), + (cls.meth_noargs_static, meth_noargs), + (cls.meth_o_static, meth_o), + (cls.meth_noargs_coexist, meth_self_noargs), + (cls.meth_o_coexist, meth_self_o), + + (time.time, meth_noargs), + (stat.S_IMODE, meth_o), + (str.lower, meth_self_noargs), + (''.lower, meth_noargs), + (set.add, meth_self_o), + (set().add, meth_o), + (set.__contains__, meth_self_o), + (set().__contains__, meth_o), + (datetime.datetime.__dict__['utcnow'], meth_type_noargs), + (datetime.datetime.utcnow, meth_noargs), + (dict.__dict__['__class_getitem__'], meth_type_o), + (dict.__class_getitem__, meth_o), + ]: + with self.subTest(builtin): + self.assertEqual(inspect.signature(builtin), + inspect.signature(template)) + def test_signature_on_non_function(self): with self.assertRaisesRegex(TypeError, 'is not a callable object'): inspect.signature(42) diff --git a/Lib/test/test_interpreters.py b/Lib/test/test_interpreters.py index d1bebe47158322..5981d96de8de06 100644 --- a/Lib/test/test_interpreters.py +++ b/Lib/test/test_interpreters.py @@ -7,6 +7,7 @@ from test import support from test.support import import_helper +from test.support import threading_helper _interpreters = import_helper.import_module('_xxsubinterpreters') _channels = import_helper.import_module('_xxinterpchannels') from test.support import interpreters @@ -463,6 +464,27 @@ def test_bytes_for_script(self): # test_xxsubinterpreters 
covers the remaining Interpreter.run() behavior. +class StressTests(TestBase): + + # In these tests we generally want a lot of interpreters, + # but not so many that any test takes too long. + + def test_create_many_sequential(self): + alive = [] + for _ in range(100): + interp = interpreters.create() + alive.append(interp) + + def test_create_many_threaded(self): + alive = [] + def task(): + interp = interpreters.create() + alive.append(interp) + threads = (threading.Thread(target=task) for _ in range(200)) + with threading_helper.start_threads(threads): + pass + + class TestIsShareable(TestBase): def test_default_shareables(self): diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index a5388b2e5debd8..6f204948c9fc48 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -1321,6 +1321,17 @@ def testGetIp(self): self.assertEqual(str(self.ipv6_scoped_interface.ip), '2001:658:22a:cafe:200::1') + def testIPv6IPv4MappedStringRepresentation(self): + long_prefix = '0000:0000:0000:0000:0000:ffff:' + short_prefix = '::ffff:' + ipv4 = '1.2.3.4' + ipv6_ipv4_str = short_prefix + ipv4 + ipv6_ipv4_addr = ipaddress.IPv6Address(ipv6_ipv4_str) + ipv6_ipv4_iface = ipaddress.IPv6Interface(ipv6_ipv4_str) + self.assertEqual(str(ipv6_ipv4_addr), ipv6_ipv4_str) + self.assertEqual(ipv6_ipv4_addr.exploded, long_prefix + ipv4) + self.assertEqual(str(ipv6_ipv4_iface.ip), ipv6_ipv4_str) + def testGetScopeId(self): self.assertEqual(self.ipv6_address.scope_id, None) diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 18258c22874ae0..def976fbe96ba3 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -2079,17 +2079,17 @@ def test_output(self): # The log message sent to the SysLogHandler is properly received. 
logger = logging.getLogger("slh") logger.error("sp\xe4m") - self.handled.wait() + self.handled.wait(support.LONG_TIMEOUT) self.assertEqual(self.log_output, b'<11>sp\xc3\xa4m\x00') self.handled.clear() self.sl_hdlr.append_nul = False logger.error("sp\xe4m") - self.handled.wait() + self.handled.wait(support.LONG_TIMEOUT) self.assertEqual(self.log_output, b'<11>sp\xc3\xa4m') self.handled.clear() self.sl_hdlr.ident = "h\xe4m-" logger.error("sp\xe4m") - self.handled.wait() + self.handled.wait(support.LONG_TIMEOUT) self.assertEqual(self.log_output, b'<11>h\xc3\xa4m-sp\xc3\xa4m') def test_udp_reconnection(self): @@ -2097,7 +2097,7 @@ def test_udp_reconnection(self): self.sl_hdlr.close() self.handled.clear() logger.error("sp\xe4m") - self.handled.wait(0.1) + self.handled.wait(support.LONG_TIMEOUT) self.assertEqual(self.log_output, b'<11>sp\xc3\xa4m\x00') @unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required") diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py index f854c582846660..845185be737eb2 100644 --- a/Lib/test/test_monitoring.py +++ b/Lib/test/test_monitoring.py @@ -8,7 +8,7 @@ import textwrap import types import unittest - +import asyncio PAIR = (0,1) @@ -136,20 +136,27 @@ def test_c_return_count(self): E = sys.monitoring.events -SIMPLE_EVENTS = [ +INSTRUMENTED_EVENTS = [ (E.PY_START, "start"), (E.PY_RESUME, "resume"), (E.PY_RETURN, "return"), (E.PY_YIELD, "yield"), (E.JUMP, "jump"), (E.BRANCH, "branch"), +] + +EXCEPT_EVENTS = [ (E.RAISE, "raise"), (E.PY_UNWIND, "unwind"), (E.EXCEPTION_HANDLED, "exception_handled"), +] + +SIMPLE_EVENTS = INSTRUMENTED_EVENTS + EXCEPT_EVENTS + [ (E.C_RAISE, "c_raise"), (E.C_RETURN, "c_return"), ] + SIMPLE_EVENT_SET = functools.reduce(operator.or_, [ev for (ev, _) in SIMPLE_EVENTS], 0) | E.CALL @@ -243,7 +250,6 @@ def check_events(self, func, expected=None): expected = func.events self.assertEqual(events, expected) - class MonitoringEventsTest(MonitoringEventsBase, unittest.TestCase): def test_just_pass(self): @@ -619,6 +625,49 @@ def func2(): self.check_lines(func2, [1,2,3,4,5,6]) +class TestDisable(MonitoringTestBase, unittest.TestCase): + + def gen(self, cond): + for i in range(10): + if cond: + yield 1 + else: + yield 2 + + def raise_handle_reraise(self): + try: + 1/0 + except: + raise + + def test_disable_legal_events(self): + for event, name in INSTRUMENTED_EVENTS: + try: + counter = CounterWithDisable() + counter.disable = True + sys.monitoring.register_callback(TEST_TOOL, event, counter) + sys.monitoring.set_events(TEST_TOOL, event) + for _ in self.gen(1): + pass + self.assertLess(counter.count, 4) + finally: + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.register_callback(TEST_TOOL, event, None) + + + def test_disable_illegal_events(self): + for event, name in EXCEPT_EVENTS: + try: + counter = CounterWithDisable() + counter.disable = True + sys.monitoring.register_callback(TEST_TOOL, event, counter) + sys.monitoring.set_events(TEST_TOOL, event) + with self.assertRaises(ValueError): + self.raise_handle_reraise() + finally: + sys.monitoring.set_events(TEST_TOOL, 0) + sys.monitoring.register_callback(TEST_TOOL, event, None) + class ExceptionRecorder: @@ -632,7 +681,7 @@ def __call__(self, code, offset, exc): class CheckEvents(MonitoringTestBase, unittest.TestCase): - def check_events(self, func, expected, tool=TEST_TOOL, recorders=(ExceptionRecorder,)): + def get_events(self, func, tool, recorders): try: self.assertEqual(sys.monitoring._all_events(), {}) event_list = [] @@ -646,19 +695,70 @@ def 
check_events(self, func, expected, tool=TEST_TOOL, recorders=(ExceptionRecor sys.monitoring.set_events(tool, 0) for recorder in recorders: sys.monitoring.register_callback(tool, recorder.event_type, None) - self.assertEqual(event_list, expected) + return event_list finally: sys.monitoring.set_events(tool, 0) for recorder in recorders: sys.monitoring.register_callback(tool, recorder.event_type, None) + def check_events(self, func, expected, tool=TEST_TOOL, recorders=(ExceptionRecorder,)): + events = self.get_events(func, tool, recorders) + if events != expected: + print(events, file = sys.stderr) + self.assertEqual(events, expected) + + def check_balanced(self, func, recorders): + events = self.get_events(func, TEST_TOOL, recorders) + self.assertEqual(len(events)%2, 0) + for r, h in zip(events[::2],events[1::2]): + r0 = r[0] + self.assertIn(r0, ("raise", "reraise")) + h0 = h[0] + self.assertIn(h0, ("handled", "unwind")) + self.assertEqual(r[1], h[1]) + + class StopiterationRecorder(ExceptionRecorder): event_type = E.STOP_ITERATION -class ExceptionMontoringTest(CheckEvents): +class ReraiseRecorder(ExceptionRecorder): + + event_type = E.RERAISE + + def __call__(self, code, offset, exc): + self.events.append(("reraise", type(exc))) + +class UnwindRecorder(ExceptionRecorder): + + event_type = E.PY_UNWIND + + def __call__(self, code, offset, exc): + self.events.append(("unwind", type(exc))) + +class ExceptionHandledRecorder(ExceptionRecorder): + + event_type = E.EXCEPTION_HANDLED + + def __call__(self, code, offset, exc): + self.events.append(("handled", type(exc))) + +class ThrowRecorder(ExceptionRecorder): + + event_type = E.PY_THROW + + def __call__(self, code, offset, exc): + self.events.append(("throw", type(exc))) + +class ExceptionMonitoringTest(CheckEvents): - recorder = ExceptionRecorder + + exception_recorders = ( + ExceptionRecorder, + ReraiseRecorder, + ExceptionHandledRecorder, + UnwindRecorder + ) def test_simple_try_except(self): @@ -672,6 +772,8 @@ def func1(): self.check_events(func1, [("raise", KeyError)]) + def test_implicit_stop_iteration(self): + def gen(): yield 1 return 2 @@ -682,6 +784,142 @@ def implicit_stop_iteration(): self.check_events(implicit_stop_iteration, [("raise", StopIteration)], recorders=(StopiterationRecorder,)) + initial = [ + ("raise", ZeroDivisionError), + ("handled", ZeroDivisionError) + ] + + reraise = [ + ("reraise", ZeroDivisionError), + ("handled", ZeroDivisionError) + ] + + def test_explicit_reraise(self): + + def func(): + try: + try: + 1/0 + except: + raise + except: + pass + + self.check_balanced( + func, + recorders = self.exception_recorders) + + def test_explicit_reraise_named(self): + + def func(): + try: + try: + 1/0 + except Exception as ex: + raise + except: + pass + + self.check_balanced( + func, + recorders = self.exception_recorders) + + def test_implicit_reraise(self): + + def func(): + try: + try: + 1/0 + except ValueError: + pass + except: + pass + + self.check_balanced( + func, + recorders = self.exception_recorders) + + + def test_implicit_reraise_named(self): + + def func(): + try: + try: + 1/0 + except ValueError as ex: + pass + except: + pass + + self.check_balanced( + func, + recorders = self.exception_recorders) + + def test_try_finally(self): + + def func(): + try: + try: + 1/0 + finally: + pass + except: + pass + + self.check_balanced( + func, + recorders = self.exception_recorders) + + def test_async_for(self): + + def func(): + + async def async_generator(): + for i in range(1): + raise ZeroDivisionError + yield i + + 
async def async_loop(): + try: + async for item in async_generator(): + pass + except Exception: + pass + + try: + async_loop().send(None) + except StopIteration: + pass + + self.check_balanced( + func, + recorders = self.exception_recorders) + + def test_throw(self): + + def gen(): + yield 1 + yield 2 + + def func(): + try: + g = gen() + next(g) + g.throw(IndexError) + except IndexError: + pass + + self.check_balanced( + func, + recorders = self.exception_recorders) + + events = self.get_events( + func, + TEST_TOOL, + self.exception_recorders + (ThrowRecorder,) + ) + self.assertEqual(events[0], ("throw", IndexError)) + class LineRecorder: event_type = E.LINE @@ -733,12 +971,12 @@ def func1(): line3 = 3 self.check_events(func1, recorders = MANY_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('call', 'func1', sys.monitoring.MISSING), ('line', 'func1', 1), ('line', 'func1', 2), ('line', 'func1', 3), - ('line', 'check_events', 11), + ('line', 'get_events', 11), ('call', 'set_events', 2)]) def test_c_call(self): @@ -749,14 +987,14 @@ def func2(): line3 = 3 self.check_events(func2, recorders = MANY_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('call', 'func2', sys.monitoring.MISSING), ('line', 'func2', 1), ('line', 'func2', 2), ('call', 'append', [2]), ('C return', 'append', [2]), ('line', 'func2', 3), - ('line', 'check_events', 11), + ('line', 'get_events', 11), ('call', 'set_events', 2)]) def test_try_except(self): @@ -770,7 +1008,7 @@ def func3(): line = 6 self.check_events(func3, recorders = MANY_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('call', 'func3', sys.monitoring.MISSING), ('line', 'func3', 1), ('line', 'func3', 2), @@ -779,7 +1017,7 @@ def func3(): ('line', 'func3', 4), ('line', 'func3', 5), ('line', 'func3', 6), - ('line', 'check_events', 11), + ('line', 'get_events', 11), ('call', 'set_events', 2)]) class InstructionRecorder: @@ -791,7 +1029,7 @@ def __init__(self, events): def __call__(self, code, offset): # Filter out instructions in check_events to lower noise - if code.co_name != "check_events": + if code.co_name != "get_events": self.events.append(("instruction", code.co_name, offset)) @@ -808,7 +1046,7 @@ def func1(): line3 = 3 self.check_events(func1, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('line', 'func1', 1), ('instruction', 'func1', 2), ('instruction', 'func1', 4), @@ -819,7 +1057,7 @@ def func1(): ('instruction', 'func1', 10), ('instruction', 'func1', 12), ('instruction', 'func1', 14), - ('line', 'check_events', 11)]) + ('line', 'get_events', 11)]) def test_c_call(self): @@ -829,7 +1067,7 @@ def func2(): line3 = 3 self.check_events(func2, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('line', 'func2', 1), ('instruction', 'func2', 2), ('instruction', 'func2', 4), @@ -843,7 +1081,7 @@ def func2(): ('instruction', 'func2', 40), ('instruction', 'func2', 42), ('instruction', 'func2', 44), - ('line', 'check_events', 11)]) + ('line', 'get_events', 11)]) def test_try_except(self): @@ -856,7 +1094,7 @@ def func3(): line = 6 self.check_events(func3, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('line', 'func3', 1), ('instruction', 'func3', 2), ('line', 'func3', 2), @@ -876,7 +1114,7 @@ def func3(): ('instruction', 'func3', 30), 
('instruction', 'func3', 32), ('instruction', 'func3', 34), - ('line', 'check_events', 11)]) + ('line', 'get_events', 11)]) def test_with_restart(self): def func1(): @@ -885,7 +1123,7 @@ def func1(): line3 = 3 self.check_events(func1, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('line', 'func1', 1), ('instruction', 'func1', 2), ('instruction', 'func1', 4), @@ -896,12 +1134,12 @@ def func1(): ('instruction', 'func1', 10), ('instruction', 'func1', 12), ('instruction', 'func1', 14), - ('line', 'check_events', 11)]) + ('line', 'get_events', 11)]) sys.monitoring.restart_events() self.check_events(func1, recorders = LINE_AND_INSTRUCTION_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('line', 'func1', 1), ('instruction', 'func1', 2), ('instruction', 'func1', 4), @@ -912,7 +1150,7 @@ def func1(): ('instruction', 'func1', 10), ('instruction', 'func1', 12), ('instruction', 'func1', 14), - ('line', 'check_events', 11)]) + ('line', 'get_events', 11)]) class TestInstallIncrementallly(MonitoringTestBase, unittest.TestCase): @@ -1114,7 +1352,7 @@ def func(): ('branch', 'func', 2, 2)]) self.check_events(func, recorders = JUMP_BRANCH_AND_LINE_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('line', 'func', 1), ('line', 'func', 2), ('branch', 'func', 2, 2), @@ -1130,7 +1368,7 @@ def func(): ('jump', 'func', 4, 2), ('line', 'func', 2), ('branch', 'func', 2, 2), - ('line', 'check_events', 11)]) + ('line', 'get_events', 11)]) def test_except_star(self): @@ -1149,7 +1387,7 @@ def func(): self.check_events(func, recorders = JUMP_BRANCH_AND_LINE_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('line', 'func', 1), ('line', 'func', 2), ('line', 'func', 3), @@ -1160,10 +1398,10 @@ def func(): ('jump', 'func', 5, 5), ('jump', 'func', 5, '[offset=112]'), ('branch', 'func', '[offset=118]', '[offset=120]'), - ('line', 'check_events', 11)]) + ('line', 'get_events', 11)]) self.check_events(func, recorders = FLOW_AND_LINE_RECORDERS, expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('line', 'func', 1), ('line', 'func', 2), ('line', 'func', 3), @@ -1177,7 +1415,7 @@ def func(): ('jump', 'func', 5, '[offset=112]'), ('branch', 'func', '[offset=118]', '[offset=120]'), ('return', None), - ('line', 'check_events', 11)]) + ('line', 'get_events', 11)]) class TestLoadSuperAttr(CheckEvents): RECORDERS = CallRecorder, LineRecorder, CRaiseRecorder, CReturnRecorder @@ -1229,7 +1467,7 @@ def f(): """ d = self._exec_super(codestr, optimized) expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('call', 'f', sys.monitoring.MISSING), ('line', 'f', 1), ('call', 'method', d["b"]), @@ -1242,7 +1480,7 @@ def f(): ('call', 'method', 1), ('line', 'method', 1), ('line', 'method', 1), - ('line', 'check_events', 11), + ('line', 'get_events', 11), ('call', 'set_events', 2), ] return d["f"], expected @@ -1280,7 +1518,7 @@ def f(): """ d = self._exec_super(codestr, optimized) expected = [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('call', 'f', sys.monitoring.MISSING), ('line', 'f', 1), ('line', 'f', 2), @@ -1293,7 +1531,7 @@ def f(): ('C raise', 'super', 1), ('line', 'f', 3), ('line', 'f', 4), - ('line', 'check_events', 11), + ('line', 'get_events', 11), ('call', 'set_events', 2), ] return d["f"], expected @@ -1321,7 +1559,7 @@ def f(): """ d = self._exec_super(codestr, optimized) expected = [ - 
('line', 'check_events', 10), + ('line', 'get_events', 10), ('call', 'f', sys.monitoring.MISSING), ('line', 'f', 1), ('call', 'method', d["b"]), @@ -1330,7 +1568,7 @@ def f(): ('C return', 'super', sys.monitoring.MISSING), ('line', 'method', 2), ('line', 'method', 1), - ('line', 'check_events', 11), + ('line', 'get_events', 11), ('call', 'set_events', 2) ] return d["f"], expected @@ -1355,7 +1593,7 @@ def f(): def get_expected(name, call_method, ns): repr_arg = 0 if name == "int" else sys.monitoring.MISSING return [ - ('line', 'check_events', 10), + ('line', 'get_events', 10), ('call', 'f', sys.monitoring.MISSING), ('line', 'f', 1), ('call', 'method', ns["c"]), @@ -1368,7 +1606,7 @@ def get_expected(name, call_method, ns): ('C return', '__repr__', repr_arg), ] if call_method else [] ), - ('line', 'check_events', 11), + ('line', 'get_events', 11), ('call', 'set_events', 2), ] diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index 538d758624c9d6..78e1cb582512b0 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -1036,6 +1036,7 @@ def test_path_normcase(self): self._check_function(self.path.normcase) if sys.platform == 'win32': self.assertEqual(ntpath.normcase('\u03a9\u2126'), 'ωΩ') + self.assertEqual(ntpath.normcase('abc\x00def'), 'abc\x00def') def test_path_isabs(self): self._check_function(self.path.isabs) diff --git a/Lib/test/test_opcache.py b/Lib/test/test_opcache.py index 564dc4745ae64e..1baa10cbdfdef2 100644 --- a/Lib/test/test_opcache.py +++ b/Lib/test/test_opcache.py @@ -1,8 +1,11 @@ +import copy +import pickle import dis import threading import types import unittest from test.support import threading_helper +import _testinternalcapi class TestLoadSuperAttrCache(unittest.TestCase): @@ -865,8 +868,10 @@ class C: items = [] for _ in range(self.ITEMS): item = C() - item.__dict__ item.a = None + # Resize into a combined unicode dict: + for i in range(29): + setattr(item, f"_{i}", None) items.append(item) return items @@ -932,7 +937,9 @@ class C: items = [] for _ in range(self.ITEMS): item = C() - item.__dict__ + # Resize into a combined unicode dict: + for i in range(29): + setattr(item, f"_{i}", None) items.append(item) return items @@ -993,6 +1000,124 @@ def write(items): opname = "UNPACK_SEQUENCE_LIST" self.assert_races_do_not_crash(opname, get_items, read, write) +class C: + pass + +class TestInstanceDict(unittest.TestCase): + + def setUp(self): + c = C() + c.a, c.b, c.c = 0,0,0 + + def test_values_on_instance(self): + c = C() + c.a = 1 + C().b = 2 + c.c = 3 + self.assertEqual( + _testinternalcapi.get_object_dict_values(c), + (1, '', 3) + ) + + def test_dict_materialization(self): + c = C() + c.a = 1 + c.b = 2 + c.__dict__ + self.assertIs( + _testinternalcapi.get_object_dict_values(c), + None + ) + + def test_dict_dematerialization(self): + c = C() + c.a = 1 + c.b = 2 + c.__dict__ + self.assertIs( + _testinternalcapi.get_object_dict_values(c), + None + ) + for _ in range(100): + c.a + self.assertEqual( + _testinternalcapi.get_object_dict_values(c), + (1, 2, '') + ) + + def test_dict_dematerialization_multiple_refs(self): + c = C() + c.a = 1 + c.b = 2 + d = c.__dict__ + for _ in range(100): + c.a + self.assertIs( + _testinternalcapi.get_object_dict_values(c), + None + ) + self.assertIs(c.__dict__, d) + + def test_dict_dematerialization_copy(self): + c = C() + c.a = 1 + c.b = 2 + c2 = copy.copy(c) + for _ in range(100): + c.a + c2.a + self.assertEqual( + _testinternalcapi.get_object_dict_values(c), + (1, 2, '') + ) + self.assertEqual( + 
_testinternalcapi.get_object_dict_values(c2), + (1, 2, '') + ) + c3 = copy.deepcopy(c) + for _ in range(100): + c.a + c3.a + self.assertEqual( + _testinternalcapi.get_object_dict_values(c), + (1, 2, '') + ) + #NOTE -- c3.__dict__ does not de-materialize + + def test_dict_dematerialization_pickle(self): + c = C() + c.a = 1 + c.b = 2 + c2 = pickle.loads(pickle.dumps(c)) + for _ in range(100): + c.a + c2.a + self.assertEqual( + _testinternalcapi.get_object_dict_values(c), + (1, 2, '') + ) + self.assertEqual( + _testinternalcapi.get_object_dict_values(c2), + (1, 2, '') + ) + + def test_dict_dematerialization_subclass(self): + class D(dict): pass + c = C() + c.a = 1 + c.b = 2 + c.__dict__ = D(c.__dict__) + for _ in range(100): + c.a + self.assertIs( + _testinternalcapi.get_object_dict_values(c), + None + ) + self.assertEqual( + c.__dict__, + {'a':1, 'b':2} + ) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py index 6bbbf5247466e4..e9c9e2b93c3d3f 100644 --- a/Lib/test/test_pathlib.py +++ b/Lib/test/test_pathlib.py @@ -693,8 +693,14 @@ def test_relative_to_common(self): self.assertRaises(ValueError, p.relative_to, P('a/b/c')) self.assertRaises(ValueError, p.relative_to, P('a/c')) self.assertRaises(ValueError, p.relative_to, P('/a')) + self.assertRaises(ValueError, p.relative_to, P("../a")) + self.assertRaises(ValueError, p.relative_to, P("a/..")) + self.assertRaises(ValueError, p.relative_to, P("/a/..")) self.assertRaises(ValueError, p.relative_to, P('/'), walk_up=True) self.assertRaises(ValueError, p.relative_to, P('/a'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("../a"), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("a/.."), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("/a/.."), walk_up=True) p = P('/a/b') self.assertEqual(p.relative_to(P('/')), P('a/b')) self.assertEqual(p.relative_to('/'), P('a/b')) @@ -723,8 +729,14 @@ def test_relative_to_common(self): self.assertRaises(ValueError, p.relative_to, P()) self.assertRaises(ValueError, p.relative_to, '') self.assertRaises(ValueError, p.relative_to, P('a')) + self.assertRaises(ValueError, p.relative_to, P("../a")) + self.assertRaises(ValueError, p.relative_to, P("a/..")) + self.assertRaises(ValueError, p.relative_to, P("/a/..")) self.assertRaises(ValueError, p.relative_to, P(''), walk_up=True) self.assertRaises(ValueError, p.relative_to, P('a'), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("../a"), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("a/.."), walk_up=True) + self.assertRaises(ValueError, p.relative_to, P("/a/.."), walk_up=True) def test_is_relative_to_common(self): P = self.cls @@ -2043,11 +2055,11 @@ def _check(glob, expected): "dirC/dirD", "dirC/dirD/fileD"]) _check(p.rglob("file*"), ["dirC/fileC", "dirC/dirD/fileD"]) _check(p.rglob("**/file*"), ["dirC/fileC", "dirC/dirD/fileD"]) - _check(p.rglob("dir*/**"), ["dirC/dirD"]) + _check(p.rglob("dir*/**/"), ["dirC/dirD"]) _check(p.rglob("*/*"), ["dirC/dirD/fileD"]) _check(p.rglob("*/"), ["dirC/dirD"]) _check(p.rglob(""), ["dirC", "dirC/dirD"]) - _check(p.rglob("**"), ["dirC", "dirC/dirD"]) + _check(p.rglob("**/"), ["dirC", "dirC/dirD"]) # gh-91616, a re module regression _check(p.rglob("*.txt"), ["dirC/novel.txt"]) _check(p.rglob("*.*"), ["dirC/novel.txt"]) @@ -2199,7 +2211,20 @@ def test_glob_above_recursion_limit(self): path.mkdir(parents=True) with set_recursion_limit(recursion_limit): - list(base.glob('**')) + list(base.glob('**/')) + + 
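A hedged sketch of the glob pattern semantics the pathlib tests above rely on: a pattern ending in "**/" matches directories recursively, while a bare trailing "**" is expected to emit a FutureWarning on interpreters that include this change. This is illustration only, not part of the patch; the directory used here is arbitrary:

import warnings
from pathlib import Path

base = Path(".")                       # any existing directory
dirs_only = list(base.rglob("**/"))    # recursive match, directories only per the tests above

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    list(base.glob("**"))              # expected to warn once this change is applied
print([w.category.__name__ for w in caught])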
def test_glob_recursive_no_trailing_slash(self): + P = self.cls + p = P(BASE) + with self.assertWarns(FutureWarning): + p.glob('**') + with self.assertWarns(FutureWarning): + p.glob('*/**') + with self.assertWarns(FutureWarning): + p.rglob('**') + with self.assertWarns(FutureWarning): + p.rglob('*/**') + def test_readlink(self): if not self.can_symlink: diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py index 82b0b50d0ea437..fba41f0b119796 100644 --- a/Lib/test/test_peepholer.py +++ b/Lib/test/test_peepholer.py @@ -991,6 +991,7 @@ def test_conditional_jump_forward_non_const_condition(self): ('LOAD_NAME', 1, 11), ('POP_JUMP_IF_TRUE', lbl := self.Label(), 12), ('LOAD_CONST', 2, 13), + ('RETURN_VALUE', 13), lbl, ('LOAD_CONST', 3, 14), ('RETURN_VALUE', 14), @@ -998,7 +999,7 @@ def test_conditional_jump_forward_non_const_condition(self): expected_insts = [ ('LOAD_NAME', 1, 11), ('POP_JUMP_IF_TRUE', lbl := self.Label(), 12), - ('LOAD_CONST', 1, 13), + ('RETURN_CONST', 1, 13), lbl, ('RETURN_CONST', 2, 14), ] @@ -1072,6 +1073,7 @@ def test_no_unsafe_static_swap(self): ('STORE_FAST', 1, 4), ('STORE_FAST', 1, 4), ('POP_TOP', 0, 4), + ('LOAD_CONST', 0, 5), ('RETURN_VALUE', 5) ] expected_insts = [ @@ -1080,7 +1082,7 @@ def test_no_unsafe_static_swap(self): ('NOP', 0, 3), ('STORE_FAST', 1, 4), ('POP_TOP', 0, 4), - ('RETURN_VALUE', 5) + ('RETURN_CONST', 0) ] self.cfg_optimization_test(insts, expected_insts, consts=list(range(3)), nlocals=1) @@ -1092,6 +1094,7 @@ def test_dead_store_elimination_in_same_lineno(self): ('STORE_FAST', 1, 4), ('STORE_FAST', 1, 4), ('STORE_FAST', 1, 4), + ('LOAD_CONST', 0, 5), ('RETURN_VALUE', 5) ] expected_insts = [ @@ -1100,7 +1103,7 @@ def test_dead_store_elimination_in_same_lineno(self): ('NOP', 0, 3), ('POP_TOP', 0, 4), ('STORE_FAST', 1, 4), - ('RETURN_VALUE', 5) + ('RETURN_CONST', 0, 5) ] self.cfg_optimization_test(insts, expected_insts, consts=list(range(3)), nlocals=1) @@ -1112,9 +1115,19 @@ def test_no_dead_store_elimination_in_different_lineno(self): ('STORE_FAST', 1, 4), ('STORE_FAST', 1, 5), ('STORE_FAST', 1, 6), + ('LOAD_CONST', 0, 5), ('RETURN_VALUE', 5) ] - self.cfg_optimization_test(insts, insts, consts=list(range(3)), nlocals=1) + expected_insts = [ + ('LOAD_CONST', 0, 1), + ('LOAD_CONST', 1, 2), + ('LOAD_CONST', 2, 3), + ('STORE_FAST', 1, 4), + ('STORE_FAST', 1, 5), + ('STORE_FAST', 1, 6), + ('RETURN_CONST', 0, 5) + ] + self.cfg_optimization_test(insts, expected_insts, consts=list(range(3)), nlocals=1) if __name__ == "__main__": diff --git a/Lib/test/test_peg_generator/test_c_parser.py b/Lib/test/test_peg_generator/test_c_parser.py index f9105a9f23bd6d..9e273e99e387a4 100644 --- a/Lib/test/test_peg_generator/test_c_parser.py +++ b/Lib/test/test_peg_generator/test_c_parser.py @@ -404,7 +404,7 @@ def test_ternary_operator(self) -> None: a='[' b=NAME c=for_if_clauses d=']' { _PyAST_ListComp(b, c, EXTRA) } ) for_if_clauses[asdl_comprehension_seq*]: ( - a[asdl_comprehension_seq*]=(y=[ASYNC] 'for' a=NAME 'in' b=NAME c[asdl_expr_seq*]=('if' z=NAME { z })* + a[asdl_comprehension_seq*]=(y=['async'] 'for' a=NAME 'in' b=NAME c[asdl_expr_seq*]=('if' z=NAME { z })* { _PyAST_comprehension(_PyAST_Name(((expr_ty) a)->v.Name.id, Store, EXTRA), b, c, (y == NULL) ? 
0 : 1, p->arena) })+ { a } ) """ diff --git a/Lib/test/test_pydoc.py b/Lib/test/test_pydoc.py index ddb5187f90da9b..fe4e37d4858c85 100644 --- a/Lib/test/test_pydoc.py +++ b/Lib/test/test_pydoc.py @@ -1,3 +1,4 @@ +import datetime import os import sys import contextlib @@ -12,6 +13,7 @@ import stat import tempfile import test.support +import time import types import typing import unittest @@ -1180,6 +1182,108 @@ def test_module_level_callable(self): self.assertEqual(self._get_summary_line(os.stat), "stat(path, *, dir_fd=None, follow_symlinks=True)") + def test_module_level_callable_noargs(self): + self.assertEqual(self._get_summary_line(time.time), + "time()") + + def test_module_level_callable_o(self): + self.assertEqual(self._get_summary_line(stat.S_IMODE), + "S_IMODE(object, /)") + + def test_unbound_builtin_method_noargs(self): + self.assertEqual(self._get_summary_line(str.lower), + "lower(self, /)") + + def test_bound_builtin_method_noargs(self): + self.assertEqual(self._get_summary_line(''.lower), + "lower() method of builtins.str instance") + + def test_unbound_builtin_method_o(self): + self.assertEqual(self._get_summary_line(set.add), + "add(self, object, /)") + + def test_bound_builtin_method_o(self): + self.assertEqual(self._get_summary_line(set().add), + "add(object, /) method of builtins.set instance") + + def test_unbound_builtin_method_coexist_o(self): + self.assertEqual(self._get_summary_line(set.__contains__), + "__contains__(self, object, /)") + + def test_bound_builtin_method_coexist_o(self): + self.assertEqual(self._get_summary_line(set().__contains__), + "__contains__(object, /) method of builtins.set instance") + + def test_unbound_builtin_classmethod_noargs(self): + self.assertEqual(self._get_summary_line(datetime.datetime.__dict__['utcnow']), + "utcnow(type, /)") + + def test_bound_builtin_classmethod_noargs(self): + self.assertEqual(self._get_summary_line(datetime.datetime.utcnow), + "utcnow() method of builtins.type instance") + + def test_unbound_builtin_classmethod_o(self): + self.assertEqual(self._get_summary_line(dict.__dict__['__class_getitem__']), + "__class_getitem__(type, object, /)") + + def test_bound_builtin_classmethod_o(self): + self.assertEqual(self._get_summary_line(dict.__class_getitem__), + "__class_getitem__(object, /) method of builtins.type instance") + + def test_module_level_callable_unrepresentable_default(self): + self.assertEqual(self._get_summary_line(getattr), + "getattr(object, name, default=, /)") + + def test_builtin_staticmethod_unrepresentable_default(self): + self.assertEqual(self._get_summary_line(str.maketrans), + "maketrans(x, y=, z=, /)") + + def test_unbound_builtin_method_unrepresentable_default(self): + self.assertEqual(self._get_summary_line(dict.pop), + "pop(self, key, default=, /)") + + def test_bound_builtin_method_unrepresentable_default(self): + self.assertEqual(self._get_summary_line({}.pop), + "pop(key, default=, /) " + "method of builtins.dict instance") + + def test_overridden_text_signature(self): + class C: + def meth(*args, **kwargs): + pass + @classmethod + def cmeth(*args, **kwargs): + pass + @staticmethod + def smeth(*args, **kwargs): + pass + for text_signature, unbound, bound in [ + ("($slf)", "(slf, /)", "()"), + ("($slf, /)", "(slf, /)", "()"), + ("($slf, /, arg)", "(slf, /, arg)", "(arg)"), + ("($slf, /, arg=)", "(slf, /, arg=)", "(arg=)"), + ("($slf, arg, /)", "(slf, arg, /)", "(arg, /)"), + ("($slf, arg=, /)", "(slf, arg=, /)", "(arg=, /)"), + ("(/, slf, arg)", "(/, slf, arg)", "(/, slf, arg)"), + ("(/, 
slf, arg=)", "(/, slf, arg=)", "(/, slf, arg=)"), + ("(slf, /, arg)", "(slf, /, arg)", "(arg)"), + ("(slf, /, arg=)", "(slf, /, arg=)", "(arg=)"), + ("(slf, arg, /)", "(slf, arg, /)", "(arg, /)"), + ("(slf, arg=, /)", "(slf, arg=, /)", "(arg=, /)"), + ]: + with self.subTest(text_signature): + C.meth.__text_signature__ = text_signature + self.assertEqual(self._get_summary_line(C.meth), + "meth" + unbound) + self.assertEqual(self._get_summary_line(C().meth), + "meth" + bound + " method of test.test_pydoc.C instance") + C.cmeth.__func__.__text_signature__ = text_signature + self.assertEqual(self._get_summary_line(C.cmeth), + "cmeth" + bound + " method of builtins.type instance") + C.smeth.__text_signature__ = text_signature + self.assertEqual(self._get_summary_line(C.smeth), + "smeth" + unbound) + @requires_docstrings def test_staticmethod(self): class X: diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py index a6f5af17d7d51b..bf3698ac78a880 100644 --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -2342,6 +2342,16 @@ def test_bug_gh91616(self): self.assertTrue(re.fullmatch(r'(?s:(?>.*?\.).*)\Z', "a.txt")) # reproducer self.assertTrue(re.fullmatch(r'(?s:(?=(?P.*?\.))(?P=g0).*)\Z', "a.txt")) + def test_bug_gh106052(self): + self.assertEqual(re.match("(?>(?:ab?c)+)", "aca").span(), (0, 2)) + self.assertEqual(re.match("(?:ab?c)++", "aca").span(), (0, 2)) + self.assertEqual(re.match("(?>(?:ab?c)*)", "aca").span(), (0, 2)) + self.assertEqual(re.match("(?:ab?c)*+", "aca").span(), (0, 2)) + self.assertEqual(re.match("(?>(?:ab?c)?)", "a").span(), (0, 0)) + self.assertEqual(re.match("(?:ab?c)?+", "a").span(), (0, 0)) + self.assertEqual(re.match("(?>(?:ab?c){1,3})", "aca").span(), (0, 2)) + self.assertEqual(re.match("(?:ab?c){1,3}+", "aca").span(), (0, 2)) + @unittest.skipIf(multiprocessing is None, 'test requires multiprocessing') def test_regression_gh94675(self): pattern = re.compile(r'(?<=[({}])(((//[^\n]*)?[\n])([\000-\040])*)*' @@ -2362,6 +2372,9 @@ def test_regression_gh94675(self): p.terminate() p.join() + def test_fail(self): + self.assertEqual(re.search(r'12(?!)|3', '123')[0], '3') + def get_debug_out(pat): with captured_stdout() as out: @@ -2438,6 +2451,7 @@ def test_atomic_group(self): 17: SUCCESS ''') + @unittest.expectedFailure # gh-106052 def test_possesive_repeat_one(self): self.assertEqual(get_debug_out(r'a?+'), '''\ POSSESSIVE_REPEAT 0 1 @@ -2450,6 +2464,7 @@ def test_possesive_repeat_one(self): 12: SUCCESS ''') + @unittest.expectedFailure # gh-106052 def test_possesive_repeat(self): self.assertEqual(get_debug_out(r'(?:ab)?+'), '''\ POSSESSIVE_REPEAT 0 1 diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py index e7216d427200c1..502287b620d066 100644 --- a/Lib/test/test_reprlib.py +++ b/Lib/test/test_reprlib.py @@ -765,5 +765,14 @@ def test_assigned_attributes(self): for name in assigned: self.assertIs(getattr(wrapper, name), getattr(wrapped, name)) + def test__wrapped__(self): + class X: + def __repr__(self): + return 'X()' + f = __repr__ # save reference to check it later + __repr__ = recursive_repr()(__repr__) + + self.assertIs(X.f, X.__repr__.__wrapped__) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_rlcompleter.py b/Lib/test/test_rlcompleter.py index 6b5fc9a0247f4b..7347fca71be2fe 100644 --- a/Lib/test/test_rlcompleter.py +++ b/Lib/test/test_rlcompleter.py @@ -53,7 +53,10 @@ def test_attr_matches(self): ['str.{}('.format(x) for x in dir(str) if x.startswith('s')]) self.assertEqual(self.stdcompleter.attr_matches('tuple.foospamegg'), []) 
- expected = sorted({'None.%s%s' % (x, '(' if x != '__doc__' else '') + expected = sorted({'None.%s%s' % (x, + '()' if x == '__init_subclass__' + else '' if x == '__doc__' + else '(') for x in dir(None)}) self.assertEqual(self.stdcompleter.attr_matches('None.'), expected) self.assertEqual(self.stdcompleter.attr_matches('None._'), expected) diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py index b6d5b8c3d82580..f2e02dab1c3ca5 100644 --- a/Lib/test/test_smtplib.py +++ b/Lib/test/test_smtplib.py @@ -831,6 +831,7 @@ class SimSMTPChannel(smtpd.SMTPChannel): def __init__(self, extra_features, *args, **kw): self._extrafeatures = ''.join( [ "250-{0}\r\n".format(x) for x in extra_features ]) + self.all_received_lines = [] super(SimSMTPChannel, self).__init__(*args, **kw) # AUTH related stuff. It would be nice if support for this were in smtpd. @@ -845,6 +846,7 @@ def found_terminator(self): self.smtp_state = self.COMMAND self.push('%s %s' % (e.smtp_code, e.smtp_error)) return + self.all_received_lines.append(self.received_lines) super().found_terminator() @@ -1349,6 +1351,18 @@ def test_name_field_not_included_in_envelop_addresses(self): self.assertEqual(self.serv._addresses['from'], 'michael@example.com') self.assertEqual(self.serv._addresses['tos'], ['rene@example.com']) + def test_lowercase_mail_from_rcpt_to(self): + m = 'A test message' + smtp = smtplib.SMTP( + HOST, self.port, local_hostname='localhost', + timeout=support.LOOPBACK_TIMEOUT) + self.addCleanup(smtp.close) + + smtp.sendmail('John', 'Sally', m) + + self.assertIn(['mail from: size=14'], self.serv._SMTPchannel.all_received_lines) + self.assertIn(['rcpt to:'], self.serv._SMTPchannel.all_received_lines) + class SimSMTPUTF8Server(SimSMTPServer): diff --git a/Lib/test/test_stable_abi_ctypes.py b/Lib/test/test_stable_abi_ctypes.py index 4e74bb374c93bf..566d36a3f5ba11 100644 --- a/Lib/test/test_stable_abi_ctypes.py +++ b/Lib/test/test_stable_abi_ctypes.py @@ -144,7 +144,9 @@ def test_windows_feature_macros(self): "PyDict_DelItem", "PyDict_DelItemString", "PyDict_GetItem", + "PyDict_GetItemRef", "PyDict_GetItemString", + "PyDict_GetItemStringRef", "PyDict_GetItemWithError", "PyDict_Items", "PyDict_Keys", diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index f0fa6454b1f91a..aa2cf2b1edc584 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -28,6 +28,12 @@ # === Helper functions and class === +# Test copied from Lib/test/test_math.py +# detect evidence of double-rounding: fsum is not always correctly +# rounded on machines that suffer from double rounding. 
+x, y = 1e16, 2.9999 # use temporary values to defeat peephole optimizer +HAVE_DOUBLE_ROUNDING = (x + y == 1e16 + 4) + def sign(x): """Return -1.0 for negatives, including -0.0, otherwise +1.0.""" return math.copysign(1, x) @@ -2564,6 +2570,79 @@ def test_different_scales(self): self.assertAlmostEqual(statistics.correlation(x, y), 1) self.assertAlmostEqual(statistics.covariance(x, y), 0.1) + def test_sqrtprod_helper_function_fundamentals(self): + # Verify that results are close to sqrt(x * y) + for i in range(100): + x = random.expovariate() + y = random.expovariate() + expected = math.sqrt(x * y) + actual = statistics._sqrtprod(x, y) + with self.subTest(x=x, y=y, expected=expected, actual=actual): + self.assertAlmostEqual(expected, actual) + + x, y, target = 0.8035720646477457, 0.7957468097636939, 0.7996498651651661 + self.assertEqual(statistics._sqrtprod(x, y), target) + self.assertNotEqual(math.sqrt(x * y), target) + + # Test that range extremes avoid underflow and overflow + smallest = sys.float_info.min * sys.float_info.epsilon + self.assertEqual(statistics._sqrtprod(smallest, smallest), smallest) + biggest = sys.float_info.max + self.assertEqual(statistics._sqrtprod(biggest, biggest), biggest) + + # Check special values and the sign of the result + special_values = [0.0, -0.0, 1.0, -1.0, 4.0, -4.0, + math.nan, -math.nan, math.inf, -math.inf] + for x, y in itertools.product(special_values, repeat=2): + try: + expected = math.sqrt(x * y) + except ValueError: + expected = 'ValueError' + try: + actual = statistics._sqrtprod(x, y) + except ValueError: + actual = 'ValueError' + with self.subTest(x=x, y=y, expected=expected, actual=actual): + if isinstance(expected, str) and expected == 'ValueError': + self.assertEqual(actual, 'ValueError') + continue + self.assertIsInstance(actual, float) + if math.isnan(expected): + self.assertTrue(math.isnan(actual)) + continue + self.assertEqual(actual, expected) + self.assertEqual(sign(actual), sign(expected)) + + @requires_IEEE_754 + @unittest.skipIf(HAVE_DOUBLE_ROUNDING, + "accuracy not guaranteed on machines with double rounding") + @support.cpython_only # Allow for a weaker sumprod() implmentation + def test_sqrtprod_helper_function_improved_accuracy(self): + # Test a known example where accuracy is improved + x, y, target = 0.8035720646477457, 0.7957468097636939, 0.7996498651651661 + self.assertEqual(statistics._sqrtprod(x, y), target) + self.assertNotEqual(math.sqrt(x * y), target) + + def reference_value(x: float, y: float) -> float: + x = decimal.Decimal(x) + y = decimal.Decimal(y) + with decimal.localcontext() as ctx: + ctx.prec = 200 + return float((x * y).sqrt()) + + # Verify that the new function with improved accuracy + # agrees with a reference value more often than old version. 
+ new_agreements = 0 + old_agreements = 0 + for i in range(10_000): + x = random.expovariate() + y = random.expovariate() + new = statistics._sqrtprod(x, y) + old = math.sqrt(x * y) + ref = reference_value(x, y) + new_agreements += (new == ref) + old_agreements += (old == ref) + self.assertGreater(new_agreements, old_agreements) def test_correlation_spearman(self): # https://statistics.laerd.com/statistical-guides/spearmans-rank-order-correlation-statistical-guide-2.php diff --git a/Lib/test/test_super.py b/Lib/test/test_super.py index 664cf70b3cf0fa..43162c540b55ae 100644 --- a/Lib/test/test_super.py +++ b/Lib/test/test_super.py @@ -5,6 +5,9 @@ from test import shadowed_super +ADAPTIVE_WARMUP_DELAY = 2 + + class A: def f(self): return 'A' @@ -419,8 +422,47 @@ def test(name): super(MyType, type(mytype)).__setattr__(mytype, "bar", 1) self.assertEqual(mytype.bar, 1) - test("foo1") - test("foo2") + for _ in range(ADAPTIVE_WARMUP_DELAY): + test("foo1") + + def test_reassigned_new(self): + class A: + def __new__(cls): + pass + + def __init_subclass__(cls): + if "__new__" not in cls.__dict__: + cls.__new__ = cls.__new__ + + class B(A): + pass + + class C(B): + def __new__(cls): + return super().__new__(cls) + + for _ in range(ADAPTIVE_WARMUP_DELAY): + C() + + def test_mixed_staticmethod_hierarchy(self): + # This test is just a desugared version of `test_reassigned_new` + class A: + @staticmethod + def some(cls, *args, **kwargs): + self.assertFalse(args) + self.assertFalse(kwargs) + + class B(A): + def some(cls, *args, **kwargs): + return super().some(cls, *args, **kwargs) + + class C(B): + @staticmethod + def some(cls): + return super().some(cls) + + for _ in range(ADAPTIVE_WARMUP_DELAY): + C.some(C) if __name__ == "__main__": diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 37f75ad54387a0..9dce15ed1529e7 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -14,11 +14,21 @@ from test.support.script_helper import assert_python_ok, assert_python_failure from test.support import threading_helper from test.support import import_helper +try: + from test.support import interpreters +except ImportError: + interpreters = None import textwrap import unittest import warnings +def requires_subinterpreters(meth): + """Decorator to skip a test if subinterpreters are not supported.""" + return unittest.skipIf(interpreters is None, + 'subinterpreters required')(meth) + + # count the number of test runs, used to create unique # strings to intern in test_intern() INTERN_NUMRUNS = 0 @@ -699,6 +709,37 @@ def __hash__(self): self.assertRaises(TypeError, sys.intern, S("abc")) + @requires_subinterpreters + def test_subinterp_intern_dynamically_allocated(self): + global INTERN_NUMRUNS + INTERN_NUMRUNS += 1 + s = "never interned before" + str(INTERN_NUMRUNS) + t = sys.intern(s) + self.assertIs(t, s) + + interp = interpreters.create() + interp.run(textwrap.dedent(f''' + import sys + t = sys.intern({s!r}) + assert id(t) != {id(s)}, (id(t), {id(s)}) + assert id(t) != {id(t)}, (id(t), {id(t)}) + ''')) + + @requires_subinterpreters + def test_subinterp_intern_statically_allocated(self): + # See Tools/build/generate_global_objects.py for the list + # of strings that are always statically allocated. 
+ s = '__init__' + t = sys.intern(s) + + print('------------------------') + interp = interpreters.create() + interp.run(textwrap.dedent(f''' + import sys + t = sys.intern({s!r}) + assert id(t) == {id(t)}, (id(t), {id(t)}) + ''')) + def test_sys_flags(self): self.assertTrue(sys.flags) attrs = ("debug", @@ -960,12 +1001,12 @@ def test_debugmallocstats(self): "sys.getallocatedblocks unavailable on this build") def test_getallocatedblocks(self): try: - import _testcapi + import _testinternalcapi except ImportError: with_pymalloc = support.with_pymalloc() else: try: - alloc_name = _testcapi.pymem_getallocatorsname() + alloc_name = _testinternalcapi.pymem_getallocatorsname() except RuntimeError as exc: # "cannot get allocators name" (ex: tracemalloc is used) with_pymalloc = True diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py index d07b83acb1b505..ebdb58f91d3d8a 100644 --- a/Lib/test/test_tcl.py +++ b/Lib/test/test_tcl.py @@ -20,14 +20,6 @@ tcl_version = tuple(map(int, _tkinter.TCL_VERSION.split('.'))) -_tk_patchlevel = None -def get_tk_patchlevel(): - global _tk_patchlevel - if _tk_patchlevel is None: - tcl = Tcl() - _tk_patchlevel = tcl.info_patchlevel() - return _tk_patchlevel - class TkinterTest(unittest.TestCase): @@ -571,7 +563,6 @@ def test_splitlist(self): (1, '2', (3.4,)) if self.wantobjects else ('1', '2', '3.4')), ] - tk_patchlevel = get_tk_patchlevel() if not self.wantobjects: expected = ('12', '\u20ac', '\xe2\x82\xac', '3.4') else: @@ -580,8 +571,8 @@ def test_splitlist(self): (call('dict', 'create', 12, '\u20ac', b'\xe2\x82\xac', (3.4,)), expected), ] - dbg_info = ('want objects? %s, Tcl version: %s, Tk patchlevel: %s' - % (self.wantobjects, tcl_version, tk_patchlevel)) + dbg_info = ('want objects? %s, Tcl version: %s, Tcl patchlevel: %s' + % (self.wantobjects, tcl_version, self.interp.info_patchlevel())) for arg, res in testcases: self.assertEqual(splitlist(arg), res, 'arg=%a, %s' % (arg, dbg_info)) diff --git a/Lib/test/test_tkinter/support.py b/Lib/test/test_tkinter/support.py index 9154ebac5c48f8..10e64bf40a4afa 100644 --- a/Lib/test/test_tkinter/support.py +++ b/Lib/test/test_tkinter/support.py @@ -79,28 +79,28 @@ def simulate_mouse_click(widget, x, y): import _tkinter tcl_version = tuple(map(int, _tkinter.TCL_VERSION.split('.'))) +tk_version = tuple(map(int, _tkinter.TK_VERSION.split('.'))) -def requires_tcl(*version): - if len(version) <= 2: - return unittest.skipUnless(tcl_version >= version, - 'requires Tcl version >= ' + '.'.join(map(str, version))) +def requires_tk(*version): + if len(version) <= 2 and tk_version >= version: + return lambda test: test def deco(test): @functools.wraps(test) def newtest(self): - if get_tk_patchlevel() < version: - self.skipTest('requires Tcl version >= ' + + root = getattr(self, 'root', None) + if get_tk_patchlevel(root) < version: + self.skipTest('requires Tk version >= ' + '.'.join(map(str, version))) test(self) return newtest return deco _tk_patchlevel = None -def get_tk_patchlevel(): +def get_tk_patchlevel(root): global _tk_patchlevel if _tk_patchlevel is None: - tcl = tkinter.Tcl() - _tk_patchlevel = tcl.info_patchlevel() + _tk_patchlevel = tkinter._parse_version(root.tk.globalgetvar('tk_patchLevel')) return _tk_patchlevel units = { diff --git a/Lib/test/test_tkinter/test_images.py b/Lib/test/test_tkinter/test_images.py index c07de867ce04b7..317b0a5c8f4a30 100644 --- a/Lib/test/test_tkinter/test_images.py +++ b/Lib/test/test_tkinter/test_images.py @@ -2,7 +2,7 @@ import tkinter from test import support from test.support 
import os_helper -from test.test_tkinter.support import AbstractTkTest, AbstractDefaultRootTest, requires_tcl +from test.test_tkinter.support import AbstractTkTest, AbstractDefaultRootTest, requires_tk support.requires('gui') @@ -144,6 +144,14 @@ def test_configure_foreground(self): self.assertEqual(image['foreground'], '-foreground {} {} #000000 yellow') + def test_bug_100814(self): + # gh-100814: Passing a callable option value causes AttributeError. + with self.assertRaises(tkinter.TclError): + tkinter.BitmapImage('::img::test', master=self.root, spam=print) + image = tkinter.BitmapImage('::img::test', master=self.root) + with self.assertRaises(tkinter.TclError): + image.configure(spam=print) + class PhotoImageTest(AbstractTkTest, unittest.TestCase): @@ -213,11 +221,11 @@ def test_create_from_gif_file(self): def test_create_from_gif_data(self): self.check_create_from_data('gif') - @requires_tcl(8, 6) + @requires_tk(8, 6) def test_create_from_png_file(self): self.check_create_from_file('png') - @requires_tcl(8, 6) + @requires_tk(8, 6) def test_create_from_png_data(self): self.check_create_from_data('png') @@ -274,6 +282,14 @@ def test_configure_palette(self): image.configure(palette='3/4/2') self.assertEqual(image['palette'], '3/4/2') + def test_bug_100814(self): + # gh-100814: Passing a callable option value causes AttributeError. + with self.assertRaises(tkinter.TclError): + tkinter.PhotoImage('::img::test', master=self.root, spam=print) + image = tkinter.PhotoImage('::img::test', master=self.root) + with self.assertRaises(tkinter.TclError): + image.configure(spam=print) + def test_blank(self): image = self.create() image.blank() diff --git a/Lib/test/test_tkinter/test_widgets.py b/Lib/test/test_tkinter/test_widgets.py index 34e67c0cbc44a3..d3f942db7baf9a 100644 --- a/Lib/test/test_tkinter/test_widgets.py +++ b/Lib/test/test_tkinter/test_widgets.py @@ -4,7 +4,7 @@ import os from test.support import requires -from test.test_tkinter.support import (requires_tcl, +from test.test_tkinter.support import (requires_tk, get_tk_patchlevel, widget_eq, AbstractDefaultRootTest) from test.test_tkinter.widget_tests import ( @@ -613,7 +613,7 @@ def test_configure_inactiveselectbackground(self): widget = self.create() self.checkColorParam(widget, 'inactiveselectbackground') - @requires_tcl(8, 6) + @requires_tk(8, 6) def test_configure_insertunfocussed(self): widget = self.create() self.checkEnumParam(widget, 'insertunfocussed', @@ -924,7 +924,7 @@ def test_coords(self): for i in range(4): self.assertIsInstance(coords[i], float) - @requires_tcl(8, 6) + @requires_tk(8, 6) def test_moveto(self): widget = self.create() i1 = widget.create_rectangle(1, 1, 20, 20, tags='group') @@ -969,7 +969,7 @@ def test_configure_activestyle(self): self.checkEnumParam(widget, 'activestyle', 'dotbox', 'none', 'underline') - test_configure_justify = requires_tcl(8, 6, 5)(StandardOptionsTests.test_configure_justify) + test_configure_justify = requires_tk(8, 6, 5)(StandardOptionsTests.test_configure_justify) def test_configure_listvariable(self): widget = self.create() @@ -1108,7 +1108,7 @@ def test_configure_digits(self): def test_configure_from(self): widget = self.create() - conv = float if get_tk_patchlevel() >= (8, 6, 10) else float_round + conv = float if get_tk_patchlevel(self.root) >= (8, 6, 10) else float_round self.checkFloatParam(widget, 'from', 100, 14.9, 15.1, conv=conv) def test_configure_label(self): @@ -1235,19 +1235,19 @@ def test_configure_opaqueresize(self): widget = self.create() 
self.checkBooleanParam(widget, 'opaqueresize') - @requires_tcl(8, 6, 5) + @requires_tk(8, 6, 5) def test_configure_proxybackground(self): widget = self.create() self.checkColorParam(widget, 'proxybackground') - @requires_tcl(8, 6, 5) + @requires_tk(8, 6, 5) def test_configure_proxyborderwidth(self): widget = self.create() self.checkPixelsParam(widget, 'proxyborderwidth', 0, 1.3, 2.9, 6, -2, '10p', conv=False) - @requires_tcl(8, 6, 5) + @requires_tk(8, 6, 5) def test_configure_proxyrelief(self): widget = self.create() self.checkReliefParam(widget, 'proxyrelief') diff --git a/Lib/test/test_tkinter/widget_tests.py b/Lib/test/test_tkinter/widget_tests.py index f60087a6e9f385..31f82f459beefd 100644 --- a/Lib/test/test_tkinter/widget_tests.py +++ b/Lib/test/test_tkinter/widget_tests.py @@ -1,7 +1,7 @@ # Common tests for test_tkinter/test_widgets.py and test_ttk/test_widgets.py import tkinter -from test.test_tkinter.support import (AbstractTkTest, tcl_version, +from test.test_tkinter.support import (AbstractTkTest, tk_version, pixels_conv, tcl_obj_eq) import test.support @@ -22,7 +22,7 @@ def scaling(self): return self._scaling def _str(self, value): - if not self._stringify and self.wantobjects and tcl_version >= (8, 6): + if not self._stringify and self.wantobjects and tk_version >= (8, 6): return value if isinstance(value, tuple): return ' '.join(map(self._str, value)) @@ -156,7 +156,7 @@ def checkReliefParam(self, widget, name): 'flat', 'groove', 'raised', 'ridge', 'solid', 'sunken') errmsg='bad relief "spam": must be '\ 'flat, groove, raised, ridge, solid, or sunken' - if tcl_version < (8, 6): + if tk_version < (8, 6): errmsg = None self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg) diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index d1552d8a20808f..7863e27fccd972 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -2521,7 +2521,7 @@ def test_tabs(self): def test_async(self): self.check_tokenize('async = 1', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) EQUAL '=' (1, 6) (1, 7) NUMBER '1' (1, 8) (1, 9) """) @@ -2530,21 +2530,21 @@ def test_async(self): NAME 'a' (1, 0) (1, 1) EQUAL '=' (1, 2) (1, 3) LPAR '(' (1, 4) (1, 5) - ASYNC 'async' (1, 5) (1, 10) + NAME 'async' (1, 5) (1, 10) EQUAL '=' (1, 11) (1, 12) NUMBER '1' (1, 13) (1, 14) RPAR ')' (1, 14) (1, 15) """) self.check_tokenize('async()', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) LPAR '(' (1, 5) (1, 6) RPAR ')' (1, 6) (1, 7) """) self.check_tokenize('class async(Bar):pass', """\ NAME 'class' (1, 0) (1, 5) - ASYNC 'async' (1, 6) (1, 11) + NAME 'async' (1, 6) (1, 11) LPAR '(' (1, 11) (1, 12) NAME 'Bar' (1, 12) (1, 15) RPAR ')' (1, 15) (1, 16) @@ -2554,13 +2554,13 @@ def test_async(self): self.check_tokenize('class async:pass', """\ NAME 'class' (1, 0) (1, 5) - ASYNC 'async' (1, 6) (1, 11) + NAME 'async' (1, 6) (1, 11) COLON ':' (1, 11) (1, 12) NAME 'pass' (1, 12) (1, 16) """) self.check_tokenize('await = 1', """\ - AWAIT 'await' (1, 0) (1, 5) + NAME 'await' (1, 0) (1, 5) EQUAL '=' (1, 6) (1, 7) NUMBER '1' (1, 8) (1, 9) """) @@ -2568,11 +2568,11 @@ def test_async(self): self.check_tokenize('foo.async', """\ NAME 'foo' (1, 0) (1, 3) DOT '.' 
(1, 3) (1, 4) - ASYNC 'async' (1, 4) (1, 9) + NAME 'async' (1, 4) (1, 9) """) self.check_tokenize('async for a in b: pass', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NAME 'for' (1, 6) (1, 9) NAME 'a' (1, 10) (1, 11) NAME 'in' (1, 12) (1, 14) @@ -2582,7 +2582,7 @@ def test_async(self): """) self.check_tokenize('async with a as b: pass', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NAME 'with' (1, 6) (1, 10) NAME 'a' (1, 11) (1, 12) NAME 'as' (1, 13) (1, 15) @@ -2592,45 +2592,45 @@ def test_async(self): """) self.check_tokenize('async.foo', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) DOT '.' (1, 5) (1, 6) NAME 'foo' (1, 6) (1, 9) """) self.check_tokenize('async', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) """) self.check_tokenize('async\n#comment\nawait', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NEWLINE '' (1, 5) (1, 5) - AWAIT 'await' (3, 0) (3, 5) + NAME 'await' (3, 0) (3, 5) """) self.check_tokenize('async\n...\nawait', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NEWLINE '' (1, 5) (1, 5) ELLIPSIS '...' (2, 0) (2, 3) NEWLINE '' (2, 3) (2, 3) - AWAIT 'await' (3, 0) (3, 5) + NAME 'await' (3, 0) (3, 5) """) self.check_tokenize('async\nawait', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NEWLINE '' (1, 5) (1, 5) - AWAIT 'await' (2, 0) (2, 5) + NAME 'await' (2, 0) (2, 5) """) self.check_tokenize('foo.async + 1', """\ NAME 'foo' (1, 0) (1, 3) DOT '.' (1, 3) (1, 4) - ASYNC 'async' (1, 4) (1, 9) + NAME 'async' (1, 4) (1, 9) PLUS '+' (1, 10) (1, 11) NUMBER '1' (1, 12) (1, 13) """) self.check_tokenize('async def foo(): pass', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NAME 'def' (1, 6) (1, 9) NAME 'foo' (1, 10) (1, 13) LPAR '(' (1, 13) (1, 14) @@ -2647,7 +2647,7 @@ def foo(await): await async += 1 ''', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NAME 'def' (1, 6) (1, 9) NAME 'foo' (1, 10) (1, 13) LPAR '(' (1, 13) (1, 14) @@ -2658,12 +2658,12 @@ def foo(await): NAME 'def' (2, 2) (2, 5) NAME 'foo' (2, 6) (2, 9) LPAR '(' (2, 9) (2, 10) - AWAIT 'await' (2, 10) (2, 15) + NAME 'await' (2, 10) (2, 15) RPAR ')' (2, 15) (2, 16) COLON ':' (2, 16) (2, 17) NEWLINE '' (2, 17) (2, 17) INDENT '' (3, -1) (3, -1) - AWAIT 'await' (3, 4) (3, 9) + NAME 'await' (3, 4) (3, 9) EQUAL '=' (3, 10) (3, 11) NUMBER '1' (3, 12) (3, 13) NEWLINE '' (3, 13) (3, 13) @@ -2673,18 +2673,18 @@ def foo(await): COLON ':' (4, 6) (4, 7) NEWLINE '' (4, 7) (4, 7) INDENT '' (5, -1) (5, -1) - AWAIT 'await' (5, 4) (5, 9) + NAME 'await' (5, 4) (5, 9) NEWLINE '' (5, 9) (5, 9) DEDENT '' (6, -1) (6, -1) DEDENT '' (6, -1) (6, -1) - ASYNC 'async' (6, 0) (6, 5) + NAME 'async' (6, 0) (6, 5) PLUSEQUAL '+=' (6, 6) (6, 8) NUMBER '1' (6, 9) (6, 10) NEWLINE '' (6, 10) (6, 10) """) self.check_tokenize('async def foo():\n async for i in 1: pass', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NAME 'def' (1, 6) (1, 9) NAME 'foo' (1, 10) (1, 13) LPAR '(' (1, 13) (1, 14) @@ -2692,7 +2692,7 @@ def foo(await): COLON ':' (1, 15) (1, 16) NEWLINE '' (1, 16) (1, 16) INDENT '' (2, -1) (2, -1) - ASYNC 'async' (2, 2) (2, 7) + NAME 'async' (2, 2) (2, 7) NAME 'for' (2, 8) (2, 11) NAME 'i' (2, 12) (2, 13) NAME 'in' (2, 14) (2, 16) @@ -2703,14 +2703,14 @@ def foo(await): """) self.check_tokenize('async def foo(async): await', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NAME 'def' (1, 6) (1, 9) NAME 'foo' (1, 10) (1, 13) LPAR '(' (1, 13) (1, 14) - ASYNC 
'async' (1, 14) (1, 19) + NAME 'async' (1, 14) (1, 19) RPAR ')' (1, 19) (1, 20) COLON ':' (1, 20) (1, 21) - AWAIT 'await' (1, 22) (1, 27) + NAME 'await' (1, 22) (1, 27) """) self.check_tokenize('''\ @@ -2734,7 +2734,7 @@ async def bar(): pass COLON ':' (3, 11) (3, 12) NAME 'pass' (3, 13) (3, 17) NEWLINE '' (3, 17) (3, 17) - ASYNC 'async' (4, 2) (4, 7) + NAME 'async' (4, 2) (4, 7) NAME 'def' (4, 8) (4, 11) NAME 'bar' (4, 12) (4, 15) LPAR '(' (4, 15) (4, 16) @@ -2742,7 +2742,7 @@ async def bar(): pass COLON ':' (4, 17) (4, 18) NAME 'pass' (4, 19) (4, 23) NEWLINE '' (4, 23) (4, 23) - AWAIT 'await' (6, 2) (6, 7) + NAME 'await' (6, 2) (6, 7) EQUAL '=' (6, 8) (6, 9) NUMBER '2' (6, 10) (6, 11) DEDENT '' (6, -1) (6, -1) @@ -2755,7 +2755,7 @@ def baz(): pass async def bar(): pass await = 2''', """\ - ASYNC 'async' (1, 0) (1, 5) + NAME 'async' (1, 0) (1, 5) NAME 'def' (1, 6) (1, 9) NAME 'f' (1, 10) (1, 11) LPAR '(' (1, 11) (1, 12) @@ -2770,7 +2770,7 @@ async def bar(): pass COLON ':' (3, 11) (3, 12) NAME 'pass' (3, 13) (3, 17) NEWLINE '' (3, 17) (3, 17) - ASYNC 'async' (4, 2) (4, 7) + NAME 'async' (4, 2) (4, 7) NAME 'def' (4, 8) (4, 11) NAME 'bar' (4, 12) (4, 15) LPAR '(' (4, 15) (4, 16) @@ -2778,7 +2778,7 @@ async def bar(): pass COLON ':' (4, 17) (4, 18) NAME 'pass' (4, 19) (4, 23) NEWLINE '' (4, 23) (4, 23) - AWAIT 'await' (6, 2) (6, 7) + NAME 'await' (6, 2) (6, 7) EQUAL '=' (6, 8) (6, 9) NUMBER '2' (6, 10) (6, 11) DEDENT '' (6, -1) (6, -1) diff --git a/Lib/test/test_tools/test_sundry.py b/Lib/test/test_tools/test_sundry.py index 2f8ba272164d32..d0b702d392cdf6 100644 --- a/Lib/test/test_tools/test_sundry.py +++ b/Lib/test/test_tools/test_sundry.py @@ -19,17 +19,11 @@ class TestSundryScripts(unittest.TestCase): # cleanly the logging module. @import_helper.mock_register_at_fork def test_sundry(self, mock_os): - old_modules = import_helper.modules_setup() - try: - for fn in os.listdir(scriptsdir): - if not fn.endswith('.py'): - continue - - name = fn[:-3] - import_tool(name) - finally: - # Unload all modules loaded in this test - import_helper.modules_cleanup(*old_modules) + for fn in os.listdir(scriptsdir): + if not fn.endswith('.py'): + continue + name = fn[:-3] + import_tool(name) if __name__ == '__main__': diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index da7d1fb559203e..7c6fdbf762921f 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -918,7 +918,7 @@ class CPythonTracebackErrorCaretTests( @cpython_only @requires_debug_ranges() -class CPythonTracebackErrorCaretTests( +class CPythonTracebackLegacyErrorCaretTests( CAPIExceptionFormattingLegacyMixin, TracebackErrorLocationCaretTestBase, unittest.TestCase, diff --git a/Lib/test/test_ttk/test_style.py b/Lib/test/test_ttk/test_style.py index 0ec95cf6b5ffc9..f9c56ec2357451 100644 --- a/Lib/test/test_ttk/test_style.py +++ b/Lib/test/test_ttk/test_style.py @@ -170,7 +170,7 @@ def test_map_custom_copy(self): newname = f'C.{name}' self.assertEqual(style.map(newname), {}) style.map(newname, **default) - if theme == 'alt' and name == '.' and get_tk_patchlevel() < (8, 6, 1): + if theme == 'alt' and name == '.' 
and get_tk_patchlevel(self.root) < (8, 6, 1): default['embossed'] = [('disabled', '1')] self.assertEqual(style.map(newname), default) for key, value in default.items(): diff --git a/Lib/test/test_ttk/test_widgets.py b/Lib/test/test_ttk/test_widgets.py index 79d65b496abdc6..fd1a748a498ac5 100644 --- a/Lib/test/test_ttk/test_widgets.py +++ b/Lib/test/test_ttk/test_widgets.py @@ -5,7 +5,7 @@ import sys from test.test_ttk_textonly import MockTclObj -from test.test_tkinter.support import (AbstractTkTest, tcl_version, get_tk_patchlevel, +from test.test_tkinter.support import (AbstractTkTest, tk_version, get_tk_patchlevel, simulate_mouse_click, AbstractDefaultRootTest) from test.test_tkinter.widget_tests import (add_standard_options, AbstractWidgetTest, StandardOptionsTests, IntegerSizeTests, PixelSizeTests) @@ -19,7 +19,7 @@ def test_configure_class(self): widget = self.create() self.assertEqual(widget['class'], '') errmsg='attempt to change read-only option' - if get_tk_patchlevel() < (8, 6, 0, 'beta', 3): + if get_tk_patchlevel(self.root) < (8, 6, 0, 'beta', 3): errmsg='Attempt to change read-only option' self.checkInvalidParam(widget, 'class', 'Foo', errmsg=errmsg) widget2 = self.create(class_='Foo') @@ -560,7 +560,7 @@ def test_configure_orient(self): widget = self.create() self.assertEqual(str(widget['orient']), 'vertical') errmsg='attempt to change read-only option' - if get_tk_patchlevel() < (8, 6, 0, 'beta', 3): + if get_tk_patchlevel(self.root) < (8, 6, 0, 'beta', 3): errmsg='Attempt to change read-only option' self.checkInvalidParam(widget, 'orient', 'horizontal', errmsg=errmsg) @@ -1526,7 +1526,7 @@ def test_heading(self): def test_heading_callback(self): def simulate_heading_click(x, y): - if tcl_version >= (8, 6): + if tk_version >= (8, 6): self.assertEqual(self.tv.identify_column(x), '#0') self.assertEqual(self.tv.identify_region(x, y), 'heading') simulate_mouse_click(self.tv, x, y) diff --git a/Lib/test/test_type_comments.py b/Lib/test/test_type_comments.py index aba4a44be9da96..9a11fab237235e 100644 --- a/Lib/test/test_type_comments.py +++ b/Lib/test/test_type_comments.py @@ -260,8 +260,8 @@ def test_asyncdef(self): self.assertEqual(tree.body[1].type_comment, None) def test_asyncvar(self): - for tree in self.parse_all(asyncvar, maxver=6): - pass + with self.assertRaises(SyntaxError): + self.classic_parse(asyncvar) def test_asynccomp(self): for tree in self.parse_all(asynccomp, minver=6): diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index 81744940f25b82..f2efee90dc0240 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -1397,6 +1397,7 @@ class A: pass class B(typing.Generic[T]): pass class C(B[int]): pass class D(B[str], float): pass + self.assertEqual(types.get_original_bases(A), (object,)) self.assertEqual(types.get_original_bases(B), (typing.Generic[T],)) self.assertEqual(types.get_original_bases(C), (B[int],)) @@ -1409,6 +1410,18 @@ class F(list[int]): pass self.assertEqual(types.get_original_bases(E), (list[T],)) self.assertEqual(types.get_original_bases(F), (list[int],)) + class FirstBase(typing.Generic[T]): pass + class SecondBase(typing.Generic[T]): pass + class First(FirstBase[int]): pass + class Second(SecondBase[int]): pass + class G(First, Second): pass + self.assertEqual(types.get_original_bases(G), (First, Second)) + + class First_(typing.Generic[T]): pass + class Second_(typing.Generic[T]): pass + class H(First_, Second_): pass + self.assertEqual(types.get_original_bases(H), (First_, Second_)) + class 
ClassBasedNamedTuple(typing.NamedTuple): x: int diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 0450a87577ecea..fa39c796197959 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -4091,6 +4091,22 @@ class C(Generic[T]): pass with self.assertRaises(TypeError): C[()] + def test_generic_subclass_checks(self): + for typ in [list[int], List[int], + tuple[int, str], Tuple[int, str], + typing.Callable[..., None], + collections.abc.Callable[..., None]]: + with self.subTest(typ=typ): + self.assertRaises(TypeError, issubclass, typ, object) + self.assertRaises(TypeError, issubclass, typ, type) + self.assertRaises(TypeError, issubclass, typ, typ) + self.assertRaises(TypeError, issubclass, object, typ) + + # isinstance is fine: + self.assertTrue(isinstance(typ, object)) + # but, not when the right arg is also a generic: + self.assertRaises(TypeError, isinstance, typ, typ) + def test_init(self): T = TypeVar('T') S = TypeVar('S') diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py index 3dac70eb12852c..2113757254c0ed 100644 --- a/Lib/test/test_zlib.py +++ b/Lib/test/test_zlib.py @@ -7,7 +7,7 @@ import pickle import random import sys -from test.support import bigmemtest, _1G, _4G +from test.support import bigmemtest, _1G, _4G, skip_on_s390x zlib = import_helper.import_module('zlib') @@ -44,10 +44,7 @@ # zlib.decompress(func1(data)) == zlib.decompress(func2(data)) == data # # Make the assumption that s390x always has an accelerator to simplify the skip -# condition. Windows doesn't have os.uname() but it doesn't support s390x. -skip_on_s390x = unittest.skipIf(hasattr(os, 'uname') and os.uname().machine == 's390x', - 'skipped on s390x') - +# condition. class VersionTestCase(unittest.TestCase): diff --git a/Lib/textwrap.py b/Lib/textwrap.py index 98bedd27ea3a11..7ca393d1c371aa 100644 --- a/Lib/textwrap.py +++ b/Lib/textwrap.py @@ -476,13 +476,19 @@ def indent(text, prefix, predicate=None): consist solely of whitespace characters. """ if predicate is None: - def predicate(line): - return line.strip() - - def prefixed_lines(): - for line in text.splitlines(True): - yield (prefix + line if predicate(line) else line) - return ''.join(prefixed_lines()) + # str.splitlines(True) doesn't produce empty string. + # ''.splitlines(True) => [] + # 'foo\n'.splitlines(True) => ['foo\n'] + # So we can use just `not s.isspace()` here. 
+ predicate = lambda s: not s.isspace() + + prefixed_lines = [] + for line in text.splitlines(True): + if predicate(line): + prefixed_lines.append(prefix) + prefixed_lines.append(line) + + return ''.join(prefixed_lines) if __name__ == "__main__": diff --git a/Lib/tkinter/__init__.py b/Lib/tkinter/__init__.py index c675c511e04533..c59f8d11e8a9da 100644 --- a/Lib/tkinter/__init__.py +++ b/Lib/tkinter/__init__.py @@ -4069,8 +4069,6 @@ def __init__(self, imgtype, name=None, cnf={}, master=None, **kw): elif kw: cnf = kw options = () for k, v in cnf.items(): - if callable(v): - v = self._register(v) options = options + ('-'+k, v) self.tk.call(('image', 'create', imgtype, name,) + options) self.name = name @@ -4097,8 +4095,6 @@ def configure(self, **kw): for k, v in _cnfmerge(kw).items(): if v is not None: if k[-1] == '_': k = k[:-1] - if callable(v): - v = self._register(v) res = res + ('-'+k, v) self.tk.call((self.name, 'config') + res) diff --git a/Lib/token.py b/Lib/token.py index 487f6edd3c951c..b620317106e173 100644 --- a/Lib/token.py +++ b/Lib/token.py @@ -59,20 +59,18 @@ COLONEQUAL = 53 EXCLAMATION = 54 OP = 55 -AWAIT = 56 -ASYNC = 57 -TYPE_IGNORE = 58 -TYPE_COMMENT = 59 -SOFT_KEYWORD = 60 -FSTRING_START = 61 -FSTRING_MIDDLE = 62 -FSTRING_END = 63 -COMMENT = 64 -NL = 65 +TYPE_IGNORE = 56 +TYPE_COMMENT = 57 +SOFT_KEYWORD = 58 +FSTRING_START = 59 +FSTRING_MIDDLE = 60 +FSTRING_END = 61 +COMMENT = 62 +NL = 63 # These aren't used by the C tokenizer but are needed for tokenize.py -ERRORTOKEN = 66 -ENCODING = 67 -N_TOKENS = 68 +ERRORTOKEN = 64 +ENCODING = 65 +N_TOKENS = 66 # Special definitions for cooperation with parser NT_OFFSET = 256 diff --git a/Lib/types.py b/Lib/types.py index 6110e6e1de7249..b4aa19cec40c89 100644 --- a/Lib/types.py +++ b/Lib/types.py @@ -165,14 +165,11 @@ class Baz(list[str]): ... assert get_original_bases(int) == (object,) """ try: - return cls.__orig_bases__ + return cls.__dict__.get("__orig_bases__", cls.__bases__) except AttributeError: - try: - return cls.__bases__ - except AttributeError: - raise TypeError( - f'Expected an instance of type, not {type(cls).__name__!r}' - ) from None + raise TypeError( + f"Expected an instance of type, not {type(cls).__name__!r}" + ) from None class DynamicClassAttribute: diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 9729759434a9f4..0fc25ec74565df 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -246,9 +246,9 @@ def library_recipes(): result.extend([ dict( - name="OpenSSL 1.1.1u", - url="https://www.openssl.org/source/openssl-1.1.1u.tar.gz", - checksum='e2f8d84b523eecd06c7be7626830370300fbcc15386bf5142d72758f6963ebc6', + name="OpenSSL 3.0.9", + url="https://www.openssl.org/source/openssl-3.0.9.tar.gz", + checksum='eb1ab04781474360f77c318ab89d8c5a03abc38e63d65a603cabbf1b00a1dc90', buildrecipe=build_universal_openssl, configure=None, install=None, diff --git a/Mac/BuildScript/resources/ReadMe.rtf b/Mac/BuildScript/resources/ReadMe.rtf index 5bc356d5267045..efd76b9b1ae64b 100644 --- a/Mac/BuildScript/resources/ReadMe.rtf +++ b/Mac/BuildScript/resources/ReadMe.rtf @@ -11,7 +11,7 @@ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\partightenfactor0 \f1\b \cf0 NOTE: -\f0\b0 This is a beta preview of Python 3.12.0, the next feature release of Python 3. It is not intended for production use.\ +\f0\b0 This is an alpha preview of Python 3.13.0, the next feature release of Python 3. 
It is not intended for production use.\ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 \cf0 \ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 @@ -19,14 +19,14 @@ \f1\b \cf0 \ul \ulc0 Certificate verification and OpenSSL\ \f0\b0 \ulnone \ -This package includes its own private copy of OpenSSL 1.1.1. The trust certificates in system and user keychains managed by the +This package includes its own private copy of OpenSSL 3.0. The trust certificates in system and user keychains managed by the \f2\i Keychain Access \f0\i0 application and the \f2\i security \f0\i0 command line utility are not used as defaults by the Python \f3 ssl \f0 module. A sample command script is included in -\f3 /Applications/Python 3.11 +\f3 /Applications/Python 3.13 \f0 to install a curated bundle of default root certificates from the third-party \f3 certifi \f0 package ({\field{\*\fldinst{HYPERLINK "https://pypi.org/project/certifi/"}}{\fldrslt https://pypi.org/project/certifi/}}). Double-click on diff --git a/Mac/BuildScript/resources/Welcome.rtf b/Mac/BuildScript/resources/Welcome.rtf index 83b7aa9d883a16..79851e1f4a69cc 100644 --- a/Mac/BuildScript/resources/Welcome.rtf +++ b/Mac/BuildScript/resources/Welcome.rtf @@ -12,9 +12,8 @@ \f1\b macOS $MACOSX_DEPLOYMENT_TARGET \f0\b0 .\ \ -\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\partightenfactor0 -\f1\b \cf0 Python for macOS +\f1\b Python for macOS \f0\b0 consists of the {\field{\*\fldinst{HYPERLINK "https://www.python.org"}}{\fldrslt Python}} programming language interpreter and its batteries-included standard library to allow easy access to macOS features. It also includes the Python integrated development environment, \f1\b IDLE \f0\b0 . You can also use the included @@ -27,5 +26,5 @@ At the end of this install, click on \ \f1\b NOTE: -\f0\b0 This is a beta test preview of Python 3.12.0, the next feature release of Python 3. It is not intended for production use.\ +\f0\b0 This is an alpha test preview of Python 3.13.0, the next feature release of Python 3. 
It is not intended for production use.\ } \ No newline at end of file diff --git a/Makefile.pre.in b/Makefile.pre.in index 553b2aa480c184..52236f7924503d 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -381,6 +381,7 @@ PYTHON_OBJS= \ Python/context.o \ Python/dynamic_annotations.o \ Python/errors.o \ + Python/executor.o \ Python/flowgraph.o \ Python/frame.o \ Python/frozenmain.o \ @@ -774,9 +775,13 @@ coverage-report: regen-token regen-frozen # Run "Argument Clinic" over all source files .PHONY: clinic clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py --make --srcdir $(srcdir) + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py --make --exclude Lib/test/clinic.test.c --srcdir $(srcdir) $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_global_objects.py +.PHONY: clinic-tests +clinic-tests: check-clean-src $(srcdir)/Lib/test/clinic.test.c + $(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py -f $(srcdir)/Lib/test/clinic.test.c + # Build the interpreter $(BUILDPYTHON): Programs/python.o $(LINK_PYTHON_DEPS) $(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(LINK_PYTHON_OBJS) $(LIBS) $(MODLIBS) $(SYSLIBS) @@ -1315,7 +1320,7 @@ regen-limited-abi: all # Regenerate all generated files .PHONY: regen-all -regen-all: regen-cases regen-opcode regen-opcode-targets regen-typeslots \ +regen-all: regen-cases regen-opcode regen-typeslots \ regen-token regen-ast regen-keyword regen-sre regen-frozen clinic \ regen-pegen-metaparser regen-pegen regen-test-frozenmain \ regen-test-levenshtein regen-global-objects @@ -1426,12 +1431,14 @@ regen-opcode: $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_opcode_h.py \ $(srcdir)/Lib/opcode.py \ $(srcdir)/Lib/_opcode_metadata.py \ + $(srcdir)/Include/opcode_ids.h.new \ $(srcdir)/Include/opcode.h.new \ - $(srcdir)/Include/internal/pycore_opcode.h.new \ - $(srcdir)/Include/internal/pycore_intrinsics.h.new + $(srcdir)/Python/opcode_targets.h.new \ + $(srcdir)/Include/internal/pycore_opcode.h.new + $(UPDATE_FILE) $(srcdir)/Include/opcode_ids.h $(srcdir)/Include/opcode_ids.h.new $(UPDATE_FILE) $(srcdir)/Include/opcode.h $(srcdir)/Include/opcode.h.new + $(UPDATE_FILE) $(srcdir)/Python/opcode_targets.h $(srcdir)/Python/opcode_targets.h.new $(UPDATE_FILE) $(srcdir)/Include/internal/pycore_opcode.h $(srcdir)/Include/internal/pycore_opcode.h.new - $(UPDATE_FILE) $(srcdir)/Include/internal/pycore_intrinsics.h $(srcdir)/Include/internal/pycore_intrinsics.h.new .PHONY: regen-token regen-token: @@ -1532,13 +1539,6 @@ Objects/unicodeobject.o: $(srcdir)/Objects/unicodeobject.c $(UNICODE_DEPS) Objects/dictobject.o: $(srcdir)/Objects/stringlib/eq.h Objects/setobject.o: $(srcdir)/Objects/stringlib/eq.h -.PHONY: regen-opcode-targets -regen-opcode-targets: - # Regenerate Python/opcode_targets.h from Lib/opcode.py - # using Python/makeopcodetargets.py - $(PYTHON_FOR_REGEN) $(srcdir)/Python/makeopcodetargets.py \ - $(srcdir)/Python/opcode_targets.h.new - $(UPDATE_FILE) $(srcdir)/Python/opcode_targets.h $(srcdir)/Python/opcode_targets.h.new .PHONY: regen-cases regen-cases: @@ -1564,10 +1564,13 @@ Python/ceval.o: \ $(srcdir)/Python/ceval_macros.h \ $(srcdir)/Python/condvar.h \ $(srcdir)/Python/generated_cases.c.h \ - $(srcdir)/Python/executor_cases.c.h \ - $(srcdir)/Include/internal/pycore_opcode_metadata.h \ $(srcdir)/Python/opcode_targets.h +Python/executor.o: \ + $(srcdir)/Include/internal/pycore_opcode_metadata.h \ + $(srcdir)/Python/ceval_macros.h \ + $(srcdir)/Python/executor_cases.c.h 
+ Python/flowgraph.o: \ $(srcdir)/Include/internal/pycore_opcode_metadata.h @@ -1796,6 +1799,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_runtime.h \ $(srcdir)/Include/internal/pycore_runtime_init_generated.h \ $(srcdir)/Include/internal/pycore_runtime_init.h \ + $(srcdir)/Include/internal/pycore_setobject.h \ $(srcdir)/Include/internal/pycore_signal.h \ $(srcdir)/Include/internal/pycore_sliceobject.h \ $(srcdir)/Include/internal/pycore_strhex.h \ @@ -1937,8 +1941,7 @@ altinstall: commoninstall .PHONY: commoninstall commoninstall: check-clean-src @FRAMEWORKALTINSTALLFIRST@ \ altbininstall libinstall inclinstall libainstall \ - sharedinstall altmaninstall \ - @FRAMEWORKALTINSTALLLAST@ + sharedinstall altmaninstall @FRAMEWORKALTINSTALLLAST@ # Install shared libraries enabled by Setup DESTDIRS= $(exec_prefix) $(LIBDIR) $(BINLIBDEST) $(DESTSHARED) diff --git a/Misc/ACKS b/Misc/ACKS index 645ad5b700baaa..8b8c5ad8434bd7 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -289,6 +289,7 @@ Edward Catmur Lorenzo M. Catucci Bruno Cauet Donn Cave +James Cave Charles Cazabon Jesús Cea Avión Per Cederqvist @@ -1700,6 +1701,7 @@ Ngalim Siregar Kragen Sitaker Kaartic Sivaraam Stanisław Skonieczny +Bart Skowron Roman Skurikhin Ville Skyttä Michael Sloan diff --git a/Misc/NEWS.d/3.10.0a1.rst b/Misc/NEWS.d/3.10.0a1.rst index 301612c4307da4..79d85a40df8bbe 100644 --- a/Misc/NEWS.d/3.10.0a1.rst +++ b/Misc/NEWS.d/3.10.0a1.rst @@ -228,8 +228,8 @@ format string in f-string and :meth:`str.format`. .. section: Core and Builtins The implementation of :func:`signal.siginterrupt` now uses -:c:func:`sigaction` (if it is available in the system) instead of the -deprecated :c:func:`siginterrupt`. Patch by Pablo Galindo. +:c:func:`!sigaction` (if it is available in the system) instead of the +deprecated :c:func:`!siginterrupt`. Patch by Pablo Galindo. .. @@ -2176,7 +2176,7 @@ None. .. nonce: YoYoYo .. section: Library -Add a new :data:`os.RWF_APPEND` flag for :func:`os.pwritev`. +Add a new :const:`os.RWF_APPEND` flag for :func:`os.pwritev`. .. @@ -2304,7 +2304,7 @@ Restored the deprecated :mod:`xml.etree.cElementTree` module. .. nonce: ZCk0_c .. section: Library -:data:`~mmap.MAP_POPULATE` constant has now been added to the list of +:const:`~mmap.MAP_POPULATE` constant has now been added to the list of exported :mod:`mmap` module flags. .. diff --git a/Misc/NEWS.d/3.10.0a2.rst b/Misc/NEWS.d/3.10.0a2.rst index 061a82e90afd6b..78b25779802d6e 100644 --- a/Misc/NEWS.d/3.10.0a2.rst +++ b/Misc/NEWS.d/3.10.0a2.rst @@ -604,7 +604,7 @@ changes the working directory. PR by Anthony Sottile. .. nonce: 9wXTtY .. section: Library -The :mod:`shelve` module now uses :data:`pickle.DEFAULT_PROTOCOL` by default +The :mod:`shelve` module now uses :const:`pickle.DEFAULT_PROTOCOL` by default instead of :mod:`pickle` protocol ``3``. .. @@ -847,8 +847,8 @@ Victor Stinner. .. section: C API Fix potential crash in deallocating method objects when dynamically -allocated `PyMethodDef`'s lifetime is managed through the ``self`` argument -of a `PyCFunction`. +allocated :c:type:`PyMethodDef`'s lifetime is managed through the ``self`` argument +of a :c:type:`PyCFunction`. .. diff --git a/Misc/NEWS.d/3.10.0a3.rst b/Misc/NEWS.d/3.10.0a3.rst index f24b6d43e5783f..755109cbd376f4 100644 --- a/Misc/NEWS.d/3.10.0a3.rst +++ b/Misc/NEWS.d/3.10.0a3.rst @@ -1466,7 +1466,7 @@ success. Patch by Victor Stinner. .. nonce: S3FWTP .. section: C API -The :c:data:`METH_FASTCALL` calling convention is added to the limited API. 
+The :c:macro:`METH_FASTCALL` calling convention is added to the limited API. The functions :c:func:`PyModule_AddType`, :c:func:`PyType_FromModuleAndSpec`, :c:func:`PyType_GetModule` and :c:func:`PyType_GetModuleState` are added to the limited API on Windows. diff --git a/Misc/NEWS.d/3.10.0a5.rst b/Misc/NEWS.d/3.10.0a5.rst index 497e3849171831..dc95e8ce072fd9 100644 --- a/Misc/NEWS.d/3.10.0a5.rst +++ b/Misc/NEWS.d/3.10.0a5.rst @@ -667,4 +667,4 @@ exception (if an exception is set). Patch by Victor Stinner. .. section: C API Fixed a compiler warning in :c:func:`Py_UNICODE_ISSPACE()` on platforms with -signed ``wchar_t``. +signed :c:type:`wchar_t`. diff --git a/Misc/NEWS.d/3.10.0a6.rst b/Misc/NEWS.d/3.10.0a6.rst index 803df6f51ce628..313aa689254040 100644 --- a/Misc/NEWS.d/3.10.0a6.rst +++ b/Misc/NEWS.d/3.10.0a6.rst @@ -294,8 +294,8 @@ actual dictionary. This created problems for introspection tools. .. nonce: SwcSuU .. section: Library -Added :data:`~os.O_EVTONLY`, :data:`~os.O_FSYNC`, :data:`~os.O_SYMLINK` and -:data:`~os.O_NOFOLLOW_ANY` for macOS. Patch by Dong-hee Na. +Added :const:`~os.O_EVTONLY`, :const:`~os.O_FSYNC`, :const:`~os.O_SYMLINK` and +:const:`~os.O_NOFOLLOW_ANY` for macOS. Patch by Dong-hee Na. .. @@ -304,7 +304,7 @@ Added :data:`~os.O_EVTONLY`, :data:`~os.O_FSYNC`, :data:`~os.O_SYMLINK` and .. nonce: a7Dote .. section: Library -Adds :data:`resource.RLIMIT_KQUEUES` constant from FreeBSD to the +Adds :const:`resource.RLIMIT_KQUEUES` constant from FreeBSD to the :mod:`resource` module. .. diff --git a/Misc/NEWS.d/3.10.0a7.rst b/Misc/NEWS.d/3.10.0a7.rst index 286d0a8a7e9190..7933f71b01c14d 100644 --- a/Misc/NEWS.d/3.10.0a7.rst +++ b/Misc/NEWS.d/3.10.0a7.rst @@ -215,8 +215,8 @@ a non-Python signal handler. .. nonce: VouZjn .. section: Core and Builtins -Add ``__match_args__`` to :c:type:`structsequence` based classes. Patch by -Pablo Galindo. +Add ``__match_args__`` to :ref:`struct sequence objects `. +Patch by Pablo Galindo. .. @@ -713,7 +713,7 @@ this situation. Also ensures that the :func:`tempfile.gettempdir()` and .. section: Library Expose ``X509_V_FLAG_ALLOW_PROXY_CERTS`` as -:data:`~ssl.VERIFY_ALLOW_PROXY_CERTS` to allow proxy certificate validation +:const:`~ssl.VERIFY_ALLOW_PROXY_CERTS` to allow proxy certificate validation as explained in https://www.openssl.org/docs/man1.1.1/man7/proxy-certificates.html. diff --git a/Misc/NEWS.d/3.10.0b1.rst b/Misc/NEWS.d/3.10.0b1.rst index f29fc6632db26c..3c71bc73b812a1 100644 --- a/Misc/NEWS.d/3.10.0b1.rst +++ b/Misc/NEWS.d/3.10.0b1.rst @@ -871,7 +871,7 @@ assert_called_once_with) will unconditionally pass. .. nonce: -1XPDH .. section: Library -Add :data:`ssl.OP_IGNORE_UNEXPECTED_EOF` constants (OpenSSL 3.0.0) +Add :const:`ssl.OP_IGNORE_UNEXPECTED_EOF` constants (OpenSSL 3.0.0) .. @@ -1375,8 +1375,8 @@ Add "Annotations Best Practices" document as a new HOWTO. .. nonce: K5aSl1 .. section: Documentation -Document the new :const:`Py_TPFLAGS_MAPPING` and -:const:`Py_TPFLAGS_SEQUENCE` type flags. +Document the new :c:macro:`Py_TPFLAGS_MAPPING` and +:c:macro:`Py_TPFLAGS_SEQUENCE` type flags. .. @@ -1711,7 +1711,7 @@ IDLE's shell now shows prompts in a separate side-bar. .. nonce: wvWt23 .. section: C API -Add a new :c:data:`Py_TPFLAGS_DISALLOW_INSTANTIATION` type flag to disallow +Add a new :c:macro:`Py_TPFLAGS_DISALLOW_INSTANTIATION` type flag to disallow creating type instances. Patch by Victor Stinner. .. @@ -1759,7 +1759,7 @@ module. .. nonce: Co3YhZ .. 
section: C API -Introduce :const:`Py_TPFLAGS_IMMUTABLETYPE` flag for immutable type objects, +Introduce :c:macro:`Py_TPFLAGS_IMMUTABLETYPE` flag for immutable type objects, and modify :c:func:`PyType_Ready` to set it for static types. Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/3.11.0a1.rst b/Misc/NEWS.d/3.11.0a1.rst index 2f40252344f36a..17ee5138dbf90f 100644 --- a/Misc/NEWS.d/3.11.0a1.rst +++ b/Misc/NEWS.d/3.11.0a1.rst @@ -888,7 +888,7 @@ zlib.decompress on input data that expands that large. .. nonce: YHuV_s .. section: Core and Builtins -Heap types with the :const:`Py_TPFLAGS_IMMUTABLETYPE` flag can now inherit +Heap types with the :c:macro:`Py_TPFLAGS_IMMUTABLETYPE` flag can now inherit the :pep:`590` vectorcall protocol. Previously, this was only possible for :ref:`static types `. Patch by Erlend E. Aasland. @@ -1468,8 +1468,8 @@ an installed expat library <= 2.2.0. On Unix, if the ``sem_clockwait()`` function is available in the C library (glibc 2.30 and newer), the :meth:`threading.Lock.acquire` method now uses -the monotonic clock (:data:`time.CLOCK_MONOTONIC`) for the timeout, rather -than using the system clock (:data:`time.CLOCK_REALTIME`), to not be +the monotonic clock (:const:`time.CLOCK_MONOTONIC`) for the timeout, rather +than using the system clock (:const:`time.CLOCK_REALTIME`), to not be affected by system clock changes. Patch by Victor Stinner. .. @@ -2087,8 +2087,8 @@ Upgrade bundled pip to 21.2.3 and setuptools to 57.4.0 .. section: Library Fix the :func:`os.set_inheritable` function on FreeBSD 14 for file -descriptor opened with the :data:`~os.O_PATH` flag: ignore the -:data:`~errno.EBADF` error on ``ioctl()``, fallback on the ``fcntl()`` +descriptor opened with the :const:`~os.O_PATH` flag: ignore the +:const:`~errno.EBADF` error on ``ioctl()``, fallback on the ``fcntl()`` implementation. Patch by Victor Stinner. .. @@ -2575,7 +2575,7 @@ E. Aasland. .. nonce: bamAGF .. section: Library -Set the proper :const:`Py_TPFLAGS_MAPPING` and :const:`Py_TPFLAGS_SEQUENCE` +Set the proper :c:macro:`Py_TPFLAGS_MAPPING` and :c:macro:`Py_TPFLAGS_SEQUENCE` flags for subclasses created before a parent has been registered as a :class:`collections.abc.Mapping` or :class:`collections.abc.Sequence`. @@ -2693,7 +2693,7 @@ libgcc_s.so file (ex: EMFILE error). Patch by Victor Stinner. .. section: Library The _thread.RLock type now fully implement the GC protocol: add a traverse -function and the :const:`Py_TPFLAGS_HAVE_GC` flag. Patch by Victor Stinner. +function and the :c:macro:`Py_TPFLAGS_HAVE_GC` flag. Patch by Victor Stinner. .. @@ -5014,7 +5014,7 @@ must now be used to set an object type and size. Patch by Victor Stinner. .. section: C API The :c:func:`PyType_Ready` function now raises an error if a type is defined -with the :const:`Py_TPFLAGS_HAVE_GC` flag set but has no traverse function +with the :c:macro:`Py_TPFLAGS_HAVE_GC` flag set but has no traverse function (:c:member:`PyTypeObject.tp_traverse`). Patch by Victor Stinner. .. diff --git a/Misc/NEWS.d/3.11.0a4.rst b/Misc/NEWS.d/3.11.0a4.rst index bcb6e8b7bdde31..3dd335929d655f 100644 --- a/Misc/NEWS.d/3.11.0a4.rst +++ b/Misc/NEWS.d/3.11.0a4.rst @@ -839,7 +839,7 @@ patch by Kumar Aditya. .. nonce: jeiPiX .. section: Library -Added :data:`signal.SIGSTKFLT` on platforms where this signal is defined. +Added :const:`signal.SIGSTKFLT` on platforms where this signal is defined. .. 
diff --git a/Misc/NEWS.d/3.11.0a6.rst b/Misc/NEWS.d/3.11.0a6.rst index 8621edcfb04bb3..fcec71c6f59da2 100644 --- a/Misc/NEWS.d/3.11.0a6.rst +++ b/Misc/NEWS.d/3.11.0a6.rst @@ -352,7 +352,7 @@ rather than ``JUMP_FORWARD`` with an argument of ``(2**32)+offset``. .. nonce: 3Z_qxd .. section: Core and Builtins -Correct the docstring for the :meth:`__bool__` method. Patch by Jelle +Correct the docstring for the :meth:`~object.__bool__` method. Patch by Jelle Zijlstra. .. diff --git a/Misc/NEWS.d/3.11.0a7.rst b/Misc/NEWS.d/3.11.0a7.rst index d3e59a2195669f..94c15f1c1f5237 100644 --- a/Misc/NEWS.d/3.11.0a7.rst +++ b/Misc/NEWS.d/3.11.0a7.rst @@ -275,7 +275,7 @@ initializing to ``list_extend``. Patch by Jeremiah Pascual. .. nonce: cnaIK3 .. section: Core and Builtins -Speed up throwing exception in generator with :const:`METH_FASTCALL` calling +Speed up throwing exception in generator with :c:macro:`METH_FASTCALL` calling convention. Patch by Kumar Aditya. .. diff --git a/Misc/NEWS.d/3.11.0b1.rst b/Misc/NEWS.d/3.11.0b1.rst index 1338819375bc89..71efc21cbc4a61 100644 --- a/Misc/NEWS.d/3.11.0b1.rst +++ b/Misc/NEWS.d/3.11.0b1.rst @@ -817,8 +817,8 @@ it is ever needed and document the existing mechanism for ``posix_spawn()``. .. nonce: HFtERN .. section: Library -Fix :data:`signal.NSIG` value on FreeBSD to accept signal numbers greater -than 32, like :data:`signal.SIGRTMIN` and :data:`signal.SIGRTMAX`. Patch by +Fix :const:`signal.NSIG` value on FreeBSD to accept signal numbers greater +than 32, like :const:`signal.SIGRTMIN` and :const:`signal.SIGRTMAX`. Patch by Victor Stinner. .. @@ -1799,7 +1799,7 @@ The documentation now lists which members of C structs are part of the .. nonce: FIVe9I .. section: Documentation -All docstrings in code snippets are now wrapped into :func:`PyDoc_STR` to +All docstrings in code snippets are now wrapped into :c:macro:`PyDoc_STR` to follow the guideline of `PEP 7's Documentation Strings paragraph `_. Patch by Oleg Iarygin. diff --git a/Misc/NEWS.d/3.12.0a1.rst b/Misc/NEWS.d/3.12.0a1.rst index d706343adf583c..5178f4055e7b8e 100644 --- a/Misc/NEWS.d/3.12.0a1.rst +++ b/Misc/NEWS.d/3.12.0a1.rst @@ -203,7 +203,7 @@ the interpreter. .. nonce: LYAWlE .. section: Core and Builtins -Bugfix: :func:`PyFunction_GetAnnotations` should return a borrowed +Bugfix: :c:func:`PyFunction_GetAnnotations` should return a borrowed reference. It was returning a new reference. .. @@ -736,7 +736,7 @@ new types. .. nonce: 6eoc8k .. section: Core and Builtins -On WASI :data:`~errno.ENOTCAPABLE` is now mapped to :exc:`PermissionError`. +On WASI :const:`~errno.ENOTCAPABLE` is now mapped to :exc:`PermissionError`. The :mod:`errno` modules exposes the new error number. ``getpath.py`` now ignores :exc:`PermissionError` when it cannot open landmark files ``pybuilddir.txt`` and ``pyenv.cfg``. @@ -2649,7 +2649,7 @@ calling any callbacks. Patch by Kumar Aditya. .. nonce: i807-g .. section: Library -Fail gracefully if :data:`~errno.EPERM` or :data:`~errno.ENOSYS` is raised +Fail gracefully if :const:`~errno.EPERM` or :const:`~errno.ENOSYS` is raised when loading :mod:`!crypt` methods. This may happen when trying to load ``MD5`` on a Linux kernel with :abbr:`FIPS (Federal Information Processing Standard)` enabled. @@ -2698,8 +2698,8 @@ Upgrade bundled pip to 22.2. .. nonce: VT34A5 .. 
section: Library -Fix check for existence of :data:`os.EFD_CLOEXEC`, :data:`os.EFD_NONBLOCK` -and :data:`os.EFD_SEMAPHORE` flags on older kernel versions where these +Fix check for existence of :const:`os.EFD_CLOEXEC`, :const:`os.EFD_NONBLOCK` +and :const:`os.EFD_SEMAPHORE` flags on older kernel versions where these flags are not present. Patch by Kumar Aditya. .. @@ -2752,7 +2752,7 @@ by Shin-myoung-serp. .. section: Library Add deprecation warning for enum ``member.member`` access (e.g. -``Color.RED.BLUE``). +``Color.RED.BLUE``). Remove ``EnumMeta.__getattr__``. .. @@ -3553,7 +3553,7 @@ Make :class:`multiprocessing.Pool` raise an exception if .. nonce: HY0Uzj .. section: Library -Add :data:`os.PIDFD_NONBLOCK` flag to open a file descriptor for a process +Add :const:`os.PIDFD_NONBLOCK` flag to open a file descriptor for a process with :func:`os.pidfd_open` in non-blocking mode. Patch by Kumar Aditya. .. @@ -4171,7 +4171,7 @@ Add an index_pages parameter to support using non-default index page names. .. nonce: qtT3CE .. section: Library -Drop support for :class:`bytes` on :attr:`sys.path`. +Drop support for :class:`bytes` on :data:`sys.path`. .. @@ -5308,7 +5308,7 @@ parameter. Patch by Kumar Aditya. .. section: Build Python now always use the ``%zu`` and ``%zd`` printf formats to format a -``size_t`` or ``Py_ssize_t`` number. Building Python 3.12 requires a C11 +:c:type:`size_t` or ``Py_ssize_t`` number. Building Python 3.12 requires a C11 compiler, so these printf formats are now always supported. Patch by Victor Stinner. @@ -5856,8 +5856,8 @@ Configuration for the :ref:`integer string conversion length limitation Extensions classes that set ``tp_dictoffset`` and ``tp_weaklistoffset`` lose the support for multiple inheritance, but are now safe. Extension classes -should use :const:`Py_TPFLAGS_MANAGED_DICT` and -:const:`Py_TPFLAGS_MANAGED_WEAKREF` instead. +should use :c:macro:`Py_TPFLAGS_MANAGED_DICT` and +:c:macro:`Py_TPFLAGS_MANAGED_WEAKREF` instead. .. @@ -5898,7 +5898,7 @@ Support C extensions using managed dictionaries by setting the .. nonce: QoDHEu .. section: C API -API for implementing vectorcall (:c:data:`Py_TPFLAGS_HAVE_VECTORCALL`, +API for implementing vectorcall (:c:macro:`Py_TPFLAGS_HAVE_VECTORCALL`, :c:func:`PyVectorcall_NARGS` and :c:func:`PyVectorcall_Call`) was added to the limited API and stable ABI. @@ -5920,12 +5920,12 @@ Philip Georgi. .. nonce: -DdGEy .. section: C API -The :const:`Py_TPFLAGS_HAVE_VECTORCALL` flag is now removed from a class +The :c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` flag is now removed from a class when the class's :py:meth:`~object.__call__` method is reassigned. This makes vectorcall safe to use with mutable types (i.e. heap types without the :const:`immutable ` flag). Mutable types that do not override :c:member:`~PyTypeObject.tp_call` now inherit the -:const:`Py_TPFLAGS_HAVE_VECTORCALL` flag. +:c:macro:`Py_TPFLAGS_HAVE_VECTORCALL` flag. .. @@ -5934,7 +5934,7 @@ not override :c:member:`~PyTypeObject.tp_call` now inherit the .. nonce: aiRSgr .. section: C API -Creating :c:data:`immutable types ` with mutable +Creating :c:macro:`immutable types ` with mutable bases is deprecated and is planned to be disabled in Python 3.14. .. diff --git a/Misc/NEWS.d/3.12.0a2.rst b/Misc/NEWS.d/3.12.0a2.rst index d871384903e7cd..f781e38665a8ea 100644 --- a/Misc/NEWS.d/3.12.0a2.rst +++ b/Misc/NEWS.d/3.12.0a2.rst @@ -279,7 +279,7 @@ Fix source locations of :keyword:`match` sub-patterns. 
Added the methods :c:func:`PyObject_Vectorcall` and :c:func:`PyObject_VectorcallMethod` to the :ref:`Limited API ` along -with the auxiliary macro constant :const:`PY_VECTORCALL_ARGUMENTS_OFFSET`. +with the auxiliary macro constant :c:macro:`PY_VECTORCALL_ARGUMENTS_OFFSET`. The availability of these functions enables more efficient :PEP:`590` vector calls from binary extension modules that avoid argument boxing/unboxing @@ -397,7 +397,7 @@ longobject.c to speed up some operations. .. nonce: nSGEkG .. section: Core and Builtins -Expose :data:`~socket.ETH_P_ALL` and some of the :ref:`ETHERTYPE_* constants +Expose :const:`~socket.ETH_P_ALL` and some of the :ref:`ETHERTYPE_* constants ` in :mod:`socket`. Patch by Noam Cohen. .. diff --git a/Misc/NEWS.d/3.12.0a3.rst b/Misc/NEWS.d/3.12.0a3.rst index 3d1e43350d136e..3e6f8de5d911f2 100644 --- a/Misc/NEWS.d/3.12.0a3.rst +++ b/Misc/NEWS.d/3.12.0a3.rst @@ -505,7 +505,7 @@ return True from this method; now they correctly return False. .. nonce: ZoOY5G .. section: Library -Add an :data:`~ssl.OP_ENABLE_KTLS` option for enabling the use of the kernel +Add an :const:`~ssl.OP_ENABLE_KTLS` option for enabling the use of the kernel TLS (kTLS). Patch by Illia Volochii. .. diff --git a/Misc/NEWS.d/3.12.0a4.rst b/Misc/NEWS.d/3.12.0a4.rst index dd26d4d964d6b7..8951490f41b94c 100644 --- a/Misc/NEWS.d/3.12.0a4.rst +++ b/Misc/NEWS.d/3.12.0a4.rst @@ -317,7 +317,7 @@ Improve performance of ``list.pop`` for small lists. .. nonce: yP4Na0 .. section: Core and Builtins -Add :data:`ssl.OP_LEGACY_SERVER_CONNECT` +Add :const:`ssl.OP_LEGACY_SERVER_CONNECT` .. @@ -356,7 +356,7 @@ arrays. .. nonce: mHRdQn .. section: Library -Add :data:`socket.IP_PKTINFO` constant. +Add :const:`socket.IP_PKTINFO` constant. .. diff --git a/Misc/NEWS.d/3.12.0a6.rst b/Misc/NEWS.d/3.12.0a6.rst index f6beb5b7ec3dbc..07967028bdee70 100644 --- a/Misc/NEWS.d/3.12.0a6.rst +++ b/Misc/NEWS.d/3.12.0a6.rst @@ -303,7 +303,7 @@ Kim. .. nonce: Vxz0Mr .. section: Library -Add :data:`mmap.MAP_ALIGNED_SUPER` FreeBSD and :data:`mmap.MAP_CONCEAL` +Add :const:`mmap.MAP_ALIGNED_SUPER` FreeBSD and :const:`mmap.MAP_CONCEAL` OpenBSD constants to :mod:`mmap`. Patch by Yeojin Kim. .. diff --git a/Misc/NEWS.d/3.12.0a7.rst b/Misc/NEWS.d/3.12.0a7.rst index 8f078e50823a00..1ef81747558857 100644 --- a/Misc/NEWS.d/3.12.0a7.rst +++ b/Misc/NEWS.d/3.12.0a7.rst @@ -605,7 +605,7 @@ reported unauthenticated EOFs (i.e. without close_notify) as a clean TLS-level EOF. It now raises :exc:`~ssl.SSLEOFError`, matching the behavior in previous versions of OpenSSL. The :attr:`~ssl.SSLContext.options` attribute on :class:`~ssl.SSLContext` also no longer includes -:data:`~ssl.OP_IGNORE_UNEXPECTED_EOF` by default. This option may be set to +:const:`~ssl.OP_IGNORE_UNEXPECTED_EOF` by default. This option may be set to specify the previous OpenSSL 3.0 behavior. .. diff --git a/Misc/NEWS.d/3.12.0b1.rst b/Misc/NEWS.d/3.12.0b1.rst index 96d76f89fa9c23..652b706880fb92 100644 --- a/Misc/NEWS.d/3.12.0b1.rst +++ b/Misc/NEWS.d/3.12.0b1.rst @@ -842,7 +842,7 @@ filesystem case. .. section: Library Improve performance of :meth:`pathlib.Path.glob` by using -:data:`re.IGNORECASE` to implement case-insensitive matching. +:const:`re.IGNORECASE` to implement case-insensitive matching. .. @@ -1205,7 +1205,7 @@ the future, it will raise a ``KeyError``. Fixed a bug where :mod:`pdb` crashes when reading source file with different encoding by replacing :func:`io.open` with :func:`io.open_code`. 
The new -method would also call into the hook set by :func:`PyFile_SetOpenCodeHook`. +method would also call into the hook set by :c:func:`PyFile_SetOpenCodeHook`. .. @@ -1882,7 +1882,7 @@ both cases. .. nonce: 564i32 .. section: Library -Add :data:`~csv.QUOTE_STRINGS` and :data:`~csv.QUOTE_NOTNULL` to the suite +Add :const:`~csv.QUOTE_STRINGS` and :const:`~csv.QUOTE_NOTNULL` to the suite of :mod:`csv` module quoting styles. .. @@ -1955,7 +1955,7 @@ introduced in :pep:`692`. .. section: Documentation Clarifying documentation about the url parameter to urllib.request.urlopen -and urllib.request.Requst needing to be encoded properly. +and urllib.request.Request needing to be encoded properly. .. @@ -2382,7 +2382,7 @@ Patch by Dong-hee Na. .. section: C API Add support of more formatting options (left aligning, octals, uppercase -hexadecimals, :c:expr:`intmax_t`, :c:expr:`ptrdiff_t`, :c:expr:`wchar_t` C +hexadecimals, :c:type:`intmax_t`, :c:type:`ptrdiff_t`, :c:type:`wchar_t` C strings, variable width and precision) in :c:func:`PyUnicode_FromFormat` and :c:func:`PyUnicode_FromFormatV`. diff --git a/Misc/NEWS.d/3.6.0a1.rst b/Misc/NEWS.d/3.6.0a1.rst index 53f09b3dfe3363..98f1215fb91873 100644 --- a/Misc/NEWS.d/3.6.0a1.rst +++ b/Misc/NEWS.d/3.6.0a1.rst @@ -125,7 +125,7 @@ Setuptools 19.0. .. section: Core and Builtins Memory functions of the :c:func:`PyMem_Malloc` domain -(:c:data:`PYMEM_DOMAIN_MEM`) now use the :ref:`pymalloc allocator +(:c:macro:`PYMEM_DOMAIN_MEM`) now use the :ref:`pymalloc allocator ` rather than system :c:func:`malloc`. Applications calling :c:func:`PyMem_Malloc` without holding the GIL can now crash: use ``PYTHONMALLOC=debug`` environment variable to validate the usage of memory diff --git a/Misc/NEWS.d/3.6.0rc1.rst b/Misc/NEWS.d/3.6.0rc1.rst index 15769f950db239..658f8c902d8704 100644 --- a/Misc/NEWS.d/3.6.0rc1.rst +++ b/Misc/NEWS.d/3.6.0rc1.rst @@ -69,8 +69,8 @@ supported. .. nonce: ilNIWN .. section: Library -Add new :data:`socket.TCP_CONGESTION` (Linux 2.6.13) and -:data:`socket.TCP_USER_TIMEOUT` (Linux 2.6.37) constants. Patch written by +Add new :const:`socket.TCP_CONGESTION` (Linux 2.6.13) and +:const:`socket.TCP_USER_TIMEOUT` (Linux 2.6.37) constants. Patch written by Omar Sandoval. .. diff --git a/Misc/NEWS.d/3.7.0a1.rst b/Misc/NEWS.d/3.7.0a1.rst index ef93454784b77f..712558bf98d018 100644 --- a/Misc/NEWS.d/3.7.0a1.rst +++ b/Misc/NEWS.d/3.7.0a1.rst @@ -3274,7 +3274,7 @@ Added support for bytes paths in os.fwalk(). .. nonce: 37jMwb .. section: Library -Add new :data:`socket.TCP_NOTSENT_LOWAT` (Linux 3.12) constant. Patch by +Add new :const:`socket.TCP_NOTSENT_LOWAT` (Linux 3.12) constant. Patch by Nathaniel J. Smith. .. @@ -3871,8 +3871,8 @@ as an integer. Function only available on Android. .. nonce: ilNIWN .. section: Library -Add new :data:`socket.TCP_CONGESTION` (Linux 2.6.13) and -:data:`socket.TCP_USER_TIMEOUT` (Linux 2.6.37) constants. Patch written by +Add new :const:`socket.TCP_CONGESTION` (Linux 2.6.13) and +:const:`socket.TCP_USER_TIMEOUT` (Linux 2.6.37) constants. Patch written by Omar Sandoval. .. diff --git a/Misc/NEWS.d/3.7.0a3.rst b/Misc/NEWS.d/3.7.0a3.rst index 368efb73567c40..52df0e7e82b080 100644 --- a/Misc/NEWS.d/3.7.0a3.rst +++ b/Misc/NEWS.d/3.7.0a3.rst @@ -754,8 +754,8 @@ now accepts characters as arguments. Based on patch by Steve Fink. .. nonce: DYQL0g .. section: Library -Add 3 new clock identifiers: :data:`time.CLOCK_BOOTTIME`, -:data:`time.CLOCK_PROF` and :data:`time.CLOCK_UPTIME`. 
+Add 3 new clock identifiers: :const:`time.CLOCK_BOOTTIME`, +:const:`time.CLOCK_PROF` and :const:`time.CLOCK_UPTIME`. .. diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst index 2634832b78a96e..467e992780382f 100644 --- a/Misc/NEWS.d/3.8.0a1.rst +++ b/Misc/NEWS.d/3.8.0a1.rst @@ -1934,7 +1934,7 @@ failure. .. nonce: _ct_0H .. section: Library -The :data:`time.CLOCK_UPTIME_RAW` constant is now available for macOS 10.12. +The :const:`time.CLOCK_UPTIME_RAW` constant is now available for macOS 10.12. .. diff --git a/Misc/NEWS.d/3.8.0a4.rst b/Misc/NEWS.d/3.8.0a4.rst index 9841195210c9e7..524a05a7ae9704 100644 --- a/Misc/NEWS.d/3.8.0a4.rst +++ b/Misc/NEWS.d/3.8.0a4.rst @@ -92,7 +92,7 @@ the field. .. nonce: wejLoC .. section: Core and Builtins -On AIX, :attr:`sys.platform` doesn't contain the major version anymore. +On AIX, :data:`sys.platform` doesn't contain the major version anymore. Always return ``'aix'``, instead of ``'aix3'`` .. ``'aix7'``. Since older Python versions include the version number, it is recommended to always use ``sys.platform.startswith('aix')``. Contributed by M. Felt. @@ -955,7 +955,7 @@ Add a new :mod:`_testinternalcapi` module to test the internal C API. .. section: Tests Fix ``test_imap4_host_default_value()`` of ``test_imaplib``: catch also -:data:`errno.ENETUNREACH` error. +:const:`errno.ENETUNREACH` error. .. diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst index 5ae14293df3e33..43676836478c91 100644 --- a/Misc/NEWS.d/3.9.0a1.rst +++ b/Misc/NEWS.d/3.9.0a1.rst @@ -1164,7 +1164,7 @@ defines them with eponymous methods. .. nonce: bmhquU .. section: Library -Add :data:`os.P_PIDFD` constant, which may be passed to :func:`os.waitid` to +Add :const:`os.P_PIDFD` constant, which may be passed to :func:`os.waitid` to wait on a Linux process file descriptor. .. @@ -1193,8 +1193,8 @@ Expose the Linux ``pidfd_open`` syscall as :func:`os.pidfd_open`. .. nonce: 7jvYFA .. section: Library -Added constants :data:`~fcntl.F_OFD_GETLK`, :data:`~fcntl.F_OFD_SETLK` and -:data:`~fcntl.F_OFD_SETLKW` to the :mod:`fcntl` module. Patch by Dong-hee +Added constants :const:`~fcntl.F_OFD_GETLK`, :const:`~fcntl.F_OFD_SETLK` and +:const:`~fcntl.F_OFD_SETLKW` to the :mod:`fcntl` module. Patch by Dong-hee Na. .. @@ -1283,7 +1283,7 @@ Fixed erroneous equality comparison in statistics.NormalDist(). .. nonce: 86ExWB .. section: Library -Added :data:`~os.CLD_KILLED` and :data:`~os.CLD_STOPPED` for +Added :const:`~os.CLD_KILLED` and :const:`~os.CLD_STOPPED` for :attr:`si_code`. Patch by Dong-hee Na. .. @@ -1355,8 +1355,8 @@ objects, patch by Samuel Colvin. .. nonce: 9w-IGF .. section: Library -Add missing :data:`stat.S_IFDOOR`, :data:`stat.S_IFPORT`, -:data:`stat.S_IFWHT`, :func:`stat.S_ISDOOR`, :func:`stat.S_ISPORT`, and +Add missing :const:`stat.S_IFDOOR`, :const:`stat.S_IFPORT`, +:const:`stat.S_IFWHT`, :func:`stat.S_ISDOOR`, :func:`stat.S_ISPORT`, and :func:`stat.S_ISWHT` values to the Python implementation of :mod:`stat`. .. @@ -4983,7 +4983,7 @@ set to CP_UTF7 or CP_UTF8. .. nonce: -0g2O3 .. section: Windows -Make :data:`winreg.REG_MULTI_SZ` support zero-length strings. +Make :const:`winreg.REG_MULTI_SZ` support zero-length strings. .. @@ -5686,7 +5686,7 @@ positional argument. .. nonce: zrmgki .. section: C API -Add :func:`PyConfig_SetWideStringList` function. +Add :c:func:`PyConfig_SetWideStringList` function. .. @@ -5706,7 +5706,7 @@ and :c:func:`_PyObject_CallMethodOneArg`. .. nonce: qZC0N_ .. 
section: C API -The :const:`METH_FASTCALL` calling convention has been documented. +The :c:macro:`METH_FASTCALL` calling convention has been documented. .. diff --git a/Misc/NEWS.d/3.9.0a5.rst b/Misc/NEWS.d/3.9.0a5.rst index 25342d21d8f0b1..8a1219501e81bf 100644 --- a/Misc/NEWS.d/3.9.0a5.rst +++ b/Misc/NEWS.d/3.9.0a5.rst @@ -582,7 +582,7 @@ Fix :mod:`json.tool` to catch :exc:`BrokenPipeError`. Patch by Dong-hee Na. Avoid a possible *"RuntimeError: dictionary changed size during iteration"* from :func:`inspect.getmodule` when it tried to loop through -:attr:`sys.modules`. +:data:`sys.modules`. .. @@ -989,7 +989,7 @@ modules are built. Add ``--with-platlibdir`` option to the configure script: name of the platform-specific library directory, stored in the new -:attr:`sys.platlibdir` attribute. It is used to build the path of +:data:`sys.platlibdir` attribute. It is used to build the path of platform-specific extension modules and the path of the standard library. It is equal to ``"lib"`` on most platforms. On Fedora and SuSE, it is equal to ``"lib64"`` on 64-bit platforms. Patch by Jan Matějek, Matěj Cepl, diff --git a/Misc/NEWS.d/3.9.0a6.rst b/Misc/NEWS.d/3.9.0a6.rst index 9594964917f390..519c7f833ebcb8 100644 --- a/Misc/NEWS.d/3.9.0a6.rst +++ b/Misc/NEWS.d/3.9.0a6.rst @@ -680,7 +680,7 @@ child process, reset the lock to the unlocked state. Rename also the private .. nonce: kIjVge .. section: Library -Expose :data:`~socket.CAN_RAW_JOIN_FILTERS` in the :mod:`socket` module. +Expose :const:`~socket.CAN_RAW_JOIN_FILTERS` in the :mod:`socket` module. .. @@ -735,7 +735,7 @@ number of groups. For other implementations, double the group list size. .. nonce: HFpHZS .. section: Library -Add :data:`time.CLOCK_TAI` constant if the operating system support it. +Add :const:`time.CLOCK_TAI` constant if the operating system support it. .. diff --git a/Misc/NEWS.d/next/Build/2023-07-23-00-38-51.gh-issue-106962.VVYrWB.rst b/Misc/NEWS.d/next/Build/2023-07-23-00-38-51.gh-issue-106962.VVYrWB.rst new file mode 100644 index 00000000000000..32e196fe26d3b7 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-07-23-00-38-51.gh-issue-106962.VVYrWB.rst @@ -0,0 +1 @@ +Detect MPI compilers in :file:`configure`. diff --git a/Misc/NEWS.d/next/Build/2023-07-28-18-17-33.gh-issue-106881.U3Ezdq.rst b/Misc/NEWS.d/next/Build/2023-07-28-18-17-33.gh-issue-106881.U3Ezdq.rst new file mode 100644 index 00000000000000..40b2609e95c7e9 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-07-28-18-17-33.gh-issue-106881.U3Ezdq.rst @@ -0,0 +1 @@ +Check for `linux/limits.h` before including it in `Modules/posixmodule.c`. diff --git a/Misc/NEWS.d/next/Build/2023-08-01-17-12-53.gh-issue-105481.42nsDE.rst b/Misc/NEWS.d/next/Build/2023-08-01-17-12-53.gh-issue-105481.42nsDE.rst new file mode 100644 index 00000000000000..1e61c37b609469 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-08-01-17-12-53.gh-issue-105481.42nsDE.rst @@ -0,0 +1 @@ +Remove the make target ``regen-opcode-targets``, merge its work into ``regen-opcode`` which repeats most of the calculation. This simplifies the code for the build and reduces code duplication. diff --git a/Misc/NEWS.d/next/Build/2023-08-09-17-05-33.gh-issue-107814.c0Oapq.rst b/Misc/NEWS.d/next/Build/2023-08-09-17-05-33.gh-issue-107814.c0Oapq.rst new file mode 100644 index 00000000000000..d3723353470ce2 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-08-09-17-05-33.gh-issue-107814.c0Oapq.rst @@ -0,0 +1 @@ +When calling ``find_python.bat`` with ``-q`` it did not properly silence the output of nuget. That is now fixed. 
diff --git a/Misc/NEWS.d/next/C API/2020-11-11-22-36-29.bpo-42327.ODSZBM.rst b/Misc/NEWS.d/next/C API/2020-11-11-22-36-29.bpo-42327.ODSZBM.rst index 3d935aceb57a79..bcea7a1f9825b1 100644 --- a/Misc/NEWS.d/next/C API/2020-11-11-22-36-29.bpo-42327.ODSZBM.rst +++ b/Misc/NEWS.d/next/C API/2020-11-11-22-36-29.bpo-42327.ODSZBM.rst @@ -1 +1 @@ -Add :func:`PyModule_Add` function: similar to :c:func:`PyModule_AddObjectRef` and :c:func:`PyModule_AddObject`, but always steals a reference to the value. +Add :c:func:`PyModule_Add` function: similar to :c:func:`PyModule_AddObjectRef` and :c:func:`PyModule_AddObject`, but always steals a reference to the value. diff --git a/Misc/NEWS.d/next/C API/2023-05-31-18-37-57.gh-issue-105156.R4El5V.rst b/Misc/NEWS.d/next/C API/2023-05-31-18-37-57.gh-issue-105156.R4El5V.rst index cbdb8379f24ccd..536e484116690d 100644 --- a/Misc/NEWS.d/next/C API/2023-05-31-18-37-57.gh-issue-105156.R4El5V.rst +++ b/Misc/NEWS.d/next/C API/2023-05-31-18-37-57.gh-issue-105156.R4El5V.rst @@ -1,4 +1,4 @@ Deprecate the old ``Py_UNICODE`` and ``PY_UNICODE_TYPE`` types: use directly -the ``wchar_t`` type instead. Since Python 3.3, ``Py_UNICODE`` and -``PY_UNICODE_TYPE`` are just aliases to ``wchar_t``. Patch by Victor +the :c:type:`wchar_t` type instead. Since Python 3.3, ``Py_UNICODE`` and +``PY_UNICODE_TYPE`` are just aliases to :c:type:`wchar_t`. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C API/2023-06-23-02-57-15.gh-issue-106004.-OToh6.rst b/Misc/NEWS.d/next/C API/2023-06-23-02-57-15.gh-issue-106004.-OToh6.rst new file mode 100644 index 00000000000000..c7a006b2bc0759 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-06-23-02-57-15.gh-issue-106004.-OToh6.rst @@ -0,0 +1,4 @@ +Adds :c:func:`PyDict_GetItemRef` and :c:func:`PyDict_GetItemStringRef` +functions: similar to :c:func:`PyDict_GetItemWithError` but returning a +:term:`strong reference` instead of a :term:`borrowed reference`. Patch by +Victor Stinner. diff --git a/Misc/NEWS.d/next/C API/2023-07-22-14-40-48.gh-issue-106320.H3u7x4.rst b/Misc/NEWS.d/next/C API/2023-07-22-14-40-48.gh-issue-106320.H3u7x4.rst new file mode 100644 index 00000000000000..1e0ba0d71e7555 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-07-22-14-40-48.gh-issue-106320.H3u7x4.rst @@ -0,0 +1,5 @@ +Remove private ``_PyUnicode_AsString()`` alias to +:c:func:`PyUnicode_AsUTF8`. It was kept for backward compatibility with +Python 3.0 - 3.2. The :c:func:`PyUnicode_AsUTF8` is available since Python +3.3. The :c:func:`PyUnicode_AsUTF8String` function can be used to keep +compatibility with Python 3.2 and older. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C API/2023-07-25-13-41-09.gh-issue-107226.N919zH.rst b/Misc/NEWS.d/next/C API/2023-07-25-13-41-09.gh-issue-107226.N919zH.rst new file mode 100644 index 00000000000000..6178f18517d48f --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-07-25-13-41-09.gh-issue-107226.N919zH.rst @@ -0,0 +1,2 @@ +:c:func:`PyModule_AddObjectRef` is now only available in the limited API +version 3.10 or later. diff --git a/Misc/NEWS.d/next/C API/2023-07-25-17-23-08.gh-issue-107249.xqk2ke.rst b/Misc/NEWS.d/next/C API/2023-07-25-17-23-08.gh-issue-107249.xqk2ke.rst new file mode 100644 index 00000000000000..a7139024329fae --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-07-25-17-23-08.gh-issue-107249.xqk2ke.rst @@ -0,0 +1,2 @@ +Implement the :c:macro:`Py_UNUSED` macro for Windows MSVC compiler. Patch by +Victor Stinner. 
diff --git a/Misc/NEWS.d/next/C API/2023-08-10-11-12-25.gh-issue-107810.oJ40Qx.rst b/Misc/NEWS.d/next/C API/2023-08-10-11-12-25.gh-issue-107810.oJ40Qx.rst new file mode 100644 index 00000000000000..c8a1f6d122b61b --- /dev/null +++ b/Misc/NEWS.d/next/C API/2023-08-10-11-12-25.gh-issue-107810.oJ40Qx.rst @@ -0,0 +1 @@ +Improve :exc:`DeprecationWarning` for uses of :c:type:`PyType_Spec` with metaclasses that have custom ``tp_new``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-04-04-00-40-04.gh-issue-96663.PdR9hK.rst b/Misc/NEWS.d/next/Core and Builtins/2023-04-04-00-40-04.gh-issue-96663.PdR9hK.rst new file mode 100644 index 00000000000000..cb806b5ea7a9f3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-04-04-00-40-04.gh-issue-96663.PdR9hK.rst @@ -0,0 +1 @@ +Add a better, more introspect-able error message when setting attributes on classes without a ``__dict__`` and no slot member for the attribute. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-05-30-08-09-43.gh-issue-105035.OWUlHy.rst b/Misc/NEWS.d/next/Core and Builtins/2023-05-30-08-09-43.gh-issue-105035.OWUlHy.rst index c0ee2da9d45037..dbfcd658d945d4 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2023-05-30-08-09-43.gh-issue-105035.OWUlHy.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2023-05-30-08-09-43.gh-issue-105035.OWUlHy.rst @@ -1,2 +1,2 @@ -Fix :func:`super` calls on types with custom :attr:`tp_getattro` +Fix :func:`super` calls on types with custom :c:member:`~PyTypeObject.tp_getattro` implementation (e.g. meta-types.) diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-13-14-55-45.gh-issue-106723.KsMufQ.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-13-14-55-45.gh-issue-106723.KsMufQ.rst new file mode 100644 index 00000000000000..207f397f17d3f3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-13-14-55-45.gh-issue-106723.KsMufQ.rst @@ -0,0 +1 @@ +Propagate ``frozen_modules`` to multiprocessing spawned process interpreters. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-16-07-55-19.gh-issue-106485.wPb1bH.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-16-07-55-19.gh-issue-106485.wPb1bH.rst new file mode 100644 index 00000000000000..1f80082821edac --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-16-07-55-19.gh-issue-106485.wPb1bH.rst @@ -0,0 +1,2 @@ +Reduce the number of materialized instances dictionaries by dematerializing +them when possible. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-18-16-13-51.gh-issue-106092.bObgRM.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-18-16-13-51.gh-issue-106092.bObgRM.rst new file mode 100644 index 00000000000000..7fb5b45c763e45 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-18-16-13-51.gh-issue-106092.bObgRM.rst @@ -0,0 +1,2 @@ +Fix a segmentation fault caused by a use-after-free bug in ``frame_dealloc`` +when the trashcan delays the deallocation of a ``PyFrameObject``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-20-01-15-58.gh-issue-106908.cDmcVI.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-20-01-15-58.gh-issue-106908.cDmcVI.rst new file mode 100644 index 00000000000000..9c9b84599cb551 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-20-01-15-58.gh-issue-106908.cDmcVI.rst @@ -0,0 +1,3 @@ +Fix various hangs, reference leaks, test failures, and tracing/introspection +bugs when running with :envvar:`PYTHONUOPS` or :option:`-X uops <-X>` +enabled. 
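A minimal, illustrative sketch of the situation addressed by the gh-issue-96663 entry above: an instance whose class defines ``__slots__`` (and therefore has no ``__dict__``) cannot gain new attributes, and the assignment below now fails with a more descriptive message (the class name is hypothetical and the exact error wording depends on the interpreter version)::

    class Point:
        # Instances get no __dict__; only the declared slots exist.
        __slots__ = ("x", "y")

    p = Point()
    p.x = 1        # fine: 'x' is a declared slot
    try:
        p.z = 2    # no 'z' slot and no __dict__ to fall back on
    except AttributeError as exc:
        print(exc)  # the improved message explains why the assignment failed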
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-20-12-21-37.gh-issue-105699.08ywGV.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-20-12-21-37.gh-issue-105699.08ywGV.rst new file mode 100644 index 00000000000000..82312718cd047e --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-20-12-21-37.gh-issue-105699.08ywGV.rst @@ -0,0 +1,4 @@ +Python no longer crashes due to an infrequent race in setting +``Py_FileSystemDefaultEncoding`` and ``Py_FileSystemDefaultEncodeErrors`` +(both deprecated), when simultaneously initializing two isolated +subinterpreters. Now they are only set during runtime initialization. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-20-15-15-57.gh-issue-105699.DdqHFg.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-20-15-15-57.gh-issue-105699.DdqHFg.rst new file mode 100644 index 00000000000000..4a257c6282220f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-20-15-15-57.gh-issue-105699.DdqHFg.rst @@ -0,0 +1,3 @@ +Python no longer crashes due to an infrequent race when initializing +per-interpreter interned strings. The crash would manifest when the +interpreter was finalized. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-21-14-37-48.gh-issue-106917.1jWp_m.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-21-14-37-48.gh-issue-106917.1jWp_m.rst new file mode 100644 index 00000000000000..82c74d5465458a --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-21-14-37-48.gh-issue-106917.1jWp_m.rst @@ -0,0 +1,4 @@ +Fix classmethod-style :func:`super` method calls (i.e., where the second +argument to :func:`super`, or the implied second argument drawn from +``self/cls`` in the case of zero-arg super, is a type) when the target of +the call is not a classmethod. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-22-14-35-38.gh-issue-107015.Ghp58t.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-22-14-35-38.gh-issue-107015.Ghp58t.rst new file mode 100644 index 00000000000000..77618a5bd50f2a --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-22-14-35-38.gh-issue-107015.Ghp58t.rst @@ -0,0 +1,3 @@ +The ASYNC and AWAIT tokens are removed from the Grammar, which removes the +possibility of making ``async`` and ``await`` soft keywords when using +``feature_version<7`` in :func:`ast.parse`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-23-13-07-34.gh-issue-107122.9HFUyb.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-23-13-07-34.gh-issue-107122.9HFUyb.rst new file mode 100644 index 00000000000000..64ac8ac6df09b8 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-23-13-07-34.gh-issue-107122.9HFUyb.rst @@ -0,0 +1 @@ +Add :meth:`dbm.gnu.gdbm.clear` to :mod:`dbm.gnu`. Patch by Dong-hee Na. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-23-21-16-54.gh-issue-107122.VNuNcq.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-23-21-16-54.gh-issue-107122.VNuNcq.rst new file mode 100644 index 00000000000000..5b7cc98ddc6414 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-23-21-16-54.gh-issue-107122.VNuNcq.rst @@ -0,0 +1 @@ +Add :meth:`dbm.ndbm.ndbm.clear` to :mod:`dbm.ndbm`. Patch by Dong-hee Na.
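The two ``clear()`` entries above add the same method to the :mod:`dbm.gnu` and :mod:`dbm.ndbm` database objects. A minimal usage sketch, assuming this change is applied (the database file name ``demo_db`` is hypothetical)::

    import dbm.ndbm  # dbm.gnu objects gain the same clear() method

    with dbm.ndbm.open("demo_db", "c") as db:  # "c": create the file if missing
        db[b"spam"] = b"eggs"
        db.clear()              # new method: remove every key in the database
        print(list(db.keys()))  # []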
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-24-11-11-41.gh-issue-104621.vM8Y_l.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-24-11-11-41.gh-issue-104621.vM8Y_l.rst new file mode 100644 index 00000000000000..86c976295f2620 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-24-11-11-41.gh-issue-104621.vM8Y_l.rst @@ -0,0 +1 @@ +Unsupported modules now always fail to be imported. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-25-15-29-26.gh-issue-106931.kKU1le.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-25-15-29-26.gh-issue-106931.kKU1le.rst new file mode 100644 index 00000000000000..e0def5331b6c82 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-25-15-29-26.gh-issue-106931.kKU1le.rst @@ -0,0 +1,3 @@ +Statically allocated string objects are now interned globally instead of +per-interpreter. This fixes a situation where such a string would only be +interned in a single interpreter. Normal string objects are unaffected. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-26-12-18-10.gh-issue-106897.EsGurc.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-26-12-18-10.gh-issue-106897.EsGurc.rst new file mode 100644 index 00000000000000..d787dc4aad2d29 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-26-12-18-10.gh-issue-106897.EsGurc.rst @@ -0,0 +1,3 @@ +Add a ``RERAISE`` event to ``sys.monitoring``, which occurs when an +exception is reraised, either explicitly by a plain ``raise`` statement, or +implicitly in an ``except`` or ``finally`` block. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-26-18-53-34.gh-issue-106895.DdEwV8.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-26-18-53-34.gh-issue-106895.DdEwV8.rst new file mode 100644 index 00000000000000..370a29d34c860a --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-26-18-53-34.gh-issue-106895.DdEwV8.rst @@ -0,0 +1,2 @@ +Raise a ``ValueError`` when a monitoring callback function returns +``DISABLE`` for events that cannot be disabled locally. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-26-21-28-06.gh-issue-106898.8Wjuiv.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-26-21-28-06.gh-issue-106898.8Wjuiv.rst new file mode 100644 index 00000000000000..f1b1c4c64b4aca --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-26-21-28-06.gh-issue-106898.8Wjuiv.rst @@ -0,0 +1,3 @@ +Add the exception as the third argument to ``PY_UNWIND`` callbacks in +``sys.monitoring``. This makes the ``PY_UNWIND`` callback consistent with +the other exception handling callbacks. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-27-11-18-04.gh-issue-106078.WEy2Yn.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-27-11-18-04.gh-issue-106078.WEy2Yn.rst new file mode 100644 index 00000000000000..f5a0e539e5d05f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-27-11-18-04.gh-issue-106078.WEy2Yn.rst @@ -0,0 +1 @@ +Isolate :mod:`!_decimal` (apply :pep:`687`). Patch by Charlie Zhao. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-27-11-47-29.gh-issue-104432.oGHF-z.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-27-11-47-29.gh-issue-104432.oGHF-z.rst new file mode 100644 index 00000000000000..e47927b4e11886 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-27-11-47-29.gh-issue-104432.oGHF-z.rst @@ -0,0 +1,4 @@ +Fix potential unaligned memory access on C APIs involving returned sequences +of `char *` pointers within the :mod:`grp` and :mod:`socket` modules.
These +were revealed using a ``-fsanitizer=alignment`` build on ARM macOS. Patch by +Christopher Chavez. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-29-22-01-30.gh-issue-104584.tINuoA.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-29-22-01-30.gh-issue-104584.tINuoA.rst new file mode 100644 index 00000000000000..059524831597b7 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-29-22-01-30.gh-issue-104584.tINuoA.rst @@ -0,0 +1,2 @@ +Fix an issue which caused incorrect inline caches to be read when running +with :envvar:`PYTHONUOPS` or :option:`-X uops <-X>` enabled. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-30-05-20-16.gh-issue-107263.q0IU2M.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-30-05-20-16.gh-issue-107263.q0IU2M.rst new file mode 100644 index 00000000000000..fb0940b456dae5 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-30-05-20-16.gh-issue-107263.q0IU2M.rst @@ -0,0 +1,3 @@ +Increase C recursion limit for functions other than the main interpreter +from 800 to 1500. This should allow functions like ``list.__repr__`` and +``json.dumps`` to handle all the inputs that they could prior to 3.12. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-30-14-18-49.gh-issue-107455.Es53l7.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-30-14-18-49.gh-issue-107455.Es53l7.rst new file mode 100644 index 00000000000000..84a93251e799d5 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-30-14-18-49.gh-issue-107455.Es53l7.rst @@ -0,0 +1,3 @@ +Improve error messages when converting an incompatible type to +:class:`ctypes.c_char_p`, :class:`ctypes.c_wchar_p` and +:class:`ctypes.c_void_p`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-07-30-18-05-11.gh-issue-100964.HluhBJ.rst b/Misc/NEWS.d/next/Core and Builtins/2023-07-30-18-05-11.gh-issue-100964.HluhBJ.rst new file mode 100644 index 00000000000000..99ebc926e2ce2d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-07-30-18-05-11.gh-issue-100964.HluhBJ.rst @@ -0,0 +1,2 @@ +Clear generators' exception state after ``return`` to break reference +cycles. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-01-09-41-36.gh-issue-106608.OFZogw.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-01-09-41-36.gh-issue-106608.OFZogw.rst new file mode 100644 index 00000000000000..20d43a7c4f754a --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-01-09-41-36.gh-issue-106608.OFZogw.rst @@ -0,0 +1 @@ +Make ``_PyUOpExecutorObject`` variable length. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-02-12-24-51.gh-issue-107080.PNolFU.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-02-12-24-51.gh-issue-107080.PNolFU.rst new file mode 100644 index 00000000000000..5084c854360e35 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-02-12-24-51.gh-issue-107080.PNolFU.rst @@ -0,0 +1,4 @@ +Trace refs builds (``--with-trace-refs``) were crashing when used with +isolated subinterpreters. The problematic global state has been isolated to +each interpreter. Other than fixing the crashes, this change does not affect +users. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-03-11-13-09.gh-issue-107596.T3yPGI.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-03-11-13-09.gh-issue-107596.T3yPGI.rst new file mode 100644 index 00000000000000..8912de73680b44 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-03-11-13-09.gh-issue-107596.T3yPGI.rst @@ -0,0 +1 @@ +Specialize subscripting :class:`str` objects by :class:`int` indexes.
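The higher C recursion limit mentioned above is easiest to see with deeply nested containers; a rough sketch (the nesting depth is illustrative, not an exact limit)::

    import json

    nested = []
    for _ in range(500):          # build a deeply nested list
        nested = [nested]

    repr(nested)                  # exercises list.__repr__
    json.dumps(nested)            # exercises the C JSON encoder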
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-03-13-38-14.gh-issue-84436.gl1wHx.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-03-13-38-14.gh-issue-84436.gl1wHx.rst new file mode 100644 index 00000000000000..71044c32feebcc --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-03-13-38-14.gh-issue-84436.gl1wHx.rst @@ -0,0 +1 @@ +Skip reference count modifications for many known immortal objects. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-04-21-25-26.gh-issue-107724.EbBXMr.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-04-21-25-26.gh-issue-107724.EbBXMr.rst new file mode 100644 index 00000000000000..6e853cf72a3348 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-04-21-25-26.gh-issue-107724.EbBXMr.rst @@ -0,0 +1,3 @@ +In pre-release versions of 3.12, up to rc1, the sys.monitoring callback +function for the ``PY_THROW`` event was missing the third, exception +argument. That is now fixed. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-05-04-47-18.gh-issue-107674.0sYhR2.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-05-04-47-18.gh-issue-107674.0sYhR2.rst new file mode 100644 index 00000000000000..acfbf1fa2adf2c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-05-04-47-18.gh-issue-107674.0sYhR2.rst @@ -0,0 +1 @@ +Fixed performance regression in ``sys.settrace``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-05-09-06-56.gh-issue-105848.Drc-1-.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-05-09-06-56.gh-issue-105848.Drc-1-.rst new file mode 100644 index 00000000000000..6c1c3229475f6f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-05-09-06-56.gh-issue-105848.Drc-1-.rst @@ -0,0 +1,3 @@ +Modify the bytecode so that the actual callable for a :opcode:`CALL` is at a +consistent position on the stack (regardless of whether or not +bound-method-calling optimizations are active). diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-05-15-45-07.gh-issue-107659.QgtQ5M.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-05-15-45-07.gh-issue-107659.QgtQ5M.rst new file mode 100644 index 00000000000000..31cc6982400d5d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-05-15-45-07.gh-issue-107659.QgtQ5M.rst @@ -0,0 +1 @@ +Add docstrings for :func:`ctypes.pointer` and :func:`ctypes.POINTER`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-08-02-46-46.gh-issue-107758.R5kyBI.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-08-02-46-46.gh-issue-107758.R5kyBI.rst new file mode 100644 index 00000000000000..192f1df26e613e --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-08-02-46-46.gh-issue-107758.R5kyBI.rst @@ -0,0 +1 @@ +Make the ``dump_stack()`` routine used by the ``lltrace`` feature (low-level interpreter debugging) robust against recursion by ensuring that it never calls a ``__repr__`` method implemented in Python. Also make the similar output for Tier-2 uops appear on ``stdout`` (instead of ``stderr``), to match the ``lltrace`` code in ceval.c. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-09-08-31-20.gh-issue-84805.7JRWua.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-09-08-31-20.gh-issue-84805.7JRWua.rst new file mode 100644 index 00000000000000..23dfba989fa552 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-09-08-31-20.gh-issue-84805.7JRWua.rst @@ -0,0 +1,2 @@ +Autogenerate signature for :c:macro:`METH_NOARGS` and :c:macro:`METH_O` +extension functions. 
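To illustrate the ``PY_THROW`` change noted above, a rough sketch of a ``sys.monitoring`` callback that receives the exception as its third argument (the tool id and tool name are arbitrary choices)::

    import sys

    mon = sys.monitoring
    TOOL = mon.DEBUGGER_ID

    def on_throw(code, instruction_offset, exception):
        # the third argument is the exception thrown into the frame
        print(f"PY_THROW in {code.co_name}: {exception!r}")

    mon.use_tool_id(TOOL, "example-tool")
    mon.register_callback(TOOL, mon.events.PY_THROW, on_throw)
    mon.set_events(TOOL, mon.events.PY_THROW)

    def gen():
        yield

    g = gen()
    next(g)
    try:
        g.throw(ValueError("boom"))   # resuming with an exception fires PY_THROW
    except ValueError:
        pass

    mon.set_events(TOOL, 0)
    mon.free_tool_id(TOOL)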
diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-08-10-17-36-27.gh-issue-91051.LfaeNW.rst b/Misc/NEWS.d/next/Core and Builtins/2023-08-10-17-36-27.gh-issue-91051.LfaeNW.rst new file mode 100644 index 00000000000000..b4b90ad4ea0ecc --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-08-10-17-36-27.gh-issue-91051.LfaeNW.rst @@ -0,0 +1,2 @@ +Fix abort / segfault when using all eight type watcher slots, on platforms +where ``char`` is signed by default. diff --git a/Misc/NEWS.d/next/Documentation/2023-05-16-22-08-24.gh-issue-54738.mJvCnj.rst b/Misc/NEWS.d/next/Documentation/2023-05-16-22-08-24.gh-issue-54738.mJvCnj.rst new file mode 100644 index 00000000000000..4da58fc982b6d7 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-05-16-22-08-24.gh-issue-54738.mJvCnj.rst @@ -0,0 +1 @@ +Add documentation on how to localize the :mod:`argparse` module. diff --git a/Misc/NEWS.d/next/Documentation/2023-07-21-11-51-57.gh-issue-106948.K_JQ7j.rst b/Misc/NEWS.d/next/Documentation/2023-07-21-11-51-57.gh-issue-106948.K_JQ7j.rst new file mode 100644 index 00000000000000..42b6348153b56a --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-07-21-11-51-57.gh-issue-106948.K_JQ7j.rst @@ -0,0 +1 @@ +Add a number of standard external names to ``nitpick_ignore``. diff --git a/Misc/NEWS.d/next/Documentation/2023-07-22-15-14-13.gh-issue-107008.3JQ1Vt.rst b/Misc/NEWS.d/next/Documentation/2023-07-22-15-14-13.gh-issue-107008.3JQ1Vt.rst new file mode 100644 index 00000000000000..a0fa27ec10303e --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-07-22-15-14-13.gh-issue-107008.3JQ1Vt.rst @@ -0,0 +1,2 @@ +Document the :mod:`curses` module variables :const:`~curses.LINES` and +:const:`~curses.COLS`. diff --git a/Misc/NEWS.d/next/Documentation/2023-07-26-16-33-04.gh-issue-107305.qB2LS4.rst b/Misc/NEWS.d/next/Documentation/2023-07-26-16-33-04.gh-issue-107305.qB2LS4.rst new file mode 100644 index 00000000000000..038f9e68a5422a --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2023-07-26-16-33-04.gh-issue-107305.qB2LS4.rst @@ -0,0 +1,3 @@ +Add documentation for :c:type:`PyInterpreterConfig` and +:c:func:`Py_NewInterpreterFromConfig`. Also clarify some of the nearby docs +relative to per-interpreter GIL. diff --git a/Misc/NEWS.d/next/Library/2020-05-03-00-33-15.bpo-18319.faPTlx.rst b/Misc/NEWS.d/next/Library/2020-05-03-00-33-15.bpo-18319.faPTlx.rst new file mode 100644 index 00000000000000..a1a4cf6d63725a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-03-00-33-15.bpo-18319.faPTlx.rst @@ -0,0 +1,2 @@ +Ensure `gettext(msg)` retrieve translations even if a plural form exists. In +other words: `gettext(msg) == ngettext(msg, '', 1)`. diff --git a/Misc/NEWS.d/next/Library/2020-11-10-07-04-15.bpo-40988.5kBC-O.rst b/Misc/NEWS.d/next/Library/2020-11-10-07-04-15.bpo-40988.5kBC-O.rst new file mode 100644 index 00000000000000..9323d93c59b05a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-11-10-07-04-15.bpo-40988.5kBC-O.rst @@ -0,0 +1,3 @@ +Improve performance of :class:`functools.singledispatchmethod` by caching the +generated dispatch wrapper. Optimization suggested by frederico. Patch by +@mental32, Alex Waygood and Pieter Eendebak. 
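The :class:`functools.singledispatchmethod` speed-up above does not change how the API is used; a small, self-contained example for reference::

    from functools import singledispatchmethod

    class Negator:
        @singledispatchmethod
        def neg(self, arg):
            raise NotImplementedError("cannot negate this type")

        @neg.register
        def _(self, arg: int):
            return -arg

        @neg.register
        def _(self, arg: bool):
            return not arg

    n = Negator()
    n.neg(5)       # -5
    n.neg(True)    # False; repeated calls reuse the cached dispatch wrapper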
diff --git a/Misc/NEWS.d/next/Library/2021-08-16-17-52-26.bpo-44850.r8jx5u.rst b/Misc/NEWS.d/next/Library/2021-08-16-17-52-26.bpo-44850.r8jx5u.rst new file mode 100644 index 00000000000000..1fe5497f856e90 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2021-08-16-17-52-26.bpo-44850.r8jx5u.rst @@ -0,0 +1,2 @@ +Improve performance of :func:`operator.methodcaller` using the :pep:`590` ``vectorcall`` convention. +Patch by Anthony Lee and Pieter Eendebak. diff --git a/Misc/NEWS.d/next/Library/2021-10-31-16-06-28.bpo-43633.vflwXv.rst b/Misc/NEWS.d/next/Library/2021-10-31-16-06-28.bpo-43633.vflwXv.rst new file mode 100644 index 00000000000000..025de1e1a7d6ef --- /dev/null +++ b/Misc/NEWS.d/next/Library/2021-10-31-16-06-28.bpo-43633.vflwXv.rst @@ -0,0 +1 @@ +Improve the textual representation of IPv4-mapped IPv6 addresses (:rfc:`4291` Sections 2.2, 2.5.5.2) in :mod:`ipaddress`. Patch by Oleksandr Pavliuk. diff --git a/Misc/NEWS.d/next/Library/2023-04-08-12-43-52.gh-issue-101162.yOCd_J.rst b/Misc/NEWS.d/next/Library/2023-04-08-12-43-52.gh-issue-101162.yOCd_J.rst new file mode 100644 index 00000000000000..e9fadc8f436d9b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-04-08-12-43-52.gh-issue-101162.yOCd_J.rst @@ -0,0 +1,2 @@ +Forbid using :func:`builtins.issubclass` with :class:`types.GenericAlias` as +the first argument. diff --git a/Misc/NEWS.d/next/Library/2023-06-07-00-13-00.gh-issue-70303.frwUKH.rst b/Misc/NEWS.d/next/Library/2023-06-07-00-13-00.gh-issue-70303.frwUKH.rst new file mode 100644 index 00000000000000..39a891ac5964ab --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-06-07-00-13-00.gh-issue-70303.frwUKH.rst @@ -0,0 +1,4 @@ +Emit :exc:`FutureWarning` from :meth:`pathlib.Path.glob` and +:meth:`~pathlib.Path.rglob` if the given pattern ends with "``**``". In a +future Python release, patterns with this ending will match both files and +directories. Add a trailing slash to only match directories. diff --git a/Misc/NEWS.d/next/Library/2023-06-30-16-42-44.gh-issue-106263.tk-t93.rst b/Misc/NEWS.d/next/Library/2023-06-30-16-42-44.gh-issue-106263.tk-t93.rst new file mode 100644 index 00000000000000..23763818d84ba5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-06-30-16-42-44.gh-issue-106263.tk-t93.rst @@ -0,0 +1,2 @@ +Fix crash when calling ``repr`` with a manually constructed SignalDict object. +Patch by Charlie Zhao. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2023-07-03-03-46-20.gh-issue-106350.LLcTEe.rst b/Misc/NEWS.d/next/Library/2023-07-03-03-46-20.gh-issue-106350.LLcTEe.rst new file mode 100644 index 00000000000000..681d63a6668be8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-03-03-46-20.gh-issue-106350.LLcTEe.rst @@ -0,0 +1,2 @@ +Detect possible memory allocation failure in the libtommath function :c:func:`mp_init` +used by the ``_tkinter`` module. diff --git a/Misc/NEWS.d/next/Library/2023-07-07-14-52-31.gh-issue-106052.ak8nbs.rst b/Misc/NEWS.d/next/Library/2023-07-07-14-52-31.gh-issue-106052.ak8nbs.rst new file mode 100644 index 00000000000000..f2d4c2f7b18ec7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-07-14-52-31.gh-issue-106052.ak8nbs.rst @@ -0,0 +1,2 @@ +:mod:`re` module: fix the matching of possessive quantifiers in the case of +a subpattern containing backtracking. 
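The :func:`operator.methodcaller` vectorcall optimization above is likewise transparent to callers; typical usage for reference::

    from operator import methodcaller

    upper = methodcaller("upper")
    upper("spam")                            # 'SPAM'

    replace = methodcaller("replace", "-", " ")
    replace("per-interpreter")               # 'per interpreter'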
diff --git a/Misc/NEWS.d/next/Library/2023-07-07-18-22-07.gh-issue-106527.spHQ0W.rst b/Misc/NEWS.d/next/Library/2023-07-07-18-22-07.gh-issue-106527.spHQ0W.rst new file mode 100644 index 00000000000000..204bda1c73eb36 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-07-18-22-07.gh-issue-106527.spHQ0W.rst @@ -0,0 +1 @@ +Reduce overhead to add and remove :mod:`asyncio` readers and writers. diff --git a/Misc/NEWS.d/next/Library/2023-07-09-00-36-33.gh-issue-106558.Zqsj6F.rst b/Misc/NEWS.d/next/Library/2023-07-09-00-36-33.gh-issue-106558.Zqsj6F.rst new file mode 100644 index 00000000000000..8fe677f5d84b5f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-09-00-36-33.gh-issue-106558.Zqsj6F.rst @@ -0,0 +1,3 @@ +Remove ref cycle in callers of +:func:`~multiprocessing.managers.convert_to_error` by deleting ``result`` +from scope in a ``finally`` block. diff --git a/Misc/NEWS.d/next/Library/2023-07-09-13-10-54.gh-issue-106566.NN35-U.rst b/Misc/NEWS.d/next/Library/2023-07-09-13-10-54.gh-issue-106566.NN35-U.rst new file mode 100644 index 00000000000000..3b88dc79183876 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-09-13-10-54.gh-issue-106566.NN35-U.rst @@ -0,0 +1 @@ +Optimize ``(?!)`` (pattern which always fails) in regular expressions. diff --git a/Misc/NEWS.d/next/Library/2023-07-15-10-24-56.gh-issue-106774.FJcqCj.rst b/Misc/NEWS.d/next/Library/2023-07-15-10-24-56.gh-issue-106774.FJcqCj.rst new file mode 100644 index 00000000000000..ed467573b89e14 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-15-10-24-56.gh-issue-106774.FJcqCj.rst @@ -0,0 +1 @@ +Update the bundled copy of pip to version 23.2.1. diff --git a/Misc/NEWS.d/next/Library/2023-07-19-09-11-08.gh-issue-106751.U9nD_B.rst b/Misc/NEWS.d/next/Library/2023-07-19-09-11-08.gh-issue-106751.U9nD_B.rst new file mode 100644 index 00000000000000..b9a9b563ad2267 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-19-09-11-08.gh-issue-106751.U9nD_B.rst @@ -0,0 +1 @@ +Optimize :meth:`_PollLikeSelector.select` for the case of many iterations. diff --git a/Misc/NEWS.d/next/Library/2023-07-20-06-00-35.gh-issue-106739.W1hygr.rst b/Misc/NEWS.d/next/Library/2023-07-20-06-00-35.gh-issue-106739.W1hygr.rst new file mode 100644 index 00000000000000..168e2019395696 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-20-06-00-35.gh-issue-106739.W1hygr.rst @@ -0,0 +1 @@ +Add the ``rtype_cache`` to the warning message (as an addition to the type of leaked objects and the number of leaked objects already included in the message) to make debugging leaked objects easier when the multiprocessing resource tracker process finds leaked objects at shutdown. This helps more quickly identify what was leaked and/or why the leaked object was not properly cleaned up. diff --git a/Misc/NEWS.d/next/Library/2023-07-22-12-53-53.gh-issue-105002.gkfsW0.rst b/Misc/NEWS.d/next/Library/2023-07-22-12-53-53.gh-issue-105002.gkfsW0.rst new file mode 100644 index 00000000000000..b4c133a5cb1244 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-22-12-53-53.gh-issue-105002.gkfsW0.rst @@ -0,0 +1,3 @@ +Fix invalid result from :meth:`PurePath.relative_to` method when attempting to walk +a "``..``" segment in *other* with *walk_up* enabled. A :exc:`ValueError` exception +is now raised in this case.
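A short sketch of the :meth:`PurePath.relative_to` behavior described above (the paths are arbitrary examples)::

    from pathlib import PurePosixPath

    p = PurePosixPath("/a/b/c")
    p.relative_to("/a/d", walk_up=True)          # PurePosixPath('../b/c')

    try:
        p.relative_to("/a/d/..", walk_up=True)   # ".." in *other* is ambiguous
    except ValueError:
        pass                                     # now raised instead of a wrong result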
diff --git a/Misc/NEWS.d/next/Library/2023-07-22-13-09-28.gh-issue-106186.EIsUNG.rst b/Misc/NEWS.d/next/Library/2023-07-22-13-09-28.gh-issue-106186.EIsUNG.rst new file mode 100644 index 00000000000000..07fdcc96fa38a6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-22-13-09-28.gh-issue-106186.EIsUNG.rst @@ -0,0 +1,3 @@ +Do not report the ``MultipartInvariantViolationDefect`` defect +when the :class:`email.parser.Parser` class is used +to parse emails with ``headersonly=True``. diff --git a/Misc/NEWS.d/next/Library/2023-07-22-14-29-34.gh-issue-65495.fw84qM.rst b/Misc/NEWS.d/next/Library/2023-07-22-14-29-34.gh-issue-65495.fw84qM.rst new file mode 100644 index 00000000000000..e75b6c0f1d6759 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-22-14-29-34.gh-issue-65495.fw84qM.rst @@ -0,0 +1 @@ +Use lowercase ``mail from`` and ``rcpt to`` in :class:`smtplib.SMTP`. diff --git a/Misc/NEWS.d/next/Library/2023-07-22-15-51-33.gh-issue-83006.21zaCz.rst b/Misc/NEWS.d/next/Library/2023-07-22-15-51-33.gh-issue-83006.21zaCz.rst new file mode 100644 index 00000000000000..e64d1860828430 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-22-15-51-33.gh-issue-83006.21zaCz.rst @@ -0,0 +1,2 @@ +Document behavior of :func:`shutil.disk_usage` for non-mounted filesystems +on Unix. diff --git a/Misc/NEWS.d/next/Library/2023-07-22-16-44-58.gh-issue-82500.cQYoPj.rst b/Misc/NEWS.d/next/Library/2023-07-22-16-44-58.gh-issue-82500.cQYoPj.rst new file mode 100644 index 00000000000000..065394fd6ee712 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-22-16-44-58.gh-issue-82500.cQYoPj.rst @@ -0,0 +1 @@ +Fix overflow on 32-bit systems with :mod:`asyncio` :func:`os.sendfile` implementation. diff --git a/Misc/NEWS.d/next/Library/2023-07-22-21-57-34.gh-issue-107089.Dnget2.rst b/Misc/NEWS.d/next/Library/2023-07-22-21-57-34.gh-issue-107089.Dnget2.rst new file mode 100644 index 00000000000000..9d5ba1a2d7ccba --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-22-21-57-34.gh-issue-107089.Dnget2.rst @@ -0,0 +1,2 @@ +Shelves opened with :func:`shelve.open` have a much faster :meth:`clear` +method. Patch by James Cave. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2023-07-23-12-26-23.gh-issue-62519.w8-81X.rst b/Misc/NEWS.d/next/Library/2023-07-23-12-26-23.gh-issue-62519.w8-81X.rst new file mode 100644 index 00000000000000..96e2a3dcc24fb0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-23-12-26-23.gh-issue-62519.w8-81X.rst @@ -0,0 +1,2 @@ +Make :func:`gettext.pgettext` search plural definitions when +translation is not found. diff --git a/Misc/NEWS.d/next/Library/2023-07-23-13-05-32.gh-issue-105578.XAQtyR.rst b/Misc/NEWS.d/next/Library/2023-07-23-13-05-32.gh-issue-105578.XAQtyR.rst new file mode 100644 index 00000000000000..4a03f5c35ff6c0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-23-13-05-32.gh-issue-105578.XAQtyR.rst @@ -0,0 +1,2 @@ +Deprecate :class:`typing.AnyStr` in favor of the new Type Parameter syntax. +See :pep:`695`.
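For the :class:`typing.AnyStr` deprecation above, a sketch of the old spelling next to the :pep:`695` replacement (the function names are made up)::

    from typing import AnyStr

    def concat_old(a: AnyStr, b: AnyStr) -> AnyStr:   # deprecated spelling
        return a + b

    def concat[S: (str, bytes)](a: S, b: S) -> S:     # Python 3.12 type parameter syntax
        return a + b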
diff --git a/Misc/NEWS.d/next/Library/2023-07-24-01-21-16.gh-issue-46376.w-xuDL.rst b/Misc/NEWS.d/next/Library/2023-07-24-01-21-16.gh-issue-46376.w-xuDL.rst new file mode 100644 index 00000000000000..8e8f0245b4539b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-24-01-21-16.gh-issue-46376.w-xuDL.rst @@ -0,0 +1 @@ +Prevent memory leak and use-after-free when using pointers to pointers with ctypes. diff --git a/Misc/NEWS.d/next/Library/2023-07-28-14-56-35.gh-issue-107369.bvTq8F.rst b/Misc/NEWS.d/next/Library/2023-07-28-14-56-35.gh-issue-107369.bvTq8F.rst new file mode 100644 index 00000000000000..76aeab65e90a20 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-28-14-56-35.gh-issue-107369.bvTq8F.rst @@ -0,0 +1,2 @@ +Optimize :func:`textwrap.indent`. It is ~30% faster for large input. Patch +by Inada Naoki. diff --git a/Misc/NEWS.d/next/Library/2023-07-29-02-36-50.gh-issue-107409.HG27Nu.rst b/Misc/NEWS.d/next/Library/2023-07-29-02-36-50.gh-issue-107409.HG27Nu.rst new file mode 100644 index 00000000000000..1ecc7207605c70 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-29-02-36-50.gh-issue-107409.HG27Nu.rst @@ -0,0 +1 @@ +Set :attr:`!__wrapped__` attribute in :func:`reprlib.recursive_repr`. diff --git a/Misc/NEWS.d/next/Library/2023-08-01-15-17-20.gh-issue-105481.vMbmj_.rst b/Misc/NEWS.d/next/Library/2023-08-01-15-17-20.gh-issue-105481.vMbmj_.rst new file mode 100644 index 00000000000000..153c18a6f00953 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-01-15-17-20.gh-issue-105481.vMbmj_.rst @@ -0,0 +1 @@ +:data:`opcode.ENABLE_SPECIALIZATION` (which was added in 3.12 but never documented or intended for external usage) is moved to :data:`_opcode.ENABLE_SPECIALIZATION` where tests can access it. diff --git a/Misc/NEWS.d/next/Library/2023-08-01-21-43-58.gh-issue-105481.cl2ajS.rst b/Misc/NEWS.d/next/Library/2023-08-01-21-43-58.gh-issue-105481.cl2ajS.rst new file mode 100644 index 00000000000000..d02f909e870188 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-01-21-43-58.gh-issue-105481.cl2ajS.rst @@ -0,0 +1,2 @@ +Remove ``opcode.is_pseudo``, ``opcode.MIN_PSEUDO_OPCODE`` and ``opcode.MAX_PSEUDO_OPCODE``, +which were added in 3.12, were never documented and were not intended to be used externally. diff --git a/Misc/NEWS.d/next/Library/2023-08-03-11-31-11.gh-issue-107576.pO_s9I.rst b/Misc/NEWS.d/next/Library/2023-08-03-11-31-11.gh-issue-107576.pO_s9I.rst new file mode 100644 index 00000000000000..67677dd3c8ed24 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-03-11-31-11.gh-issue-107576.pO_s9I.rst @@ -0,0 +1,3 @@ +Fix :func:`types.get_original_bases` to only return +:attr:`!__orig_bases__` if it is present on ``cls`` directly. Patch by +James Hilton-Balfe. diff --git a/Misc/NEWS.d/next/Library/2023-08-03-12-52-19.gh-issue-107077.-pzHD6.rst b/Misc/NEWS.d/next/Library/2023-08-03-12-52-19.gh-issue-107077.-pzHD6.rst new file mode 100644 index 00000000000000..ecaf437a48e0ae --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-03-12-52-19.gh-issue-107077.-pzHD6.rst @@ -0,0 +1,6 @@ +It seems that in some conditions, OpenSSL will return ``SSL_ERROR_SYSCALL`` +instead of ``SSL_ERROR_SSL`` when a certificate verification has failed, +but the error parameters will still contain ``ERR_LIB_SSL`` and +``SSL_R_CERTIFICATE_VERIFY_FAILED``. We are now detecting this situation and +raising the appropriate ``ssl.SSLCertVerificationError``.
Patch by Pablo +Galindo diff --git a/Misc/NEWS.d/next/Library/2023-08-05-05-10-41.gh-issue-106684.P9zRXb.rst b/Misc/NEWS.d/next/Library/2023-08-05-05-10-41.gh-issue-106684.P9zRXb.rst new file mode 100644 index 00000000000000..02c52d714e9df7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-05-05-10-41.gh-issue-106684.P9zRXb.rst @@ -0,0 +1 @@ +Raise :exc:`ResourceWarning` when :class:`asyncio.StreamWriter` is not closed leading to memory leaks. Patch by Kumar Aditya. diff --git a/Misc/NEWS.d/next/Library/2023-08-06-15-29-00.gh-issue-100814.h195gW.rst b/Misc/NEWS.d/next/Library/2023-08-06-15-29-00.gh-issue-100814.h195gW.rst new file mode 100644 index 00000000000000..86cb7bf79f3078 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-06-15-29-00.gh-issue-100814.h195gW.rst @@ -0,0 +1,2 @@ +Passing a callable object as an option value to a Tkinter image now raises +the expected TclError instead of an AttributeError. diff --git a/Misc/NEWS.d/next/Library/2023-08-07-14-12-07.gh-issue-107715.238r2f.rst b/Misc/NEWS.d/next/Library/2023-08-07-14-12-07.gh-issue-107715.238r2f.rst new file mode 100644 index 00000000000000..cd2a5d0d5324a2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-07-14-12-07.gh-issue-107715.238r2f.rst @@ -0,0 +1 @@ +Fix `doctest.DocTestFinder.find` in presence of class names with special characters. Patch by Gertjan van Zwieten. diff --git a/Misc/NEWS.d/next/Library/2023-08-07-14-24-42.gh-issue-107710.xfOCfj.rst b/Misc/NEWS.d/next/Library/2023-08-07-14-24-42.gh-issue-107710.xfOCfj.rst new file mode 100644 index 00000000000000..70f8b58e7ff5f5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-07-14-24-42.gh-issue-107710.xfOCfj.rst @@ -0,0 +1 @@ +Speed up :func:`logging.getHandlerNames`. diff --git a/Misc/NEWS.d/next/Library/2023-08-08-19-57-45.gh-issue-107782.mInjFE.rst b/Misc/NEWS.d/next/Library/2023-08-08-19-57-45.gh-issue-107782.mInjFE.rst new file mode 100644 index 00000000000000..fb8a50de3a9eee --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-08-19-57-45.gh-issue-107782.mInjFE.rst @@ -0,0 +1,2 @@ +:mod:`pydoc` is now able to show signatures which are not representable in +Python, e.g. for ``getattr`` and ``dict.pop``. diff --git a/Misc/NEWS.d/next/Library/2023-08-09-15-37-20.gh-issue-107812.CflAXa.rst b/Misc/NEWS.d/next/Library/2023-08-09-15-37-20.gh-issue-107812.CflAXa.rst new file mode 100644 index 00000000000000..0aac44fb418836 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-08-09-15-37-20.gh-issue-107812.CflAXa.rst @@ -0,0 +1 @@ +Extend socket's netlink support to the FreeBSD platform. diff --git a/Misc/NEWS.d/next/Security/2023-03-07-21-46-29.gh-issue-102509.5ouaH_.rst b/Misc/NEWS.d/next/Security/2023-03-07-21-46-29.gh-issue-102509.5ouaH_.rst new file mode 100644 index 00000000000000..d1a8e8b5a8d3c4 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2023-03-07-21-46-29.gh-issue-102509.5ouaH_.rst @@ -0,0 +1,2 @@ +Start initializing ``ob_digit`` during creation of :c:type:`PyLongObject` +objects. Patch by Illia Volochii. 
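The new :exc:`ResourceWarning` for unclosed :class:`asyncio.StreamWriter` objects announced earlier in this block is avoided by closing writers explicitly; a sketch with a hypothetical endpoint::

    import asyncio

    async def fetch_line(host: str, port: int) -> bytes:
        reader, writer = await asyncio.open_connection(host, port)
        try:
            writer.write(b"PING\r\n")
            await writer.drain()
            return await reader.readline()
        finally:
            writer.close()
            await writer.wait_closed()   # no warning: the writer is fully closed

    # asyncio.run(fetch_line("example.invalid", 7))  # hypothetical server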
diff --git a/Misc/NEWS.d/next/Security/2023-06-13-20-52-24.gh-issue-102988.Kei7Vf.rst b/Misc/NEWS.d/next/Security/2023-06-13-20-52-24.gh-issue-102988.Kei7Vf.rst index e0434ccd2ccab5..c67ec45737b535 100644 --- a/Misc/NEWS.d/next/Security/2023-06-13-20-52-24.gh-issue-102988.Kei7Vf.rst +++ b/Misc/NEWS.d/next/Security/2023-06-13-20-52-24.gh-issue-102988.Kei7Vf.rst @@ -1,4 +1,4 @@ -CVE-2023-27043: Prevent :func:`email.utils.parseaddr` -and :func:`email.utils.getaddresses` from returning the realname portion of an -invalid RFC2822 email header in the email address portion of the 2-tuple -returned after being parsed by :class:`email._parseaddr.AddressList`. +Reverted the :mod:`email.utils` security improvement change released in +3.12beta4 that unintentionally caused :mod:`email.utils.getaddresses` to fail +to parse email addresses with a comma in the quoted name field. +See :gh:`106669`. diff --git a/Misc/NEWS.d/next/Security/2023-08-05-03-51-05.gh-issue-107774.VPjaTR.rst b/Misc/NEWS.d/next/Security/2023-08-05-03-51-05.gh-issue-107774.VPjaTR.rst new file mode 100644 index 00000000000000..b89b50c79f7e2a --- /dev/null +++ b/Misc/NEWS.d/next/Security/2023-08-05-03-51-05.gh-issue-107774.VPjaTR.rst @@ -0,0 +1,3 @@ +PEP 669 specifies that ``sys.monitoring.register_callback`` will generate an +audit event. Pre-releases of Python 3.12 did not generate the audit event. +This is now fixed. diff --git a/Misc/NEWS.d/next/Tests/2022-06-09-21-27-38.gh-issue-69714.49tyHW.rst b/Misc/NEWS.d/next/Tests/2022-06-09-21-27-38.gh-issue-69714.49tyHW.rst new file mode 100644 index 00000000000000..e28b94a171c40e --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2022-06-09-21-27-38.gh-issue-69714.49tyHW.rst @@ -0,0 +1 @@ +Add additional tests to :mod:`calendar` to achieve full test coverage. diff --git a/Misc/NEWS.d/next/Tests/2023-07-22-13-49-40.gh-issue-106714.btYI5S.rst b/Misc/NEWS.d/next/Tests/2023-07-22-13-49-40.gh-issue-106714.btYI5S.rst new file mode 100644 index 00000000000000..955620521c8f68 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-07-22-13-49-40.gh-issue-106714.btYI5S.rst @@ -0,0 +1,3 @@ +test_capi: Fix test_no_FatalError_infinite_loop() to no longer write a +coredump, by using test.support.SuppressCrashReport. Patch by Victor +Stinner. diff --git a/Misc/NEWS.d/next/Tests/2023-07-24-16-56-59.gh-issue-107178.Gq1usE.rst b/Misc/NEWS.d/next/Tests/2023-07-24-16-56-59.gh-issue-107178.Gq1usE.rst new file mode 100644 index 00000000000000..dd6becf6b00130 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-07-24-16-56-59.gh-issue-107178.Gq1usE.rst @@ -0,0 +1,2 @@ +Add the C API test for functions in the Mapping Protocol, the Sequence +Protocol and some functions in the Object Protocol. diff --git a/Misc/NEWS.d/next/Tests/2023-07-25-14-36-33.gh-issue-107237.y1pY79.rst b/Misc/NEWS.d/next/Tests/2023-07-25-14-36-33.gh-issue-107237.y1pY79.rst new file mode 100644 index 00000000000000..a04f7eeddef174 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-07-25-14-36-33.gh-issue-107237.y1pY79.rst @@ -0,0 +1,2 @@ +``test_logging``: Fix ``test_udp_reconnection()`` by increasing the timeout +from 100 ms to 5 minutes (LONG_TIMEOUT). Patch by Victor Stinner. 
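The :mod:`email.utils` revert above restores the long-standing handling of commas inside quoted display names; for reference (the address is made up)::

    from email.utils import getaddresses

    getaddresses(['"Doe, John" <jdoe@example.org>'])
    # expected: [('Doe, John', 'jdoe@example.org')]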
diff --git a/Misc/NEWS.d/next/Tools-Demos/2022-07-23-00-33-28.gh-issue-95065.NfCCpp.rst b/Misc/NEWS.d/next/Tools-Demos/2022-07-23-00-33-28.gh-issue-95065.NfCCpp.rst new file mode 100644 index 00000000000000..3641716769cd56 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2022-07-23-00-33-28.gh-issue-95065.NfCCpp.rst @@ -0,0 +1,6 @@ +It is now possible to deprecate passing parameters positionally with +Argument Clinic, using the new ``* [from X.Y]`` syntax. +(To be read as *"keyword-only from Python version X.Y"*.) +See :ref:`clinic-howto-deprecate-positional` for more information. +Patch by Erlend E. Aasland with help from Alex Waygood, +Nikita Sobolev, and Serhiy Storchaka. diff --git a/Misc/NEWS.d/next/Tools-Demos/2023-07-21-23-16-05.gh-issue-106970.NLRnml.rst b/Misc/NEWS.d/next/Tools-Demos/2023-07-21-23-16-05.gh-issue-106970.NLRnml.rst new file mode 100644 index 00000000000000..194e3351b0470c --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2023-07-21-23-16-05.gh-issue-106970.NLRnml.rst @@ -0,0 +1,4 @@ +Fix bugs in the Argument Clinic ``destination clear`` command; the +destination buffers would never be cleared, and the ``destination`` +directive parser would simply continue to the fault handler after processing +the command. Patch by Erlend E. Aasland. diff --git a/Misc/NEWS.d/next/Tools-Demos/2023-07-30-23-32-16.gh-issue-107467.5O9p3G.rst b/Misc/NEWS.d/next/Tools-Demos/2023-07-30-23-32-16.gh-issue-107467.5O9p3G.rst new file mode 100644 index 00000000000000..2996837371be0f --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2023-07-30-23-32-16.gh-issue-107467.5O9p3G.rst @@ -0,0 +1,2 @@ +The Argument Clinic command-line tool now prints to stderr instead of stdout +on failure. diff --git a/Misc/NEWS.d/next/Tools-Demos/2023-08-04-00-04-40.gh-issue-107609.2DqgtL.rst b/Misc/NEWS.d/next/Tools-Demos/2023-08-04-00-04-40.gh-issue-107609.2DqgtL.rst new file mode 100644 index 00000000000000..080a6c15d9b8c5 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2023-08-04-00-04-40.gh-issue-107609.2DqgtL.rst @@ -0,0 +1,3 @@ +Fix duplicate module check in Argument Clinic. Previously, a duplicate +definition would incorrectly be silently accepted. Patch by Erlend E. +Aasland. diff --git a/Misc/NEWS.d/next/Tools-Demos/2023-08-07-16-30-48.gh-issue-95065.-im4R5.rst b/Misc/NEWS.d/next/Tools-Demos/2023-08-07-16-30-48.gh-issue-95065.-im4R5.rst new file mode 100644 index 00000000000000..7284f5bd548810 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2023-08-07-16-30-48.gh-issue-95065.-im4R5.rst @@ -0,0 +1,2 @@ +Argument Clinic now supports overriding automatically generated signature by +using directive ``@text_signature``. See :ref:`clinic-howto-override-signature`. diff --git a/Misc/NEWS.d/next/Tools-Demos/2023-08-08-12-21-41.gh-issue-104683.DRsAQE.rst b/Misc/NEWS.d/next/Tools-Demos/2023-08-08-12-21-41.gh-issue-104683.DRsAQE.rst new file mode 100644 index 00000000000000..ee3a70967b098b --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2023-08-08-12-21-41.gh-issue-104683.DRsAQE.rst @@ -0,0 +1 @@ +Add ``--exclude`` option to Argument Clinic CLI. diff --git a/Misc/NEWS.d/next/Windows/2023-07-18-13-01-26.gh-issue-106844.mci4xO.rst b/Misc/NEWS.d/next/Windows/2023-07-18-13-01-26.gh-issue-106844.mci4xO.rst new file mode 100644 index 00000000000000..1fdf162ef4ecdd --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-07-18-13-01-26.gh-issue-106844.mci4xO.rst @@ -0,0 +1 @@ +Fix integer overflow and truncating by the null character in :func:`!_winapi.LCMapStringEx` which affects :func:`ntpath.normcase`. 
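The :func:`!_winapi.LCMapStringEx` fix above feeds :func:`ntpath.normcase`; for ordinary inputs its user-visible behavior is unchanged, e.g.::

    import ntpath

    ntpath.normcase("C:/Program Files/Python")
    # 'c:\\program files\\python' (lowercased, forward slashes become backslashes)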
diff --git a/Misc/NEWS.d/next/macOS/2023-07-30-23-42-20.gh-issue-99079.JAtoh1.rst b/Misc/NEWS.d/next/macOS/2023-07-30-23-42-20.gh-issue-99079.JAtoh1.rst new file mode 100644 index 00000000000000..d0eef4ec1003ce --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-07-30-23-42-20.gh-issue-99079.JAtoh1.rst @@ -0,0 +1 @@ +Update macOS installer to use OpenSSL 3.0.9. diff --git a/Misc/stable_abi.toml b/Misc/stable_abi.toml index dd2c9910b83ccb..16d5c1a07ae3e2 100644 --- a/Misc/stable_abi.toml +++ b/Misc/stable_abi.toml @@ -2446,3 +2446,7 @@ added = '3.13' [function.PyModule_Add] added = '3.13' +[function.PyDict_GetItemRef] + added = '3.13' +[function.PyDict_GetItemStringRef] + added = '3.13' diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index 11a022e3d2044e..689f1d42ef0eee 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -159,7 +159,7 @@ @MODULE__XXTESTFUZZ_TRUE@_xxtestfuzz _xxtestfuzz/_xxtestfuzz.c _xxtestfuzz/fuzzer.c @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c -@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/unicode.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyos.c _testcapi/immortal.c _testcapi/heaptype_relative.c _testcapi/gc.c +@MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/vectorcall_limited.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyos.c _testcapi/immortal.c _testcapi/heaptype_relative.c _testcapi/gc.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c # Some testing modules MUST be built as shared libraries. diff --git a/Modules/_abc.c b/Modules/_abc.c index 8a3aa9cb88880f..9473905243d438 100644 --- a/Modules/_abc.c +++ b/Modules/_abc.c @@ -7,6 +7,7 @@ #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_object.h" // _PyType_GetSubclasses() #include "pycore_runtime.h" // _Py_ID() +#include "pycore_setobject.h" // _PySet_NextEntry() #include "pycore_typeobject.h" // _PyType_GetMRO() #include "pycore_weakref.h" // _PyWeakref_GET_REF() #include "clinic/_abc.c.h" diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index ef9f7f8902e09e..39c803355ba95b 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -8,7 +8,7 @@ #include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing() #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_runtime_init.h" // _Py_ID() -#include "structmember.h" // PyMemberDef + #include // offsetof() @@ -1398,7 +1398,8 @@ FutureObj_get_state(FutureObj *fut, void *Py_UNUSED(ignored)) default: assert (0); } - return Py_XNewRef(ret); + assert(_Py_IsImmortal(ret)); + return ret; } static PyObject * diff --git a/Modules/_bz2module.c b/Modules/_bz2module.c index eeefe6034998c8..0a84f25ca4cbe7 100644 --- a/Modules/_bz2module.c +++ b/Modules/_bz2module.c @@ -1,10 +1,10 @@ /* _bz2 - Low-level Python interface to libbzip2. 
*/ #include "Python.h" -#include "structmember.h" // PyMemberDef #include #include +#include // offsetof() // Blocks output buffer wrappers #include "pycore_blocks_output_buffer.h" @@ -112,7 +112,7 @@ typedef struct { typedef struct { PyObject_HEAD bz_stream bzs; - char eof; /* T_BOOL expects a char */ + char eof; /* Py_T_BOOL expects a char */ PyObject *unused_data; char needs_input; char *input_buffer; @@ -714,11 +714,11 @@ PyDoc_STRVAR(BZ2Decompressor_needs_input_doc, "True if more input is needed before more decompressed data can be produced."); static PyMemberDef BZ2Decompressor_members[] = { - {"eof", T_BOOL, offsetof(BZ2Decompressor, eof), - READONLY, BZ2Decompressor_eof__doc__}, - {"unused_data", T_OBJECT_EX, offsetof(BZ2Decompressor, unused_data), - READONLY, BZ2Decompressor_unused_data__doc__}, - {"needs_input", T_BOOL, offsetof(BZ2Decompressor, needs_input), READONLY, + {"eof", Py_T_BOOL, offsetof(BZ2Decompressor, eof), + Py_READONLY, BZ2Decompressor_eof__doc__}, + {"unused_data", Py_T_OBJECT_EX, offsetof(BZ2Decompressor, unused_data), + Py_READONLY, BZ2Decompressor_unused_data__doc__}, + {"needs_input", Py_T_BOOL, offsetof(BZ2Decompressor, needs_input), Py_READONLY, BZ2Decompressor_needs_input_doc}, {NULL} }; diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index 9a81531bdffb16..f2915f83b9d968 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -3,7 +3,7 @@ #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_typeobject.h" // _PyType_GetModuleState() -#include "structmember.h" // PyMemberDef + #include typedef struct { @@ -1630,7 +1630,7 @@ static PyMethodDef deque_methods[] = { }; static PyMemberDef deque_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(dequeobject, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(dequeobject, weakreflist), Py_READONLY}, {NULL}, }; @@ -2054,7 +2054,7 @@ static PyMethodDef defdict_methods[] = { }; static PyMemberDef defdict_members[] = { - {"default_factory", T_OBJECT, + {"default_factory", _Py_T_OBJECT, offsetof(defdictobject, default_factory), 0, PyDoc_STR("Factory for default value called by __missing__().")}, {NULL} @@ -2466,7 +2466,7 @@ tuplegetter_repr(_tuplegetterobject *self) static PyMemberDef tuplegetter_members[] = { - {"__doc__", T_OBJECT, offsetof(_tuplegetterobject, doc), 0}, + {"__doc__", _Py_T_OBJECT, offsetof(_tuplegetterobject, doc), 0}, {0} }; diff --git a/Modules/_csv.c b/Modules/_csv.c index c36d9805a12841..24a57e362521db 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -11,7 +11,8 @@ module instead. 
#define MODULE_VERSION "1.0" #include "Python.h" -#include "structmember.h" // PyMemberDef + +#include // offsetof() #include /*[clinic input] @@ -336,9 +337,9 @@ dialect_check_quoting(int quoting) #define D_OFF(x) offsetof(DialectObj, x) static struct PyMemberDef Dialect_memberlist[] = { - { "skipinitialspace", T_BOOL, D_OFF(skipinitialspace), READONLY }, - { "doublequote", T_BOOL, D_OFF(doublequote), READONLY }, - { "strict", T_BOOL, D_OFF(strict), READONLY }, + { "skipinitialspace", Py_T_BOOL, D_OFF(skipinitialspace), Py_READONLY }, + { "doublequote", Py_T_BOOL, D_OFF(doublequote), Py_READONLY }, + { "strict", Py_T_BOOL, D_OFF(strict), Py_READONLY }, { NULL } }; @@ -970,8 +971,8 @@ static struct PyMethodDef Reader_methods[] = { #define R_OFF(x) offsetof(ReaderObj, x) static struct PyMemberDef Reader_memberlist[] = { - { "dialect", T_OBJECT, R_OFF(dialect), READONLY }, - { "line_num", T_ULONG, R_OFF(line_num), READONLY }, + { "dialect", _Py_T_OBJECT, R_OFF(dialect), Py_READONLY }, + { "line_num", Py_T_ULONG, R_OFF(line_num), Py_READONLY }, { NULL } }; @@ -1364,7 +1365,7 @@ static struct PyMethodDef Writer_methods[] = { #define W_OFF(x) offsetof(WriterObj, x) static struct PyMemberDef Writer_memberlist[] = { - { "dialect", T_OBJECT, W_OFF(dialect), READONLY }, + { "dialect", _Py_T_OBJECT, W_OFF(dialect), Py_READONLY }, { NULL } }; diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index 200fd36748c403..dc80291d3b810b 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -110,7 +110,7 @@ bytes(cdata) #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _Py_EnterRecursiveCall() -#include "structmember.h" // PyMemberDef + #include #ifdef MS_WIN32 @@ -1728,9 +1728,9 @@ c_wchar_p_from_param(PyObject *type, PyObject *value) Py_DECREF(as_parameter); return value; } - /* XXX better message */ - PyErr_SetString(PyExc_TypeError, - "wrong type"); + PyErr_Format(PyExc_TypeError, + "'%.200s' object cannot be interpreted " + "as ctypes.c_wchar_p", Py_TYPE(value)->tp_name); return NULL; } @@ -1792,9 +1792,9 @@ c_char_p_from_param(PyObject *type, PyObject *value) Py_DECREF(as_parameter); return value; } - /* XXX better message */ - PyErr_SetString(PyExc_TypeError, - "wrong type"); + PyErr_Format(PyExc_TypeError, + "'%.200s' object cannot be interpreted " + "as ctypes.c_char_p", Py_TYPE(value)->tp_name); return NULL; } @@ -1927,9 +1927,9 @@ c_void_p_from_param(PyObject *type, PyObject *value) Py_DECREF(as_parameter); return value; } - /* XXX better message */ - PyErr_SetString(PyExc_TypeError, - "wrong type"); + PyErr_Format(PyExc_TypeError, + "'%.200s' object cannot be interpreted " + "as ctypes.c_void_p", Py_TYPE(value)->tp_name); return NULL; } @@ -2759,14 +2759,14 @@ PyCData_dealloc(PyObject *self) } static PyMemberDef PyCData_members[] = { - { "_b_base_", T_OBJECT, - offsetof(CDataObject, b_base), READONLY, + { "_b_base_", _Py_T_OBJECT, + offsetof(CDataObject, b_base), Py_READONLY, "the base object" }, - { "_b_needsfree_", T_INT, - offsetof(CDataObject, b_needsfree), READONLY, + { "_b_needsfree_", Py_T_INT, + offsetof(CDataObject, b_needsfree), Py_READONLY, "whether the object owns the memory or not" }, - { "_objects", T_OBJECT, - offsetof(CDataObject, b_objects), READONLY, + { "_objects", _Py_T_OBJECT, + offsetof(CDataObject, b_objects), Py_READONLY, "internal objects tree (NEVER CHANGE THIS OBJECT!)"}, { NULL }, }; @@ -4793,6 +4793,16 @@ static PyMappingMethods Array_as_mapping = { Array_ass_subscript, }; +PyDoc_STRVAR(array_doc, 
+"Abstract base class for arrays.\n" +"\n" +"The recommended way to create concrete array types is by multiplying any\n" +"ctypes data type with a non-negative integer. Alternatively, you can subclass\n" +"this type and define _length_ and _type_ class variables. Array elements can\n" +"be read and written using standard subscript and slice accesses for slice\n" +"reads, the resulting object is not itself an Array." +); + PyTypeObject PyCArray_Type = { PyVarObject_HEAD_INIT(NULL, 0) "_ctypes.Array", @@ -4813,8 +4823,8 @@ PyTypeObject PyCArray_Type = { 0, /* tp_getattro */ 0, /* tp_setattro */ &PyCData_as_buffer, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - PyDoc_STR("XXX to be provided"), /* tp_doc */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + array_doc, /* tp_doc */ (traverseproc)PyCData_traverse, /* tp_traverse */ (inquiry)PyCData_clear, /* tp_clear */ 0, /* tp_richcompare */ @@ -5129,6 +5139,8 @@ static PyObject * Pointer_get_contents(CDataObject *self, void *closure) { StgDictObject *stgdict; + PyObject *keep, *ptr_probe; + CDataObject *ptr2ptr; if (*(void **)self->b_ptr == NULL) { PyErr_SetString(PyExc_ValueError, @@ -5138,6 +5150,33 @@ Pointer_get_contents(CDataObject *self, void *closure) stgdict = PyObject_stgdict((PyObject *)self); assert(stgdict); /* Cannot be NULL for pointer instances */ + + keep = GetKeepedObjects(self); + if (keep != NULL) { + // check if it's a pointer to a pointer: + // pointers will have '0' key in the _objects + ptr_probe = PyDict_GetItemString(keep, "0"); + + if (ptr_probe != NULL) { + ptr2ptr = (CDataObject*) PyDict_GetItemString(keep, "1"); + if (ptr2ptr == NULL) { + PyErr_SetString(PyExc_ValueError, + "Unexpected NULL pointer in _objects"); + return NULL; + } + // don't construct a new object, + // return existing one instead to preserve refcount + assert( + *(void**) self->b_ptr == ptr2ptr->b_ptr || + *(void**) self->b_value.c == ptr2ptr->b_ptr || + *(void**) self->b_ptr == ptr2ptr->b_value.c || + *(void**) self->b_value.c == ptr2ptr->b_value.c + ); // double-check that we are returning the same thing + Py_INCREF(ptr2ptr); + return (PyObject *) ptr2ptr; + } + } + return PyCData_FromBaseObj(stgdict->proto, (PyObject *)self, 0, *(void **)self->b_ptr); diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index b3831ae7119a56..f9535db4f57c0e 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -54,12 +54,17 @@ */ +/*[clinic input] +module _ctypes +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=476a19c49b31a75c]*/ + #ifndef Py_BUILD_CORE_BUILTIN # define Py_BUILD_CORE_MODULE 1 #endif #include "Python.h" -#include "structmember.h" // PyMemberDef + #include @@ -98,6 +103,7 @@ #include "pycore_runtime.h" // _PyRuntime #include "pycore_global_objects.h" // _Py_ID() +#include "clinic/callproc.c.h" #define CTYPES_CAPSULE_NAME_PYMEM "_ctypes pymem" @@ -581,8 +587,8 @@ PyCArg_repr(PyCArgObject *self) } static PyMemberDef PyCArgType_members[] = { - { "_obj", T_OBJECT, - offsetof(PyCArgObject, obj), READONLY, + { "_obj", _Py_T_OBJECT, + offsetof(PyCArgObject, obj), Py_READONLY, "the wrapped object" }, { NULL }, }; @@ -1893,8 +1899,22 @@ unpickle(PyObject *self, PyObject *args) return NULL; } +/*[clinic input] +_ctypes.POINTER as create_pointer_type + + type as cls: object + A ctypes type. + / + +Create and return a new ctypes pointer type. 
+ +Pointer types are cached and reused internally, +so calling this function repeatedly is cheap. +[clinic start generated code]*/ + static PyObject * -POINTER(PyObject *self, PyObject *cls) +create_pointer_type(PyObject *module, PyObject *cls) +/*[clinic end generated code: output=98c3547ab6f4f40b input=3b81cff5ff9b9d5b]*/ { PyObject *result; PyTypeObject *typ; @@ -1944,8 +1964,22 @@ POINTER(PyObject *self, PyObject *cls) return result; } +/*[clinic input] +_ctypes.pointer as create_pointer_inst + + obj as arg: object + / + +Create a new pointer instance, pointing to 'obj'. + +The returned object is of the type POINTER(type(obj)). Note that if you +just want to pass a pointer to an object to a foreign function call, you +should use byref(obj) which is much faster. +[clinic start generated code]*/ + static PyObject * -pointer(PyObject *self, PyObject *arg) +create_pointer_inst(PyObject *module, PyObject *arg) +/*[clinic end generated code: output=3b543bc9f0de2180 input=713685fdb4d9bc27]*/ { PyObject *result; PyObject *typ; @@ -1957,7 +1991,7 @@ pointer(PyObject *self, PyObject *arg) else if (PyErr_Occurred()) { return NULL; } - typ = POINTER(NULL, (PyObject *)Py_TYPE(arg)); + typ = create_pointer_type(NULL, (PyObject *)Py_TYPE(arg)); if (typ == NULL) return NULL; result = PyObject_CallOneArg(typ, arg); @@ -1997,8 +2031,8 @@ buffer_info(PyObject *self, PyObject *arg) PyMethodDef _ctypes_module_methods[] = { {"get_errno", get_errno, METH_NOARGS}, {"set_errno", set_errno, METH_VARARGS}, - {"POINTER", POINTER, METH_O }, - {"pointer", pointer, METH_O }, + CREATE_POINTER_TYPE_METHODDEF + CREATE_POINTER_INST_METHODDEF {"_unpickle", unpickle, METH_VARARGS }, {"buffer_info", buffer_info, METH_O, "Return buffer interface information"}, {"resize", resize, METH_VARARGS, "Resize the memory buffer of a ctypes instance"}, diff --git a/Modules/_ctypes/clinic/callproc.c.h b/Modules/_ctypes/clinic/callproc.c.h new file mode 100644 index 00000000000000..6f036bb66b25aa --- /dev/null +++ b/Modules/_ctypes/clinic/callproc.c.h @@ -0,0 +1,38 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif + + +PyDoc_STRVAR(create_pointer_type__doc__, +"POINTER($module, type, /)\n" +"--\n" +"\n" +"Create and return a new ctypes pointer type.\n" +"\n" +" type\n" +" A ctypes type.\n" +"\n" +"Pointer types are cached and reused internally,\n" +"so calling this function repeatedly is cheap."); + +#define CREATE_POINTER_TYPE_METHODDEF \ + {"POINTER", (PyCFunction)create_pointer_type, METH_O, create_pointer_type__doc__}, + +PyDoc_STRVAR(create_pointer_inst__doc__, +"pointer($module, obj, /)\n" +"--\n" +"\n" +"Create a new pointer instance, pointing to \'obj\'.\n" +"\n" +"The returned object is of the type POINTER(type(obj)). 
Note that if you\n" +"just want to pass a pointer to an object to a foreign function call, you\n" +"should use byref(obj) which is much faster."); + +#define CREATE_POINTER_INST_METHODDEF \ + {"pointer", (PyCFunction)create_pointer_inst, METH_O, create_pointer_inst__doc__}, +/*[clinic end generated code: output=ae26452a759ba56d input=a9049054013a1b77]*/ diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index b8cb0c012fd537..9002a1de7fb5b7 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -15,7 +15,7 @@ #include "pycore_long.h" // _PyLong_GetOne() #include "pycore_object.h" // _PyObject_Init() #include "datetime.h" -#include "structmember.h" // PyMemberDef + #include @@ -2727,13 +2727,13 @@ delta_reduce(PyDateTime_Delta* self, PyObject *Py_UNUSED(ignored)) static PyMemberDef delta_members[] = { - {"days", T_INT, OFFSET(days), READONLY, + {"days", Py_T_INT, OFFSET(days), Py_READONLY, PyDoc_STR("Number of days.")}, - {"seconds", T_INT, OFFSET(seconds), READONLY, + {"seconds", Py_T_INT, OFFSET(seconds), Py_READONLY, PyDoc_STR("Number of seconds (>= 0 and less than 1 day).")}, - {"microseconds", T_INT, OFFSET(microseconds), READONLY, + {"microseconds", Py_T_INT, OFFSET(microseconds), Py_READONLY, PyDoc_STR("Number of microseconds (>= 0 and less than 1 second).")}, {NULL} }; @@ -6834,8 +6834,7 @@ _datetime_exec(PyObject *module) return -1; } - if (PyModule_AddObject(module, "datetime_CAPI", x) < 0) { - Py_DECREF(x); + if (PyModule_Add(module, "datetime_CAPI", x) < 0) { return -1; } diff --git a/Modules/_dbmmodule.c b/Modules/_dbmmodule.c index 5be444d53e8da3..bd807698927e86 100644 --- a/Modules/_dbmmodule.c +++ b/Modules/_dbmmodule.c @@ -414,6 +414,38 @@ _dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, return default_value; } +/*[clinic input] +_dbm.dbm.clear + cls: defining_class + / +Remove all items from the database. 
+ +[clinic start generated code]*/ + +static PyObject * +_dbm_dbm_clear_impl(dbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=8d126b9e1d01a434 input=43aa6ca1acb7f5f5]*/ +{ + _dbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + check_dbmobject_open(self, state->dbm_error); + datum key; + // Invalidate cache + self->di_size = -1; + while (1) { + key = dbm_firstkey(self->di_dbm); + if (key.dptr == NULL) { + break; + } + if (dbm_delete(self->di_dbm, key) < 0) { + dbm_clearerr(self->di_dbm); + PyErr_SetString(state->dbm_error, "cannot delete item from database"); + return NULL; + } + } + Py_RETURN_NONE; +} + static PyObject * dbm__enter__(PyObject *self, PyObject *args) { @@ -431,6 +463,7 @@ static PyMethodDef dbm_methods[] = { _DBM_DBM_KEYS_METHODDEF _DBM_DBM_GET_METHODDEF _DBM_DBM_SETDEFAULT_METHODDEF + _DBM_DBM_CLEAR_METHODDEF {"__enter__", dbm__enter__, METH_NOARGS, NULL}, {"__exit__", dbm__exit__, METH_VARARGS, NULL}, {NULL, NULL} /* sentinel */ diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index f9dc6e875fa5fc..585214cc45d6cd 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -46,6 +46,7 @@ #endif struct PyDecContextObject; +struct DecCondMap; typedef struct { PyTypeObject *PyDecContextManager_Type; @@ -82,6 +83,9 @@ typedef struct { PyObject *SignalTuple; + struct DecCondMap *signal_map; + struct DecCondMap *cond_map; + /* External C-API functions */ binaryfunc _py_long_multiply; binaryfunc _py_long_floor_divide; @@ -91,9 +95,36 @@ typedef struct { PyCFunction _py_float_as_integer_ratio; } decimal_state; -static decimal_state global_state; +static inline decimal_state * +get_module_state(PyObject *mod) +{ + decimal_state *state = _PyModule_GetState(mod); + assert(state != NULL); + return state; +} + +static struct PyModuleDef _decimal_module; + +static inline decimal_state * +get_module_state_by_def(PyTypeObject *tp) +{ + PyObject *mod = PyType_GetModuleByDef(tp, &_decimal_module); + assert(mod != NULL); + return get_module_state(mod); +} + +static inline decimal_state * +find_state_left_or_right(PyObject *left, PyObject *right) +{ + PyObject *mod = PyType_GetModuleByDef(Py_TYPE(left), &_decimal_module); + if (mod == NULL) { + PyErr_Clear(); + mod = PyType_GetModuleByDef(Py_TYPE(right), &_decimal_module); + } + assert(mod != NULL); + return get_module_state(mod); +} -#define GLOBAL_STATE() (&global_state) #if !defined(MPD_VERSION_HEX) || MPD_VERSION_HEX < 0x02050000 #error "libmpdec version >= 2.5.0 required" @@ -181,7 +212,7 @@ incr_false(void) #define DEC_ERR_OCCURRED (DEC_INVALID_SIGNALS<<1) #define DEC_ERRORS (DEC_INVALID_SIGNALS|DEC_ERR_OCCURRED) -typedef struct { +typedef struct DecCondMap { const char *name; /* condition or signal name */ const char *fqname; /* fully qualified name */ uint32_t flag; /* libmpdec flag */ @@ -193,7 +224,7 @@ typedef struct { #define INEXACT 6 #define ROUNDED 7 #define SIGNAL_MAP_LEN 9 -static DecCondMap signal_map[] = { +static DecCondMap signal_map_template[] = { {"InvalidOperation", "decimal.InvalidOperation", MPD_IEEE_Invalid_operation, NULL}, {"FloatOperation", "decimal.FloatOperation", MPD_Float_operation, NULL}, {"DivisionByZero", "decimal.DivisionByZero", MPD_Division_by_zero, NULL}, @@ -207,7 +238,7 @@ static DecCondMap signal_map[] = { }; /* Exceptions that inherit from InvalidOperation */ -static DecCondMap cond_map[] = { +static DecCondMap cond_map_template[] = { {"InvalidOperation", "decimal.InvalidOperation", MPD_Invalid_operation, NULL}, 
{"ConversionSyntax", "decimal.ConversionSyntax", MPD_Conversion_syntax, NULL}, {"DivisionImpossible", "decimal.DivisionImpossible", MPD_Division_impossible, NULL}, @@ -219,6 +250,21 @@ static DecCondMap cond_map[] = { {NULL} }; +/* Return a duplicate of DecCondMap template */ +static inline DecCondMap * +dec_cond_map_init(DecCondMap *template, Py_ssize_t size) +{ + DecCondMap *cm; + cm = PyMem_Malloc(size); + if (cm == NULL) { + PyErr_NoMemory(); + return NULL; + } + + memcpy(cm, template, size); + return cm; +} + static const char *dec_signal_string[MPD_NUM_FLAGS] = { "Clamped", "InvalidOperation", @@ -268,14 +314,12 @@ value_error_int(const char *mesg) return -1; } -#ifdef CONFIG_32 static PyObject * value_error_ptr(const char *mesg) { PyErr_SetString(PyExc_ValueError, mesg); return NULL; } -#endif static int type_error_int(const char *mesg) @@ -309,11 +353,11 @@ dec_traphandler(mpd_context_t *ctx UNUSED) /* GCOV_NOT_REACHED */ } static PyObject * -flags_as_exception(uint32_t flags) +flags_as_exception(decimal_state *state, uint32_t flags) { DecCondMap *cm; - for (cm = signal_map; cm->name != NULL; cm++) { + for (cm = state->signal_map; cm->name != NULL; cm++) { if (flags&cm->flag) { return cm->ex; } @@ -323,11 +367,11 @@ flags_as_exception(uint32_t flags) } Py_LOCAL_INLINE(uint32_t) -exception_as_flag(PyObject *ex) +exception_as_flag(decimal_state *state, PyObject *ex) { DecCondMap *cm; - for (cm = signal_map; cm->name != NULL; cm++) { + for (cm = state->signal_map; cm->name != NULL; cm++) { if (cm->ex == ex) { return cm->flag; } @@ -338,7 +382,7 @@ exception_as_flag(PyObject *ex) } static PyObject * -flags_as_list(uint32_t flags) +flags_as_list(decimal_state *state, uint32_t flags) { PyObject *list; DecCondMap *cm; @@ -348,14 +392,14 @@ flags_as_list(uint32_t flags) return NULL; } - for (cm = cond_map; cm->name != NULL; cm++) { + for (cm = state->cond_map; cm->name != NULL; cm++) { if (flags&cm->flag) { if (PyList_Append(list, cm->ex) < 0) { goto error; } } } - for (cm = signal_map+1; cm->name != NULL; cm++) { + for (cm = state->signal_map+1; cm->name != NULL; cm++) { if (flags&cm->flag) { if (PyList_Append(list, cm->ex) < 0) { goto error; @@ -371,7 +415,7 @@ flags_as_list(uint32_t flags) } static PyObject * -signals_as_list(uint32_t flags) +signals_as_list(decimal_state *state, uint32_t flags) { PyObject *list; DecCondMap *cm; @@ -381,7 +425,7 @@ signals_as_list(uint32_t flags) return NULL; } - for (cm = signal_map; cm->name != NULL; cm++) { + for (cm = state->signal_map; cm->name != NULL; cm++) { if (flags&cm->flag) { if (PyList_Append(list, cm->ex) < 0) { Py_DECREF(list); @@ -394,7 +438,7 @@ signals_as_list(uint32_t flags) } static uint32_t -list_as_flags(PyObject *list) +list_as_flags(decimal_state *state, PyObject *list) { PyObject *item; uint32_t flags, x; @@ -406,7 +450,7 @@ list_as_flags(PyObject *list) flags = 0; for (j = 0; j < n; j++) { item = PyList_GetItem(list, j); - x = exception_as_flag(item); + x = exception_as_flag(state, item); if (x & DEC_ERRORS) { return x; } @@ -417,7 +461,7 @@ list_as_flags(PyObject *list) } static PyObject * -flags_as_dict(uint32_t flags) +flags_as_dict(decimal_state *state, uint32_t flags) { DecCondMap *cm; PyObject *dict; @@ -427,7 +471,7 @@ flags_as_dict(uint32_t flags) return NULL; } - for (cm = signal_map; cm->name != NULL; cm++) { + for (cm = state->signal_map; cm->name != NULL; cm++) { PyObject *b = flags&cm->flag ? 
Py_True : Py_False; if (PyDict_SetItem(dict, cm->ex, b) < 0) { Py_DECREF(dict); @@ -439,7 +483,7 @@ flags_as_dict(uint32_t flags) } static uint32_t -dict_as_flags(PyObject *val) +dict_as_flags(decimal_state *state, PyObject *val) { PyObject *b; DecCondMap *cm; @@ -458,7 +502,7 @@ dict_as_flags(PyObject *val) return DEC_INVALID_SIGNALS; } - for (cm = signal_map; cm->name != NULL; cm++) { + for (cm = state->signal_map; cm->name != NULL; cm++) { b = PyDict_GetItemWithError(val, cm->ex); if (b == NULL) { if (PyErr_Occurred()) { @@ -504,6 +548,7 @@ static int dec_addstatus(PyObject *context, uint32_t status) { mpd_context_t *ctx = CTX(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); ctx->status |= status; if (status & (ctx->traps|MPD_Malloc_error)) { @@ -514,11 +559,11 @@ dec_addstatus(PyObject *context, uint32_t status) return 1; } - ex = flags_as_exception(ctx->traps&status); + ex = flags_as_exception(state, ctx->traps&status); if (ex == NULL) { return 1; /* GCOV_NOT_REACHED */ } - siglist = flags_as_list(ctx->traps&status); + siglist = flags_as_list(state, ctx->traps&status); if (siglist == NULL) { return 1; } @@ -531,11 +576,9 @@ dec_addstatus(PyObject *context, uint32_t status) } static int -getround(PyObject *v) +getround(decimal_state *state, PyObject *v) { int i; - decimal_state *state = GLOBAL_STATE(); - if (PyUnicode_Check(v)) { for (i = 0; i < _PY_DEC_ROUND_GUARD; i++) { if (v == state->round_map[i]) { @@ -563,6 +606,8 @@ getround(PyObject *v) initialized to new SignalDicts. Once a SignalDict is tied to a context, it cannot be deleted. */ +static const char *INVALID_SIGNALDICT_ERROR_MSG = "invalid signal dict"; + static int signaldict_init(PyObject *self, PyObject *args UNUSED, PyObject *kwds UNUSED) { @@ -571,15 +616,21 @@ signaldict_init(PyObject *self, PyObject *args UNUSED, PyObject *kwds UNUSED) } static Py_ssize_t -signaldict_len(PyObject *self UNUSED) +signaldict_len(PyObject *self) { + if (SdFlagAddr(self) == NULL) { + return value_error_int(INVALID_SIGNALDICT_ERROR_MSG); + } return SIGNAL_MAP_LEN; } static PyObject * -signaldict_iter(PyObject *self UNUSED) +signaldict_iter(PyObject *self) { - decimal_state *state = GLOBAL_STATE(); + if (SdFlagAddr(self) == NULL) { + return value_error_ptr(INVALID_SIGNALDICT_ERROR_MSG); + } + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); return PyTuple_Type.tp_iter(state->SignalTuple); } @@ -587,8 +638,12 @@ static PyObject * signaldict_getitem(PyObject *self, PyObject *key) { uint32_t flag; + if (SdFlagAddr(self) == NULL) { + return value_error_ptr(INVALID_SIGNALDICT_ERROR_MSG); + } + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); - flag = exception_as_flag(key); + flag = exception_as_flag(state, key); if (flag & DEC_ERRORS) { return NULL; } @@ -602,11 +657,16 @@ signaldict_setitem(PyObject *self, PyObject *key, PyObject *value) uint32_t flag; int x; + if (SdFlagAddr(self) == NULL) { + return value_error_int(INVALID_SIGNALDICT_ERROR_MSG); + } + if (value == NULL) { return value_error_int("signal keys cannot be deleted"); } - flag = exception_as_flag(key); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + flag = exception_as_flag(state, key); if (flag & DEC_ERRORS) { return -1; } @@ -650,9 +710,14 @@ signaldict_repr(PyObject *self) const char *b[SIGNAL_MAP_LEN]; /* bool */ int i; + if (SdFlagAddr(self) == NULL) { + return value_error_ptr(INVALID_SIGNALDICT_ERROR_MSG); + } + assert(SIGNAL_MAP_LEN == 9); - for (cm=signal_map, i=0; cm->name != NULL; cm++, i++) { + 
decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + for (cm=state->signal_map, i=0; cm->name != NULL; cm++, i++) { n[i] = cm->fqname; b[i] = SdFlags(self)&cm->flag ? "True" : "False"; } @@ -670,15 +735,19 @@ signaldict_richcompare(PyObject *v, PyObject *w, int op) { PyObject *res = Py_NotImplemented; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = find_state_left_or_right(v, w); assert(PyDecSignalDict_Check(state, v)); + if ((SdFlagAddr(v) == NULL) || (SdFlagAddr(w) == NULL)) { + return value_error_ptr(INVALID_SIGNALDICT_ERROR_MSG); + } + if (op == Py_EQ || op == Py_NE) { if (PyDecSignalDict_Check(state, w)) { res = (SdFlags(v)==SdFlags(w)) ^ (op==Py_NE) ? Py_True : Py_False; } else if (PyDict_Check(w)) { - uint32_t flags = dict_as_flags(w); + uint32_t flags = dict_as_flags(state, w); if (flags & DEC_ERRORS) { if (flags & DEC_INVALID_SIGNALS) { /* non-comparable: Py_NotImplemented */ @@ -700,7 +769,11 @@ signaldict_richcompare(PyObject *v, PyObject *w, int op) static PyObject * signaldict_copy(PyObject *self, PyObject *args UNUSED) { - return flags_as_dict(SdFlags(self)); + if (SdFlagAddr(self) == NULL) { + return value_error_ptr(INVALID_SIGNALDICT_ERROR_MSG); + } + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + return flags_as_dict(state, SdFlags(self)); } @@ -769,7 +842,7 @@ static PyObject * context_getround(PyObject *self, void *closure UNUSED) { int i = mpd_getround(CTX(self)); - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); return Py_NewRef(state->round_map[i]); } @@ -928,7 +1001,8 @@ context_setround(PyObject *self, PyObject *value, void *closure UNUSED) mpd_context_t *ctx; int x; - x = getround(value); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + x = getround(state, value); if (x == -1) { return -1; } @@ -986,8 +1060,8 @@ context_settraps_list(PyObject *self, PyObject *value) { mpd_context_t *ctx; uint32_t flags; - - flags = list_as_flags(value); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + flags = list_as_flags(state, value); if (flags & DEC_ERRORS) { return -1; } @@ -1006,12 +1080,12 @@ context_settraps_dict(PyObject *self, PyObject *value) mpd_context_t *ctx; uint32_t flags; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); if (PyDecSignalDict_Check(state, value)) { flags = SdFlags(value); } else { - flags = dict_as_flags(value); + flags = dict_as_flags(state, value); if (flags & DEC_ERRORS) { return -1; } @@ -1051,8 +1125,9 @@ context_setstatus_list(PyObject *self, PyObject *value) { mpd_context_t *ctx; uint32_t flags; + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); - flags = list_as_flags(value); + flags = list_as_flags(state, value); if (flags & DEC_ERRORS) { return -1; } @@ -1071,12 +1146,12 @@ context_setstatus_dict(PyObject *self, PyObject *value) mpd_context_t *ctx; uint32_t flags; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); if (PyDecSignalDict_Check(state, value)) { flags = SdFlags(value); } else { - flags = dict_as_flags(value); + flags = dict_as_flags(state, value); if (flags & DEC_ERRORS) { return -1; } @@ -1262,7 +1337,7 @@ context_new(PyTypeObject *type, PyObject *args UNUSED, PyObject *kwds UNUSED) PyDecContextObject *self = NULL; mpd_context_t *ctx; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(type); if (type == state->PyDecContext_Type) { 
self = PyObject_GC_New(PyDecContextObject, state->PyDecContext_Type); } @@ -1327,7 +1402,7 @@ context_dealloc(PyDecContextObject *self) PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); #ifndef WITH_DECIMAL_CONTEXTVAR - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); if (self == state->cached_context) { state->cached_context = NULL; } @@ -1379,7 +1454,7 @@ context_repr(PyDecContextObject *self) int n, mem; #ifdef Py_DEBUG - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); assert(PyDecContext_Check(state, self)); #endif ctx = CTX(self); @@ -1447,7 +1522,7 @@ ieee_context(PyObject *dummy UNUSED, PyObject *v) goto error; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(v)); context = PyObject_CallObject((PyObject *)state->PyDecContext_Type, NULL); if (context == NULL) { return NULL; @@ -1470,7 +1545,7 @@ context_copy(PyObject *self, PyObject *args UNUSED) { PyObject *copy; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); copy = PyObject_CallObject((PyObject *)state->PyDecContext_Type, NULL); if (copy == NULL) { return NULL; @@ -1490,14 +1565,15 @@ context_reduce(PyObject *self, PyObject *args UNUSED) PyObject *traps; PyObject *ret; mpd_context_t *ctx; + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); ctx = CTX(self); - flags = signals_as_list(ctx->status); + flags = signals_as_list(state, ctx->status); if (flags == NULL) { return NULL; } - traps = signals_as_list(ctx->traps); + traps = signals_as_list(state, ctx->traps); if (traps == NULL) { Py_DECREF(flags); return NULL; @@ -1542,7 +1618,7 @@ static PyGetSetDef context_getsets [] = #define CONTEXT_CHECK_VA(state, obj) \ if (obj == Py_None) { \ - CURRENT_CONTEXT(obj); \ + CURRENT_CONTEXT(state, obj); \ } \ else if (!PyDecContext_Check(state, obj)) { \ PyErr_SetString(PyExc_TypeError, \ @@ -1565,10 +1641,9 @@ static PyGetSetDef context_getsets [] = #ifndef WITH_DECIMAL_CONTEXTVAR /* Get the context from the thread state dictionary. */ static PyObject * -current_context_from_dict(void) +current_context_from_dict(decimal_state *modstate) { PyThreadState *tstate = _PyThreadState_GET(); - decimal_state *modstate = GLOBAL_STATE(); #ifdef Py_DEBUG // The caller must hold the GIL _Py_EnsureTstateNotNULL(tstate); @@ -1617,45 +1692,41 @@ current_context_from_dict(void) /* Return borrowed reference to thread local context. 
*/ static PyObject * -current_context(void) +current_context(decimal_state *modstate) { PyThreadState *tstate = _PyThreadState_GET(); - decimal_state *modstate = GLOBAL_STATE(); if (modstate->cached_context && modstate->cached_context->tstate == tstate) { return (PyObject *)(modstate->cached_context); } - return current_context_from_dict(); + return current_context_from_dict(modstate); } /* ctxobj := borrowed reference to the current context */ -#define CURRENT_CONTEXT(ctxobj) \ - ctxobj = current_context(); \ +#define CURRENT_CONTEXT(state, ctxobj) \ + ctxobj = current_context(state); \ if (ctxobj == NULL) { \ return NULL; \ } /* Return a new reference to the current context */ static PyObject * -PyDec_GetCurrentContext(PyObject *self UNUSED, PyObject *args UNUSED) +PyDec_GetCurrentContext(PyObject *self, PyObject *args UNUSED) { PyObject *context; + decimal_state *state = get_module_state(self); - context = current_context(); - if (context == NULL) { - return NULL; - } - + CURRENT_CONTEXT(state, context); return Py_NewRef(context); } /* Set the thread local context to a new context, decrement old reference */ static PyObject * -PyDec_SetCurrentContext(PyObject *self UNUSED, PyObject *v) +PyDec_SetCurrentContext(PyObject *self, PyObject *v) { PyObject *dict; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state(self); CONTEXT_CHECK(state, v); dict = PyThreadState_GetDict(); @@ -1691,9 +1762,8 @@ PyDec_SetCurrentContext(PyObject *self UNUSED, PyObject *v) } #else static PyObject * -init_current_context(void) +init_current_context(decimal_state *state) { - decimal_state *state = GLOBAL_STATE(); PyObject *tl_context = context_copy(state->default_context_template, NULL); if (tl_context == NULL) { return NULL; @@ -1711,10 +1781,9 @@ init_current_context(void) } static inline PyObject * -current_context(void) +current_context(decimal_state *state) { PyObject *tl_context; - decimal_state *state = GLOBAL_STATE(); if (PyContextVar_Get(state->current_context_var, NULL, &tl_context) < 0) { return NULL; } @@ -1723,12 +1792,12 @@ current_context(void) return tl_context; } - return init_current_context(); + return init_current_context(state); } /* ctxobj := borrowed reference to the current context */ -#define CURRENT_CONTEXT(ctxobj) \ - ctxobj = current_context(); \ +#define CURRENT_CONTEXT(state, ctxobj) \ + ctxobj = current_context(state); \ if (ctxobj == NULL) { \ return NULL; \ } \ @@ -1736,16 +1805,17 @@ current_context(void) /* Return a new reference to the current context */ static PyObject * -PyDec_GetCurrentContext(PyObject *self UNUSED, PyObject *args UNUSED) +PyDec_GetCurrentContext(PyObject *self, PyObject *args UNUSED) { - return current_context(); + decimal_state *state = get_module_state(self); + return current_context(state); } /* Set the thread local context to a new context, decrement old reference */ static PyObject * -PyDec_SetCurrentContext(PyObject *self UNUSED, PyObject *v) +PyDec_SetCurrentContext(PyObject *self, PyObject *v) { - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state(self); CONTEXT_CHECK(state, v); /* If the new context is one of the templates, make a copy. @@ -1778,7 +1848,7 @@ PyDec_SetCurrentContext(PyObject *self UNUSED, PyObject *v) * owns one reference to the global (outer) context and one * to the local (inner) context. 
*/ static PyObject * -ctxmanager_new(PyTypeObject *type UNUSED, PyObject *args, PyObject *kwds) +ctxmanager_new(PyObject *m, PyObject *args, PyObject *kwds) { static char *kwlist[] = { "ctx", "prec", "rounding", @@ -1798,8 +1868,8 @@ ctxmanager_new(PyTypeObject *type UNUSED, PyObject *args, PyObject *kwds) PyObject *flags = Py_None; PyObject *traps = Py_None; - decimal_state *state = GLOBAL_STATE(); - CURRENT_CONTEXT(global); + decimal_state *state = get_module_state(m); + CURRENT_CONTEXT(state, global); if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOOOOOOOO", kwlist, &local, &prec, &rounding, &Emin, &Emax, &capitals, &clamp, &flags, &traps)) { return NULL; @@ -1876,7 +1946,7 @@ ctxmanager_set_local(PyDecContextManagerObject *self, PyObject *args UNUSED) { PyObject *ret; - ret = PyDec_SetCurrentContext(NULL, self->local); + ret = PyDec_SetCurrentContext(PyType_GetModule(Py_TYPE(self)), self->local); if (ret == NULL) { return NULL; } @@ -1891,7 +1961,7 @@ ctxmanager_restore_global(PyDecContextManagerObject *self, { PyObject *ret; - ret = PyDec_SetCurrentContext(NULL, self->global); + ret = PyDec_SetCurrentContext(PyType_GetModule(Py_TYPE(self)), self->global); if (ret == NULL) { return NULL; } @@ -1934,7 +2004,7 @@ PyDecType_New(PyTypeObject *type) { PyDecObject *dec; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(type); if (type == state->PyDec_Type) { dec = PyObject_GC_New(PyDecObject, state->PyDec_Type); } @@ -2321,8 +2391,8 @@ PyDecType_FromFloatExact(PyTypeObject *type, PyObject *v, mpd_t *d1, *d2; uint32_t status = 0; mpd_context_t maxctx; + decimal_state *state = get_module_state_by_def(type); - decimal_state *state = GLOBAL_STATE(); #ifdef Py_DEBUG assert(PyType_IsSubtype(type, state->PyDec_Type)); #endif @@ -2459,7 +2529,7 @@ PyDecType_FromDecimalExact(PyTypeObject *type, PyObject *v, PyObject *context) PyObject *dec; uint32_t status = 0; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(type); if (type == state->PyDec_Type && PyDec_CheckExact(state, v)) { return Py_NewRef(v); } @@ -2734,8 +2804,8 @@ dec_from_float(PyObject *type, PyObject *pyfloat) PyObject *context; PyObject *result; - CURRENT_CONTEXT(context); - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def((PyTypeObject *)type); + CURRENT_CONTEXT(state, context); result = PyDecType_FromFloatExact(state->PyDec_Type, pyfloat, context); if (type != (PyObject *)state->PyDec_Type && result != NULL) { Py_SETREF(result, PyObject_CallFunctionObjArgs(type, result, NULL)); @@ -2748,7 +2818,7 @@ dec_from_float(PyObject *type, PyObject *pyfloat) static PyObject * ctx_from_float(PyObject *context, PyObject *v) { - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); return PyDec_FromFloat(state, v, context); } @@ -2759,7 +2829,7 @@ dec_apply(PyObject *v, PyObject *context) PyObject *result; uint32_t status = 0; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); result = dec_alloc(state); if (result == NULL) { return NULL; @@ -2786,7 +2856,7 @@ dec_apply(PyObject *v, PyObject *context) static PyObject * PyDecType_FromObjectExact(PyTypeObject *type, PyObject *v, PyObject *context) { - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(type); if (v == NULL) { return PyDecType_FromSsizeExact(type, 0, context); } @@ -2821,7 +2891,7 @@ PyDecType_FromObjectExact(PyTypeObject 
*type, PyObject *v, PyObject *context) static PyObject * PyDec_FromObject(PyObject *v, PyObject *context) { - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); if (v == NULL) { return PyDec_FromSsize(state, 0, context); } @@ -2877,7 +2947,7 @@ dec_new(PyTypeObject *type, PyObject *args, PyObject *kwds) &v, &context)) { return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(type); CONTEXT_CHECK_VA(state, context); return PyDecType_FromObjectExact(type, v, context); @@ -2908,7 +2978,7 @@ ctx_create_decimal(PyObject *context, PyObject *args) Py_LOCAL_INLINE(int) convert_op(int type_err, PyObject **conv, PyObject *v, PyObject *context) { - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); if (PyDec_Check(state, v)) { *conv = Py_NewRef(v); return 1; @@ -3011,7 +3081,7 @@ multiply_by_denominator(PyObject *v, PyObject *r, PyObject *context) if (tmp == NULL) { return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); denom = PyDec_FromLongExact(state, tmp, context); Py_DECREF(tmp); if (denom == NULL) { @@ -3066,7 +3136,7 @@ numerator_as_decimal(PyObject *r, PyObject *context) return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); num = PyDec_FromLongExact(state, tmp, context); Py_DECREF(tmp); return num; @@ -3085,7 +3155,7 @@ convert_op_cmp(PyObject **vcmp, PyObject **wcmp, PyObject *v, PyObject *w, *vcmp = v; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); if (PyDec_Check(state, w)) { *wcmp = Py_NewRef(w); } @@ -3183,7 +3253,8 @@ dec_str(PyObject *dec) mpd_ssize_t size; char *cp; - CURRENT_CONTEXT(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(dec)); + CURRENT_CONTEXT(state, context); size = mpd_to_sci_size(&cp, MPD(dec), CtxCaps(context)); if (size < 0) { PyErr_NoMemory(); @@ -3201,8 +3272,8 @@ dec_repr(PyObject *dec) { PyObject *res, *context; char *cp; - - CURRENT_CONTEXT(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(dec)); + CURRENT_CONTEXT(state, context); cp = mpd_to_sci(MPD(dec), CtxCaps(context)); if (cp == NULL) { PyErr_NoMemory(); @@ -3363,7 +3434,8 @@ dec_format(PyObject *dec, PyObject *args) mpd_t tmp = {MPD_STATIC|MPD_STATIC_DATA,0,0,0,MPD_MINALLOC_MAX,dt}; - CURRENT_CONTEXT(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(dec)); + CURRENT_CONTEXT(state, context); if (!PyArg_ParseTuple(args, "O|O", &fmtarg, &override)) { return NULL; } @@ -3626,9 +3698,9 @@ dec_as_integer_ratio(PyObject *self, PyObject *args UNUSED) return NULL; } - CURRENT_CONTEXT(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + CURRENT_CONTEXT(state, context); - decimal_state *state = GLOBAL_STATE(); tmp = dec_alloc(state); if (tmp == NULL) { return NULL; @@ -3719,12 +3791,12 @@ PyDec_ToIntegralValue(PyObject *dec, PyObject *args, PyObject *kwds) &rounding, &context)) { return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(dec)); CONTEXT_CHECK_VA(state, context); workctx = *CTX(context); if (rounding != Py_None) { - int round = getround(rounding); + int round = getround(state, rounding); if (round < 0) { return NULL; } @@ -3761,12 +3833,12 @@ PyDec_ToIntegralExact(PyObject *dec, PyObject *args, PyObject *kwds) 
&rounding, &context)) { return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(dec)); CONTEXT_CHECK_VA(state, context); workctx = *CTX(context); if (rounding != Py_None) { - int round = getround(rounding); + int round = getround(state, rounding); if (round < 0) { return NULL; } @@ -3829,7 +3901,8 @@ PyDec_Round(PyObject *dec, PyObject *args) uint32_t status = 0; PyObject *context; - CURRENT_CONTEXT(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(dec)); + CURRENT_CONTEXT(state, context); if (!PyArg_ParseTuple(args, "|O", &x)) { return NULL; } @@ -3849,7 +3922,6 @@ PyDec_Round(PyObject *dec, PyObject *args) if (y == -1 && PyErr_Occurred()) { return NULL; } - decimal_state *state = GLOBAL_STATE(); result = dec_alloc(state); if (result == NULL) { return NULL; @@ -3951,7 +4023,7 @@ PyDec_AsTuple(PyObject *dec, PyObject *dummy UNUSED) } } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(dec)); result = PyObject_CallFunctionObjArgs((PyObject *)state->DecimalTuple, sign, coeff, expt, NULL); @@ -3978,8 +4050,8 @@ nm_##MPDFUNC(PyObject *self) \ PyObject *context; \ uint32_t status = 0; \ \ - decimal_state *state = GLOBAL_STATE(); \ - CURRENT_CONTEXT(context); \ + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); \ + CURRENT_CONTEXT(state, context); \ if ((result = dec_alloc(state)) == NULL) { \ return NULL; \ } \ @@ -4003,8 +4075,8 @@ nm_##MPDFUNC(PyObject *self, PyObject *other) \ PyObject *context; \ uint32_t status = 0; \ \ - decimal_state *state = GLOBAL_STATE(); \ - CURRENT_CONTEXT(context) ; \ + decimal_state *state = find_state_left_or_right(self, other); \ + CURRENT_CONTEXT(state, context) ; \ CONVERT_BINOP(&a, &b, self, other, context); \ \ if ((result = dec_alloc(state)) == NULL) { \ @@ -4044,7 +4116,7 @@ dec_##MPDFUNC(PyObject *self, PyObject *args, PyObject *kwds) \ &context)) { \ return NULL; \ } \ - decimal_state *state = GLOBAL_STATE(); \ + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); \ CONTEXT_CHECK_VA(state, context); \ \ return MPDFUNC(MPD(self), CTX(context)) ? 
incr_true() : incr_false(); \ @@ -4064,7 +4136,8 @@ dec_##MPDFUNC(PyObject *self, PyObject *args, PyObject *kwds) \ &context)) { \ return NULL; \ } \ - decimal_state *state = GLOBAL_STATE(); \ + decimal_state *state = \ + get_module_state_by_def(Py_TYPE(self)); \ CONTEXT_CHECK_VA(state, context); \ \ if ((result = dec_alloc(state)) == NULL) { \ @@ -4096,7 +4169,8 @@ dec_##MPDFUNC(PyObject *self, PyObject *args, PyObject *kwds) \ &other, &context)) { \ return NULL; \ } \ - decimal_state *state = GLOBAL_STATE(); \ + decimal_state *state = \ + get_module_state_by_def(Py_TYPE(self)); \ CONTEXT_CHECK_VA(state, context); \ CONVERT_BINOP_RAISE(&a, &b, self, other, context); \ \ @@ -4134,7 +4208,8 @@ dec_##MPDFUNC(PyObject *self, PyObject *args, PyObject *kwds) \ &other, &context)) { \ return NULL; \ } \ - decimal_state *state = GLOBAL_STATE(); \ + decimal_state *state = \ + get_module_state_by_def(Py_TYPE(self)); \ CONTEXT_CHECK_VA(state, context); \ CONVERT_BINOP_RAISE(&a, &b, self, other, context); \ \ @@ -4167,7 +4242,7 @@ dec_##MPDFUNC(PyObject *self, PyObject *args, PyObject *kwds) \ &other, &third, &context)) { \ return NULL; \ } \ - decimal_state *state = GLOBAL_STATE(); \ + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); \ CONTEXT_CHECK_VA(state, context); \ CONVERT_TERNOP_RAISE(&a, &b, &c, self, other, third, context); \ \ @@ -4210,8 +4285,8 @@ static PyObject * nm_dec_as_long(PyObject *dec) { PyObject *context; - - CURRENT_CONTEXT(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(dec)); + CURRENT_CONTEXT(state, context); return dec_as_long(dec, context, MPD_ROUND_DOWN); } @@ -4230,10 +4305,10 @@ nm_mpd_qdivmod(PyObject *v, PyObject *w) uint32_t status = 0; PyObject *ret; - CURRENT_CONTEXT(context); + decimal_state *state = find_state_left_or_right(v, w); + CURRENT_CONTEXT(state, context); CONVERT_BINOP(&a, &b, v, w, context); - decimal_state *state = GLOBAL_STATE(); q = dec_alloc(state); if (q == NULL) { Py_DECREF(a); @@ -4271,7 +4346,8 @@ nm_mpd_qpow(PyObject *base, PyObject *exp, PyObject *mod) PyObject *context; uint32_t status = 0; - CURRENT_CONTEXT(context); + decimal_state *state = find_state_left_or_right(base, exp); + CURRENT_CONTEXT(state, context); CONVERT_BINOP(&a, &b, base, exp, context); if (mod != Py_None) { @@ -4282,7 +4358,6 @@ nm_mpd_qpow(PyObject *base, PyObject *exp, PyObject *mod) } } - decimal_state *state = GLOBAL_STATE(); result = dec_alloc(state); if (result == NULL) { Py_DECREF(a); @@ -4380,11 +4455,11 @@ dec_conjugate(PyObject *self, PyObject *dummy UNUSED) } static PyObject * -dec_mpd_radix(PyObject *self UNUSED, PyObject *dummy UNUSED) +dec_mpd_radix(PyObject *self, PyObject *dummy UNUSED) { PyObject *result; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); result = dec_alloc(state); if (result == NULL) { return NULL; @@ -4400,7 +4475,7 @@ dec_mpd_qcopy_abs(PyObject *self, PyObject *dummy UNUSED) PyObject *result; uint32_t status = 0; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); if ((result = dec_alloc(state)) == NULL) { return NULL; } @@ -4421,7 +4496,7 @@ dec_mpd_qcopy_negate(PyObject *self, PyObject *dummy UNUSED) PyObject *result; uint32_t status = 0; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); if ((result = dec_alloc(state)) == NULL) { return NULL; } @@ -4451,7 +4526,7 @@ dec_mpd_class(PyObject *self, PyObject *args, PyObject *kwds) &context)) { 
return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); CONTEXT_CHECK_VA(state, context); cp = mpd_class(MPD(self), CTX(context)); @@ -4471,7 +4546,7 @@ dec_mpd_to_eng(PyObject *self, PyObject *args, PyObject *kwds) &context)) { return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); CONTEXT_CHECK_VA(state, context); size = mpd_to_eng_size(&s, MPD(self), CtxCaps(context)); @@ -4504,7 +4579,7 @@ dec_mpd_qcopy_sign(PyObject *self, PyObject *args, PyObject *kwds) &other, &context)) { return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); CONTEXT_CHECK_VA(state, context); CONVERT_BINOP_RAISE(&a, &b, self, other, context); @@ -4539,7 +4614,7 @@ dec_mpd_same_quantum(PyObject *self, PyObject *args, PyObject *kwds) &other, &context)) { return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); CONTEXT_CHECK_VA(state, context); CONVERT_BINOP_RAISE(&a, &b, self, other, context); @@ -4574,12 +4649,12 @@ dec_mpd_qquantize(PyObject *v, PyObject *args, PyObject *kwds) &w, &rounding, &context)) { return NULL; } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(v)); CONTEXT_CHECK_VA(state, context); workctx = *CTX(context); if (rounding != Py_None) { - int round = getround(rounding); + int round = getround(state, rounding); if (round < 0) { return NULL; } @@ -4618,12 +4693,12 @@ dec_richcompare(PyObject *v, PyObject *w, int op) uint32_t status = 0; int a_issnan, b_issnan; int r; + decimal_state *state = find_state_left_or_right(v, w); #ifdef Py_DEBUG - decimal_state *state = GLOBAL_STATE(); assert(PyDec_Check(state, v)); #endif - CURRENT_CONTEXT(context); + CURRENT_CONTEXT(state, context); CONVERT_BINOP_CMP(&a, &b, v, w, op, context); a_issnan = mpd_issnan(MPD(a)); @@ -4674,7 +4749,8 @@ dec_ceil(PyObject *self, PyObject *dummy UNUSED) { PyObject *context; - CURRENT_CONTEXT(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + CURRENT_CONTEXT(state, context); return dec_as_long(self, context, MPD_ROUND_CEILING); } @@ -4712,7 +4788,8 @@ dec_floor(PyObject *self, PyObject *dummy UNUSED) { PyObject *context; - CURRENT_CONTEXT(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + CURRENT_CONTEXT(state, context); return dec_as_long(self, context, MPD_ROUND_FLOOR); } @@ -4876,7 +4953,8 @@ dec_trunc(PyObject *self, PyObject *dummy UNUSED) { PyObject *context; - CURRENT_CONTEXT(context); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); + CURRENT_CONTEXT(state, context); return dec_as_long(self, context, MPD_ROUND_DOWN); } @@ -4892,7 +4970,7 @@ dec_imag(PyObject *self UNUSED, void *closure UNUSED) { PyObject *result; - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(self)); result = dec_alloc(state); if (result == NULL) { return NULL; @@ -5091,7 +5169,8 @@ ctx_##MPDFUNC(PyObject *context, PyObject *v) \ uint32_t status = 0; \ \ CONVERT_OP_RAISE(&a, v, context); \ - decimal_state *state = GLOBAL_STATE(); \ + decimal_state *state = \ + get_module_state_by_def(Py_TYPE(context)); \ if ((result = dec_alloc(state)) == NULL) { \ Py_DECREF(a); \ return NULL; \ @@ -5122,7 +5201,8 @@ ctx_##MPDFUNC(PyObject *context, PyObject *args) \ } \ \ CONVERT_BINOP_RAISE(&a, &b, v, w, context); \ - decimal_state *state = 
GLOBAL_STATE(); \ + decimal_state *state = \ + get_module_state_by_def(Py_TYPE(context)); \ if ((result = dec_alloc(state)) == NULL) { \ Py_DECREF(a); \ Py_DECREF(b); \ @@ -5157,7 +5237,8 @@ ctx_##MPDFUNC(PyObject *context, PyObject *args) \ } \ \ CONVERT_BINOP_RAISE(&a, &b, v, w, context); \ - decimal_state *state = GLOBAL_STATE(); \ + decimal_state *state = \ + get_module_state_by_def(Py_TYPE(context)); \ if ((result = dec_alloc(state)) == NULL) { \ Py_DECREF(a); \ Py_DECREF(b); \ @@ -5186,7 +5267,7 @@ ctx_##MPDFUNC(PyObject *context, PyObject *args) \ } \ \ CONVERT_TERNOP_RAISE(&a, &b, &c, v, w, x, context); \ - decimal_state *state = GLOBAL_STATE(); \ + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); \ if ((result = dec_alloc(state)) == NULL) { \ Py_DECREF(a); \ Py_DECREF(b); \ @@ -5252,7 +5333,7 @@ ctx_mpd_qdivmod(PyObject *context, PyObject *args) } CONVERT_BINOP_RAISE(&a, &b, v, w, context); - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); q = dec_alloc(state); if (q == NULL) { Py_DECREF(a); @@ -5307,7 +5388,7 @@ ctx_mpd_qpow(PyObject *context, PyObject *args, PyObject *kwds) } } - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); result = dec_alloc(state); if (result == NULL) { Py_DECREF(a); @@ -5357,9 +5438,9 @@ DecCtx_BoolFunc_NO_CTX(mpd_issnan) DecCtx_BoolFunc_NO_CTX(mpd_iszero) static PyObject * -ctx_iscanonical(PyObject *context UNUSED, PyObject *v) +ctx_iscanonical(PyObject *context, PyObject *v) { - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); if (!PyDec_Check(state, v)) { PyErr_SetString(PyExc_TypeError, "argument must be a Decimal"); @@ -5383,9 +5464,9 @@ PyDecContext_Apply(PyObject *context, PyObject *v) } static PyObject * -ctx_canonical(PyObject *context UNUSED, PyObject *v) +ctx_canonical(PyObject *context, PyObject *v) { - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); if (!PyDec_Check(state, v)) { PyErr_SetString(PyExc_TypeError, "argument must be a Decimal"); @@ -5402,7 +5483,7 @@ ctx_mpd_qcopy_abs(PyObject *context, PyObject *v) uint32_t status = 0; CONVERT_OP_RAISE(&a, v, context); - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); result = dec_alloc(state); if (result == NULL) { Py_DECREF(a); @@ -5435,7 +5516,7 @@ ctx_mpd_qcopy_negate(PyObject *context, PyObject *v) uint32_t status = 0; CONVERT_OP_RAISE(&a, v, context); - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); result = dec_alloc(state); if (result == NULL) { Py_DECREF(a); @@ -5532,7 +5613,7 @@ ctx_mpd_qcopy_sign(PyObject *context, PyObject *args) } CONVERT_BINOP_RAISE(&a, &b, v, w, context); - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state_by_def(Py_TYPE(context)); result = dec_alloc(state); if (result == NULL) { Py_DECREF(a); @@ -5721,17 +5802,6 @@ static PyMethodDef _decimal_methods [] = { NULL, NULL, 1, NULL } }; -static struct PyModuleDef _decimal_module = { - PyModuleDef_HEAD_INIT, - "decimal", - doc__decimal, - -1, - _decimal_methods, - NULL, - NULL, - NULL, - NULL -}; struct ssize_constmap { const char *name; mpd_ssize_t val; }; static struct ssize_constmap ssize_constants [] = { @@ -5807,10 +5877,9 @@ cfunc_noargs(PyTypeObject *t, const char *name) } -PyMODINIT_FUNC -PyInit__decimal(void) +static 
int +_decimal_exec(PyObject *m) { - PyObject *m = NULL; PyObject *numbers = NULL; PyObject *Number = NULL; PyObject *collections = NULL; @@ -5831,7 +5900,7 @@ PyInit__decimal(void) mpd_free = PyMem_Free; mpd_setminalloc(_Py_DEC_MINALLOC); - decimal_state *state = GLOBAL_STATE(); + decimal_state *state = get_module_state(m); /* Init external C-API functions */ state->_py_long_multiply = PyLong_Type.tp_as_number->nb_multiply; @@ -5903,10 +5972,6 @@ PyInit__decimal(void) Py_CLEAR(collections_abc); Py_CLEAR(MutableMapping); - - /* Create the module */ - ASSIGN_PTR(m, PyModule_Create(&_decimal_module)); - /* Add types to the module */ CHECK_INT(PyModule_AddType(m, state->PyDec_Type)); CHECK_INT(PyModule_AddType(m, state->PyDecContext_Type)); @@ -5922,10 +5987,12 @@ PyInit__decimal(void) ASSIGN_PTR(state->SignalTuple, PyTuple_New(SIGNAL_MAP_LEN)); /* Add exceptions that correspond to IEEE signals */ + ASSIGN_PTR(state->signal_map, dec_cond_map_init(signal_map_template, + sizeof(signal_map_template))); for (i = SIGNAL_MAP_LEN-1; i >= 0; i--) { PyObject *base; - cm = signal_map + i; + cm = state->signal_map + i; switch (cm->flag) { case MPD_Float_operation: @@ -5936,13 +6003,13 @@ PyInit__decimal(void) PyExc_ZeroDivisionError); break; case MPD_Overflow: - base = PyTuple_Pack(2, signal_map[INEXACT].ex, - signal_map[ROUNDED].ex); + base = PyTuple_Pack(2, state->signal_map[INEXACT].ex, + state->signal_map[ROUNDED].ex); break; case MPD_Underflow: - base = PyTuple_Pack(3, signal_map[INEXACT].ex, - signal_map[ROUNDED].ex, - signal_map[SUBNORMAL].ex); + base = PyTuple_Pack(3, state->signal_map[INEXACT].ex, + state->signal_map[ROUNDED].ex, + state->signal_map[SUBNORMAL].ex); break; default: base = PyTuple_Pack(1, state->DecimalException); @@ -5968,16 +6035,18 @@ PyInit__decimal(void) * several conditions, including InvalidOperation! Naming the * signal IEEEInvalidOperation would prevent the confusion. 
*/ - cond_map[0].ex = signal_map[0].ex; + ASSIGN_PTR(state->cond_map, dec_cond_map_init(cond_map_template, + sizeof(cond_map_template))); + state->cond_map[0].ex = state->signal_map[0].ex; /* Add remaining exceptions, inherit from InvalidOperation */ - for (cm = cond_map+1; cm->name != NULL; cm++) { + for (cm = state->cond_map+1; cm->name != NULL; cm++) { PyObject *base; if (cm->flag == MPD_Division_undefined) { - base = PyTuple_Pack(2, signal_map[0].ex, PyExc_ZeroDivisionError); + base = PyTuple_Pack(2, state->signal_map[0].ex, PyExc_ZeroDivisionError); } else { - base = PyTuple_Pack(1, signal_map[0].ex); + base = PyTuple_Pack(1, state->signal_map[0].ex); } if (base == NULL) { goto error; /* GCOV_NOT_REACHED */ @@ -6023,9 +6092,8 @@ PyInit__decimal(void) /* Init mpd_ssize_t constants */ for (ssize_cm = ssize_constants; ssize_cm->name != NULL; ssize_cm++) { - ASSIGN_PTR(obj, PyLong_FromSsize_t(ssize_cm->val)); - CHECK_INT(PyModule_AddObject(m, ssize_cm->name, obj)); - obj = NULL; + CHECK_INT(PyModule_Add(m, ssize_cm->name, + PyLong_FromSsize_t(ssize_cm->val))); } /* Init int constants */ @@ -6044,29 +6112,103 @@ PyInit__decimal(void) CHECK_INT(PyModule_AddStringConstant(m, "__version__", "1.70")); CHECK_INT(PyModule_AddStringConstant(m, "__libmpdec_version__", mpd_version())); - - return m; - + return 0; error: Py_CLEAR(obj); /* GCOV_NOT_REACHED */ Py_CLEAR(numbers); /* GCOV_NOT_REACHED */ Py_CLEAR(Number); /* GCOV_NOT_REACHED */ - Py_CLEAR(state->Rational); /* GCOV_NOT_REACHED */ Py_CLEAR(collections); /* GCOV_NOT_REACHED */ Py_CLEAR(collections_abc); /* GCOV_NOT_REACHED */ Py_CLEAR(MutableMapping); /* GCOV_NOT_REACHED */ - Py_CLEAR(state->SignalTuple); /* GCOV_NOT_REACHED */ - Py_CLEAR(state->DecimalTuple); /* GCOV_NOT_REACHED */ - Py_CLEAR(state->default_context_template); /* GCOV_NOT_REACHED */ + + return -1; +} + +static int +decimal_traverse(PyObject *module, visitproc visit, void *arg) +{ + decimal_state *state = get_module_state(module); + Py_VISIT(state->PyDecContextManager_Type); + Py_VISIT(state->PyDecContext_Type); + Py_VISIT(state->PyDecSignalDictMixin_Type); + Py_VISIT(state->PyDec_Type); + Py_VISIT(state->PyDecSignalDict_Type); + Py_VISIT(state->DecimalTuple); + Py_VISIT(state->DecimalException); + +#ifndef WITH_DECIMAL_CONTEXTVAR + Py_VISIT(state->tls_context_key); + Py_VISIT(state->cached_context); +#else + Py_VISIT(state->current_context_var); +#endif + + Py_VISIT(state->default_context_template); + Py_VISIT(state->basic_context_template); + Py_VISIT(state->extended_context_template); + Py_VISIT(state->Rational); + Py_VISIT(state->SignalTuple); + + return 0; +} + +static int +decimal_clear(PyObject *module) +{ + decimal_state *state = get_module_state(module); + Py_CLEAR(state->PyDecContextManager_Type); + Py_CLEAR(state->PyDecContext_Type); + Py_CLEAR(state->PyDecSignalDictMixin_Type); + Py_CLEAR(state->PyDec_Type); + Py_CLEAR(state->PyDecSignalDict_Type); + Py_CLEAR(state->DecimalTuple); + Py_CLEAR(state->DecimalException); + #ifndef WITH_DECIMAL_CONTEXTVAR - Py_CLEAR(state->tls_context_key); /* GCOV_NOT_REACHED */ + Py_CLEAR(state->tls_context_key); + Py_CLEAR(state->cached_context); #else - Py_CLEAR(state->current_context_var); /* GCOV_NOT_REACHED */ + Py_CLEAR(state->current_context_var); #endif - Py_CLEAR(state->basic_context_template); /* GCOV_NOT_REACHED */ - Py_CLEAR(state->extended_context_template); /* GCOV_NOT_REACHED */ - Py_CLEAR(m); /* GCOV_NOT_REACHED */ - return NULL; /* GCOV_NOT_REACHED */ + Py_CLEAR(state->default_context_template); + 
Py_CLEAR(state->basic_context_template); + Py_CLEAR(state->extended_context_template); + Py_CLEAR(state->Rational); + Py_CLEAR(state->SignalTuple); + + PyMem_Free(state->signal_map); + PyMem_Free(state->cond_map); + return 0; +} + +static void +decimal_free(void *module) +{ + (void)decimal_clear((PyObject *)module); +} + +static struct PyModuleDef_Slot _decimal_slots[] = { + {Py_mod_exec, _decimal_exec}, + {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, + {0, NULL}, +}; + +static struct PyModuleDef _decimal_module = { + PyModuleDef_HEAD_INIT, + .m_name = "decimal", + .m_doc = doc__decimal, + .m_size = sizeof(decimal_state), + .m_methods = _decimal_methods, + .m_slots = _decimal_slots, + .m_traverse = decimal_traverse, + .m_clear = decimal_clear, + .m_free = decimal_free, +}; + +PyMODINIT_FUNC +PyInit__decimal(void) +{ + return PyModuleDef_Init(&_decimal_module); } diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c index a8d68d68420d36..8cb57e693d81d7 100644 --- a/Modules/_elementtree.c +++ b/Modules/_elementtree.c @@ -17,7 +17,9 @@ #include "Python.h" #include "pycore_import.h" // _PyImport_GetModuleAttrString() -#include "structmember.h" // PyMemberDef +#include "pycore_pyhash.h" // _Py_HashSecret + +#include // offsetof() #include "expat.h" #include "pyexpat.h" @@ -4133,8 +4135,8 @@ _elementtree_XMLParser__setevents_impl(XMLParserObject *self, } static PyMemberDef xmlparser_members[] = { - {"entity", T_OBJECT, offsetof(XMLParserObject, entity), READONLY, NULL}, - {"target", T_OBJECT, offsetof(XMLParserObject, target), READONLY, NULL}, + {"entity", _Py_T_OBJECT, offsetof(XMLParserObject, entity), Py_READONLY, NULL}, + {"target", _Py_T_OBJECT, offsetof(XMLParserObject, target), Py_READONLY, NULL}, {NULL} }; @@ -4190,7 +4192,7 @@ static PyMethodDef element_methods[] = { }; static struct PyMemberDef element_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(ElementObject, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(ElementObject, weakreflist), Py_READONLY}, {NULL}, }; diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index c987485e66a48a..389ff4391de0be 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -6,7 +6,7 @@ #include "pycore_object.h" // _PyObject_GC_TRACK #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_tuple.h" // _PyTuple_ITEMS() -#include "structmember.h" // PyMemberDef + #include "clinic/_functoolsmodule.c.h" /*[clinic input] @@ -340,18 +340,18 @@ PyDoc_STRVAR(partial_doc, #define OFF(x) offsetof(partialobject, x) static PyMemberDef partial_memberlist[] = { - {"func", T_OBJECT, OFF(fn), READONLY, + {"func", _Py_T_OBJECT, OFF(fn), Py_READONLY, "function object to use in future partial calls"}, - {"args", T_OBJECT, OFF(args), READONLY, + {"args", _Py_T_OBJECT, OFF(args), Py_READONLY, "tuple of arguments to future partial calls"}, - {"keywords", T_OBJECT, OFF(kw), READONLY, + {"keywords", _Py_T_OBJECT, OFF(kw), Py_READONLY, "dictionary of keyword arguments to future partial calls"}, - {"__weaklistoffset__", T_PYSSIZET, - offsetof(partialobject, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, - offsetof(partialobject, dict), READONLY}, - {"__vectorcalloffset__", T_PYSSIZET, - offsetof(partialobject, vectorcall), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, + offsetof(partialobject, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, + offsetof(partialobject, dict), Py_READONLY}, + {"__vectorcalloffset__", Py_T_PYSSIZET, + 
offsetof(partialobject, vectorcall), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -540,7 +540,7 @@ keyobject_traverse(keyobject *ko, visitproc visit, void *arg) } static PyMemberDef keyobject_members[] = { - {"obj", T_OBJECT, + {"obj", _Py_T_OBJECT, offsetof(keyobject, object), 0, PyDoc_STR("Value wrapped by a key function.")}, {NULL} @@ -1394,10 +1394,10 @@ static PyGetSetDef lru_cache_getsetlist[] = { }; static PyMemberDef lru_cache_memberlist[] = { - {"__dictoffset__", T_PYSSIZET, - offsetof(lru_cache_object, dict), READONLY}, - {"__weaklistoffset__", T_PYSSIZET, - offsetof(lru_cache_object, weakreflist), READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, + offsetof(lru_cache_object, dict), Py_READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, + offsetof(lru_cache_object, weakreflist), Py_READONLY}, {NULL} /* Sentinel */ }; diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c index bedbdc081425c2..eff36fd7fb669b 100644 --- a/Modules/_gdbmmodule.c +++ b/Modules/_gdbmmodule.c @@ -561,6 +561,37 @@ _gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls) Py_RETURN_NONE; } +/*[clinic input] +_gdbm.gdbm.clear + cls: defining_class + / +Remove all items from the database. + +[clinic start generated code]*/ + +static PyObject * +_gdbm_gdbm_clear_impl(gdbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=673577c573318661 input=34136d52fcdd4210]*/ +{ + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + check_gdbmobject_open(self, state->gdbm_error); + datum key; + // Invalidate cache + self->di_size = -1; + while (1) { + key = gdbm_firstkey(self->di_dbm); + if (key.dptr == NULL) { + break; + } + if (gdbm_delete(self->di_dbm, key) < 0) { + PyErr_SetString(state->gdbm_error, "cannot delete item from database"); + return NULL; + } + } + Py_RETURN_NONE; +} + static PyObject * gdbm__enter__(PyObject *self, PyObject *args) { @@ -582,6 +613,7 @@ static PyMethodDef gdbm_methods[] = { _GDBM_GDBM_SYNC_METHODDEF _GDBM_GDBM_GET_METHODDEF _GDBM_GDBM_SETDEFAULT_METHODDEF + _GDBM_GDBM_CLEAR_METHODDEF {"__enter__", gdbm__enter__, METH_NOARGS, NULL}, {"__exit__", gdbm__exit__, METH_VARARGS, NULL}, {NULL, NULL} /* sentinel */ @@ -755,11 +787,7 @@ _gdbm_exec(PyObject *module) defined(GDBM_VERSION_PATCH) PyObject *obj = Py_BuildValue("iii", GDBM_VERSION_MAJOR, GDBM_VERSION_MINOR, GDBM_VERSION_PATCH); - if (obj == NULL) { - return -1; - } - if (PyModule_AddObject(module, "_GDBM_VERSION", obj) < 0) { - Py_DECREF(obj); + if (PyModule_Add(module, "_GDBM_VERSION", obj) < 0) { return -1; } #endif diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 246eea74098820..ee6fb8b4b03643 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -24,8 +24,9 @@ #include "Python.h" #include "pycore_hashtable.h" -#include "hashlib.h" +#include "pycore_pyhash.h" // _Py_HashBytes() #include "pycore_strhex.h" // _Py_strhex() +#include "hashlib.h" /* EVP is the preferred interface to hashing in OpenSSL */ #include @@ -1888,12 +1889,7 @@ hashlib_md_meth_names(PyObject *module) return -1; } - if (PyModule_AddObject(module, "openssl_md_meth_names", state.set) < 0) { - Py_DECREF(state.set); - return -1; - } - - return 0; + return PyModule_Add(module, "openssl_md_meth_names", state.set); } /*[clinic input] diff --git a/Modules/_heapqmodule.c b/Modules/_heapqmodule.c index 00285ae01f8574..9d4ec256ee9e3e 100644 --- a/Modules/_heapqmodule.c +++ b/Modules/_heapqmodule.c @@ -672,9 +672,7 @@ From all times, sorting has always been a Great Art! 
:-)\n"); static int heapq_exec(PyObject *m) { - PyObject *about = PyUnicode_FromString(__about__); - if (PyModule_AddObject(m, "__about__", about) < 0) { - Py_DECREF(about); + if (PyModule_Add(m, "__about__", PyUnicode_FromString(__about__)) < 0) { return -1; } return 0; diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c index bfc3d2558c9e36..0983a7bd151f40 100644 --- a/Modules/_io/bufferedio.c +++ b/Modules/_io/bufferedio.c @@ -8,11 +8,12 @@ */ #include "Python.h" +#include "pycore_bytesobject.h" // _PyBytes_Join() #include "pycore_call.h" // _PyObject_CallNoArgs() -#include "pycore_object.h" +#include "pycore_object.h" // _PyObject_GC_UNTRACK() #include "pycore_pyerrors.h" // _Py_FatalErrorFormat() #include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing() -#include "structmember.h" // PyMemberDef + #include "_iomodule.h" /*[clinic input] @@ -2477,10 +2478,10 @@ static PyMethodDef bufferedreader_methods[] = { }; static PyMemberDef bufferedreader_members[] = { - {"raw", T_OBJECT, offsetof(buffered, raw), READONLY}, - {"_finalizing", T_BOOL, offsetof(buffered, finalizing), 0}, - {"__weaklistoffset__", T_PYSSIZET, offsetof(buffered, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(buffered, dict), READONLY}, + {"raw", _Py_T_OBJECT, offsetof(buffered, raw), Py_READONLY}, + {"_finalizing", Py_T_BOOL, offsetof(buffered, finalizing), 0}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(buffered, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(buffered, dict), Py_READONLY}, {NULL} }; @@ -2537,10 +2538,10 @@ static PyMethodDef bufferedwriter_methods[] = { }; static PyMemberDef bufferedwriter_members[] = { - {"raw", T_OBJECT, offsetof(buffered, raw), READONLY}, - {"_finalizing", T_BOOL, offsetof(buffered, finalizing), 0}, - {"__weaklistoffset__", T_PYSSIZET, offsetof(buffered, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(buffered, dict), READONLY}, + {"raw", _Py_T_OBJECT, offsetof(buffered, raw), Py_READONLY}, + {"_finalizing", Py_T_BOOL, offsetof(buffered, finalizing), 0}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(buffered, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(buffered, dict), Py_READONLY}, {NULL} }; @@ -2593,8 +2594,8 @@ static PyMethodDef bufferedrwpair_methods[] = { }; static PyMemberDef bufferedrwpair_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(rwpair, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(rwpair, dict), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(rwpair, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(rwpair, dict), Py_READONLY}, {NULL} }; @@ -2655,10 +2656,10 @@ static PyMethodDef bufferedrandom_methods[] = { }; static PyMemberDef bufferedrandom_members[] = { - {"raw", T_OBJECT, offsetof(buffered, raw), READONLY}, - {"_finalizing", T_BOOL, offsetof(buffered, finalizing), 0}, - {"__weaklistoffset__", T_PYSSIZET, offsetof(buffered, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(buffered, dict), READONLY}, + {"raw", _Py_T_OBJECT, offsetof(buffered, raw), Py_READONLY}, + {"_finalizing", Py_T_BOOL, offsetof(buffered, finalizing), 0}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(buffered, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(buffered, dict), Py_READONLY}, {NULL} }; diff --git a/Modules/_io/bytesio.c b/Modules/_io/bytesio.c index 80773058693259..3ab503c9e3998d 100644 --- a/Modules/_io/bytesio.c +++ b/Modules/_io/bytesio.c @@ 
-1028,8 +1028,8 @@ static struct PyMethodDef bytesio_methods[] = { }; static PyMemberDef bytesio_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(bytesio, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(bytesio, dict), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(bytesio, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(bytesio, dict), Py_READONLY}, {NULL} }; diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c index 39709fd2931315..7fe37eee787e50 100644 --- a/Modules/_io/fileio.c +++ b/Modules/_io/fileio.c @@ -3,7 +3,7 @@ #include "Python.h" #include "pycore_fileutils.h" // _Py_BEGIN_SUPPRESS_IPH #include "pycore_object.h" // _PyObject_GC_UNTRACK() -#include "structmember.h" // PyMemberDef + #include #ifdef HAVE_SYS_TYPES_H #include @@ -1199,10 +1199,10 @@ static PyGetSetDef fileio_getsetlist[] = { }; static PyMemberDef fileio_members[] = { - {"_blksize", T_UINT, offsetof(fileio, blksize), 0}, - {"_finalizing", T_BOOL, offsetof(fileio, finalizing), 0}, - {"__weaklistoffset__", T_PYSSIZET, offsetof(fileio, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(fileio, dict), READONLY}, + {"_blksize", Py_T_UINT, offsetof(fileio, blksize), 0}, + {"_finalizing", Py_T_BOOL, offsetof(fileio, finalizing), 0}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(fileio, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(fileio, dict), Py_READONLY}, {NULL} }; diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c index e2e8ef46adf901..5fd19895311c0c 100644 --- a/Modules/_io/iobase.c +++ b/Modules/_io/iobase.c @@ -863,8 +863,8 @@ static PyGetSetDef iobase_getset[] = { }; static struct PyMemberDef iobase_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(iobase, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(iobase, dict), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(iobase, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(iobase, dict), Py_READONLY}, {NULL}, }; diff --git a/Modules/_io/stringio.c b/Modules/_io/stringio.c index 1960002d405edf..1856b07108bab6 100644 --- a/Modules/_io/stringio.c +++ b/Modules/_io/stringio.c @@ -1002,8 +1002,8 @@ static PyGetSetDef stringio_getset[] = { }; static struct PyMemberDef stringio_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(stringio, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(stringio, dict), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(stringio, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(stringio, dict), Py_READONLY}, {NULL}, }; diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c index a5cf9fc397f5fe..6ce90b2ed774c0 100644 --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -14,7 +14,7 @@ #include "pycore_fileutils.h" // _Py_GetLocaleEncoding() #include "pycore_object.h" // _PyObject_GC_UNTRACK() #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "structmember.h" // PyMemberDef + #include "_iomodule.h" /*[clinic input] @@ -234,7 +234,7 @@ _io_IncrementalNewlineDecoder___init___impl(nldecoder_object *self, { if (errors == NULL) { - errors = Py_NewRef(&_Py_ID(strict)); + errors = &_Py_ID(strict); } else { errors = Py_NewRef(errors); @@ -1138,7 +1138,7 @@ _io_TextIOWrapper___init___impl(textio *self, PyObject *buffer, if (encoding == NULL && _PyRuntime.preconfig.utf8_mode) { _Py_DECLARE_STR(utf_8, "utf-8"); - self->encoding = Py_NewRef(&_Py_STR(utf_8)); + self->encoding = 
&_Py_STR(utf_8); } else if (encoding == NULL || (strcmp(encoding, "locale") == 0)) { self->encoding = _Py_GetLocaleEncodingObject(); @@ -2267,7 +2267,7 @@ _textiowrapper_readline(textio *self, Py_ssize_t limit) Py_CLEAR(chunks); } if (line == NULL) { - line = Py_NewRef(&_Py_STR(empty)); + line = &_Py_STR(empty); } return line; @@ -3230,13 +3230,13 @@ static PyMethodDef textiowrapper_methods[] = { }; static PyMemberDef textiowrapper_members[] = { - {"encoding", T_OBJECT, offsetof(textio, encoding), READONLY}, - {"buffer", T_OBJECT, offsetof(textio, buffer), READONLY}, - {"line_buffering", T_BOOL, offsetof(textio, line_buffering), READONLY}, - {"write_through", T_BOOL, offsetof(textio, write_through), READONLY}, - {"_finalizing", T_BOOL, offsetof(textio, finalizing), 0}, - {"__weaklistoffset__", T_PYSSIZET, offsetof(textio, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(textio, dict), READONLY}, + {"encoding", _Py_T_OBJECT, offsetof(textio, encoding), Py_READONLY}, + {"buffer", _Py_T_OBJECT, offsetof(textio, buffer), Py_READONLY}, + {"line_buffering", Py_T_BOOL, offsetof(textio, line_buffering), Py_READONLY}, + {"write_through", Py_T_BOOL, offsetof(textio, write_through), Py_READONLY}, + {"_finalizing", Py_T_BOOL, offsetof(textio, finalizing), 0}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(textio, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(textio, dict), Py_READONLY}, {NULL} }; diff --git a/Modules/_io/winconsoleio.c b/Modules/_io/winconsoleio.c index 452b12c138fa8b..a1ed7eb61e47b5 100644 --- a/Modules/_io/winconsoleio.c +++ b/Modules/_io/winconsoleio.c @@ -12,7 +12,7 @@ #ifdef HAVE_WINDOWS_CONSOLE_IO -#include "structmember.h" // PyMemberDef + #ifdef HAVE_SYS_TYPES_H #include #endif @@ -1141,10 +1141,10 @@ static PyGetSetDef winconsoleio_getsetlist[] = { }; static PyMemberDef winconsoleio_members[] = { - {"_blksize", T_UINT, offsetof(winconsoleio, blksize), 0}, - {"_finalizing", T_BOOL, offsetof(winconsoleio, finalizing), 0}, - {"__weaklistoffset__", T_PYSSIZET, offsetof(winconsoleio, weakreflist), READONLY}, - {"__dictoffset__", T_PYSSIZET, offsetof(winconsoleio, dict), READONLY}, + {"_blksize", Py_T_UINT, offsetof(winconsoleio, blksize), 0}, + {"_finalizing", Py_T_BOOL, offsetof(winconsoleio, finalizing), 0}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(winconsoleio, weakreflist), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(winconsoleio, dict), Py_READONLY}, {NULL} }; diff --git a/Modules/_json.c b/Modules/_json.c index 2d0e30d70932bd..c7cfe50b52faff 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -11,7 +11,7 @@ #include "Python.h" #include "pycore_ceval.h" // _Py_EnterRecursiveCall() #include "pycore_runtime.h" // _PyRuntime -#include "structmember.h" // PyMemberDef + #include "pycore_global_objects.h" // _Py_ID() #include // bool @@ -28,12 +28,12 @@ typedef struct _PyScannerObject { } PyScannerObject; static PyMemberDef scanner_members[] = { - {"strict", T_BOOL, offsetof(PyScannerObject, strict), READONLY, "strict"}, - {"object_hook", T_OBJECT, offsetof(PyScannerObject, object_hook), READONLY, "object_hook"}, - {"object_pairs_hook", T_OBJECT, offsetof(PyScannerObject, object_pairs_hook), READONLY}, - {"parse_float", T_OBJECT, offsetof(PyScannerObject, parse_float), READONLY, "parse_float"}, - {"parse_int", T_OBJECT, offsetof(PyScannerObject, parse_int), READONLY, "parse_int"}, - {"parse_constant", T_OBJECT, offsetof(PyScannerObject, parse_constant), READONLY, "parse_constant"}, + {"strict", Py_T_BOOL, 
offsetof(PyScannerObject, strict), Py_READONLY, "strict"}, + {"object_hook", _Py_T_OBJECT, offsetof(PyScannerObject, object_hook), Py_READONLY, "object_hook"}, + {"object_pairs_hook", _Py_T_OBJECT, offsetof(PyScannerObject, object_pairs_hook), Py_READONLY}, + {"parse_float", _Py_T_OBJECT, offsetof(PyScannerObject, parse_float), Py_READONLY, "parse_float"}, + {"parse_int", _Py_T_OBJECT, offsetof(PyScannerObject, parse_int), Py_READONLY, "parse_int"}, + {"parse_constant", _Py_T_OBJECT, offsetof(PyScannerObject, parse_constant), Py_READONLY, "parse_constant"}, {NULL} }; @@ -52,14 +52,14 @@ typedef struct _PyEncoderObject { } PyEncoderObject; static PyMemberDef encoder_members[] = { - {"markers", T_OBJECT, offsetof(PyEncoderObject, markers), READONLY, "markers"}, - {"default", T_OBJECT, offsetof(PyEncoderObject, defaultfn), READONLY, "default"}, - {"encoder", T_OBJECT, offsetof(PyEncoderObject, encoder), READONLY, "encoder"}, - {"indent", T_OBJECT, offsetof(PyEncoderObject, indent), READONLY, "indent"}, - {"key_separator", T_OBJECT, offsetof(PyEncoderObject, key_separator), READONLY, "key_separator"}, - {"item_separator", T_OBJECT, offsetof(PyEncoderObject, item_separator), READONLY, "item_separator"}, - {"sort_keys", T_BOOL, offsetof(PyEncoderObject, sort_keys), READONLY, "sort_keys"}, - {"skipkeys", T_BOOL, offsetof(PyEncoderObject, skipkeys), READONLY, "skipkeys"}, + {"markers", _Py_T_OBJECT, offsetof(PyEncoderObject, markers), Py_READONLY, "markers"}, + {"default", _Py_T_OBJECT, offsetof(PyEncoderObject, defaultfn), Py_READONLY, "default"}, + {"encoder", _Py_T_OBJECT, offsetof(PyEncoderObject, encoder), Py_READONLY, "encoder"}, + {"indent", _Py_T_OBJECT, offsetof(PyEncoderObject, indent), Py_READONLY, "indent"}, + {"key_separator", _Py_T_OBJECT, offsetof(PyEncoderObject, key_separator), Py_READONLY, "key_separator"}, + {"item_separator", _Py_T_OBJECT, offsetof(PyEncoderObject, item_separator), Py_READONLY, "item_separator"}, + {"sort_keys", Py_T_BOOL, offsetof(PyEncoderObject, sort_keys), Py_READONLY, "sort_keys"}, + {"skipkeys", Py_T_BOOL, offsetof(PyEncoderObject, skipkeys), Py_READONLY, "skipkeys"}, {NULL} }; @@ -1277,13 +1277,13 @@ _encoded_const(PyObject *obj) { /* Return the JSON string representation of None, True, False */ if (obj == Py_None) { - return Py_NewRef(&_Py_ID(null)); + return &_Py_ID(null); } else if (obj == Py_True) { - return Py_NewRef(&_Py_ID(true)); + return &_Py_ID(true); } else if (obj == Py_False) { - return Py_NewRef(&_Py_ID(false)); + return &_Py_ID(false); } else { PyErr_SetString(PyExc_ValueError, "not a const"); diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c index 970530facd01b0..34915de7515d63 100644 --- a/Modules/_localemodule.c +++ b/Modules/_localemodule.c @@ -844,12 +844,7 @@ _locale_exec(PyObject *module) _locale_state *state = get_locale_state(module); state->Error = PyErr_NewException("locale.Error", NULL, NULL); - if (state->Error == NULL) { - return -1; - } - Py_INCREF(get_locale_state(module)->Error); - if (PyModule_AddObject(module, "Error", get_locale_state(module)->Error) < 0) { - Py_DECREF(get_locale_state(module)->Error); + if (PyModule_AddObjectRef(module, "Error", state->Error) < 0) { return -1; } diff --git a/Modules/_lzmamodule.c b/Modules/_lzmamodule.c index 02a32ddccb3ed7..c548f8fa3839e0 100644 --- a/Modules/_lzmamodule.c +++ b/Modules/_lzmamodule.c @@ -6,7 +6,7 @@ */ #include "Python.h" -#include "structmember.h" // PyMemberDef + #include // free() #include @@ -1338,13 +1338,13 @@ 
PyDoc_STRVAR(Decompressor_unused_data_doc, "Data found after the end of the compressed stream."); static PyMemberDef Decompressor_members[] = { - {"check", T_INT, offsetof(Decompressor, check), READONLY, + {"check", Py_T_INT, offsetof(Decompressor, check), Py_READONLY, Decompressor_check_doc}, - {"eof", T_BOOL, offsetof(Decompressor, eof), READONLY, + {"eof", Py_T_BOOL, offsetof(Decompressor, eof), Py_READONLY, Decompressor_eof_doc}, - {"needs_input", T_BOOL, offsetof(Decompressor, needs_input), READONLY, + {"needs_input", Py_T_BOOL, offsetof(Decompressor, needs_input), Py_READONLY, Decompressor_needs_input_doc}, - {"unused_data", T_OBJECT_EX, offsetof(Decompressor, unused_data), READONLY, + {"unused_data", Py_T_OBJECT_EX, offsetof(Decompressor, unused_data), Py_READONLY, Decompressor_unused_data_doc}, {NULL} }; @@ -1498,15 +1498,7 @@ _lzma__decode_filter_properties_impl(PyObject *module, lzma_vli filter_id, static int module_add_int_constant(PyObject *m, const char *name, long long value) { - PyObject *o = PyLong_FromLongLong(value); - if (o == NULL) { - return -1; - } - if (PyModule_AddObject(m, name, o) == 0) { - return 0; - } - Py_DECREF(o); - return -1; + return PyModule_Add(m, name, PyLong_FromLongLong(value)); } static int diff --git a/Modules/_multiprocessing/multiprocessing.c b/Modules/_multiprocessing/multiprocessing.c index 8f9daa5c3de0cc..16b5cb5dd9ec7a 100644 --- a/Modules/_multiprocessing/multiprocessing.c +++ b/Modules/_multiprocessing/multiprocessing.c @@ -266,8 +266,7 @@ multiprocessing_exec(PyObject *module) ADD_FLAG(HAVE_BROKEN_SEM_UNLINK); #endif - if (PyModule_AddObject(module, "flags", flags) < 0) { - Py_DECREF(flags); + if (PyModule_Add(module, "flags", flags) < 0) { return -1; } diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c index 897b8db7110a41..771f86e5367af3 100644 --- a/Modules/_multiprocessing/semaphore.c +++ b/Modules/_multiprocessing/semaphore.c @@ -734,13 +734,13 @@ static PyMethodDef semlock_methods[] = { */ static PyMemberDef semlock_members[] = { - {"handle", T_SEM_HANDLE, offsetof(SemLockObject, handle), READONLY, + {"handle", T_SEM_HANDLE, offsetof(SemLockObject, handle), Py_READONLY, ""}, - {"kind", T_INT, offsetof(SemLockObject, kind), READONLY, + {"kind", Py_T_INT, offsetof(SemLockObject, kind), Py_READONLY, ""}, - {"maxvalue", T_INT, offsetof(SemLockObject, maxvalue), READONLY, + {"maxvalue", Py_T_INT, offsetof(SemLockObject, maxvalue), Py_READONLY, ""}, - {"name", T_STRING, offsetof(SemLockObject, name), READONLY, + {"name", Py_T_STRING, offsetof(SemLockObject, name), Py_READONLY, ""}, {NULL} }; diff --git a/Modules/_opcode.c b/Modules/_opcode.c index daabdce1655777..ad0fa736f82767 100644 --- a/Modules/_opcode.c +++ b/Modules/_opcode.c @@ -2,6 +2,7 @@ #include "compile.h" #include "opcode.h" #include "internal/pycore_code.h" +#include "internal/pycore_intrinsics.h" /*[clinic input] module _opcode @@ -220,6 +221,60 @@ _opcode_get_specialization_stats_impl(PyObject *module) #endif } +/*[clinic input] + +_opcode.get_intrinsic1_descs + +Return a list of names of the unary intrinsics. 
+[clinic start generated code]*/ + +static PyObject * +_opcode_get_intrinsic1_descs_impl(PyObject *module) +/*[clinic end generated code: output=bd1ddb6b4447d18b input=13b51c712618459b]*/ +{ + PyObject *list = PyList_New(MAX_INTRINSIC_1 + 1); + if (list == NULL) { + return NULL; + } + for (int i=0; i <= MAX_INTRINSIC_1; i++) { + PyObject *name = _PyUnstable_GetUnaryIntrinsicName(i); + if (name == NULL) { + Py_DECREF(list); + return NULL; + } + PyList_SET_ITEM(list, i, name); + } + return list; +} + + +/*[clinic input] + +_opcode.get_intrinsic2_descs + +Return a list of names of the binary intrinsics. +[clinic start generated code]*/ + +static PyObject * +_opcode_get_intrinsic2_descs_impl(PyObject *module) +/*[clinic end generated code: output=40e62bc27584c8a0 input=e83068f249f5471b]*/ +{ + PyObject *list = PyList_New(MAX_INTRINSIC_2 + 1); + if (list == NULL) { + return NULL; + } + for (int i=0; i <= MAX_INTRINSIC_2; i++) { + PyObject *name = _PyUnstable_GetBinaryIntrinsicName(i); + if (name == NULL) { + Py_DECREF(list); + return NULL; + } + PyList_SET_ITEM(list, i, name); + } + return list; +} + + static PyMethodDef opcode_functions[] = { _OPCODE_STACK_EFFECT_METHODDEF @@ -232,10 +287,21 @@ opcode_functions[] = { _OPCODE_HAS_LOCAL_METHODDEF _OPCODE_HAS_EXC_METHODDEF _OPCODE_GET_SPECIALIZATION_STATS_METHODDEF + _OPCODE_GET_INTRINSIC1_DESCS_METHODDEF + _OPCODE_GET_INTRINSIC2_DESCS_METHODDEF {NULL, NULL, 0, NULL} }; +int +_opcode_exec(PyObject *m) { + if (PyModule_AddIntMacro(m, ENABLE_SPECIALIZATION) < 0) { + return -1; + } + return 0; +} + static PyModuleDef_Slot module_slots[] = { + {Py_mod_exec, _opcode_exec}, {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED}, {0, NULL} }; diff --git a/Modules/_operator.c b/Modules/_operator.c index 108f45fb6dad93..1f6496d381adac 100644 --- a/Modules/_operator.c +++ b/Modules/_operator.c @@ -3,7 +3,7 @@ #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_runtime.h" // _Py_ID() -#include "structmember.h" // PyMemberDef + #include "clinic/_operator.c.h" typedef struct { @@ -1153,7 +1153,7 @@ static PyMethodDef itemgetter_methods[] = { }; static PyMemberDef itemgetter_members[] = { - {"__vectorcalloffset__", T_PYSSIZET, offsetof(itemgetterobject, vectorcall), READONLY}, + {"__vectorcalloffset__", Py_T_PYSSIZET, offsetof(itemgetterobject, vectorcall), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -1508,7 +1508,7 @@ static PyMethodDef attrgetter_methods[] = { }; static PyMemberDef attrgetter_members[] = { - {"__vectorcalloffset__", T_PYSSIZET, offsetof(attrgetterobject, vectorcall), READONLY}, + {"__vectorcalloffset__", Py_T_PYSSIZET, offsetof(attrgetterobject, vectorcall), Py_READONLY}, {NULL} /* Sentinel*/ }; @@ -1549,10 +1549,77 @@ static PyType_Spec attrgetter_type_spec = { typedef struct { PyObject_HEAD PyObject *name; - PyObject *args; + PyObject *xargs; // reference to arguments passed in constructor PyObject *kwds; + PyObject **vectorcall_args; /* Borrowed references */ + PyObject *vectorcall_kwnames; + vectorcallfunc vectorcall; } methodcallerobject; +static int _methodcaller_initialize_vectorcall(methodcallerobject* mc) +{ + PyObject* args = mc->xargs; + PyObject* kwds = mc->kwds; + + Py_ssize_t nargs = PyTuple_GET_SIZE(args); + assert(nargs > 0); + mc->vectorcall_args = PyMem_Calloc( + nargs + (kwds ? 
PyDict_Size(kwds) : 0), + sizeof(PyObject*)); + if (!mc->vectorcall_args) { + PyErr_NoMemory(); + return -1; + } + /* The first item of vectorcall_args will be filled with obj later */ + if (nargs > 1) { + memcpy(mc->vectorcall_args, PySequence_Fast_ITEMS(args), + nargs * sizeof(PyObject*)); + } + if (kwds) { + const Py_ssize_t nkwds = PyDict_Size(kwds); + + mc->vectorcall_kwnames = PyTuple_New(nkwds); + if (!mc->vectorcall_kwnames) { + return -1; + } + Py_ssize_t i = 0, ppos = 0; + PyObject* key, * value; + while (PyDict_Next(kwds, &ppos, &key, &value)) { + PyTuple_SET_ITEM(mc->vectorcall_kwnames, i, Py_NewRef(key)); + mc->vectorcall_args[nargs + i] = value; // borrowed reference + ++i; + } + } + else { + mc->vectorcall_kwnames = NULL; + } + return 1; +} + + +static PyObject * +methodcaller_vectorcall( + methodcallerobject *mc, PyObject *const *args, size_t nargsf, PyObject* kwnames) +{ + if (!_PyArg_CheckPositional("methodcaller", PyVectorcall_NARGS(nargsf), 1, 1) + || !_PyArg_NoKwnames("methodcaller", kwnames)) { + return NULL; + } + if (mc->vectorcall_args == NULL) { + if (_methodcaller_initialize_vectorcall(mc) < 0) { + return NULL; + } + } + + assert(mc->vectorcall_args != 0); + mc->vectorcall_args[0] = args[0]; + return PyObject_VectorcallMethod( + mc->name, mc->vectorcall_args, + (PyTuple_GET_SIZE(mc->xargs)) | PY_VECTORCALL_ARGUMENTS_OFFSET, + mc->vectorcall_kwnames); +} + + /* AC 3.5: variable number of arguments, not currently support by AC */ static PyObject * methodcaller_new(PyTypeObject *type, PyObject *args, PyObject *kwds) @@ -1580,30 +1647,32 @@ methodcaller_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return NULL; } - name = PyTuple_GET_ITEM(args, 0); Py_INCREF(name); PyUnicode_InternInPlace(&name); mc->name = name; + mc->xargs = Py_XNewRef(args); // allows us to use borrowed references mc->kwds = Py_XNewRef(kwds); + mc->vectorcall_args = 0; - mc->args = PyTuple_GetSlice(args, 1, PyTuple_GET_SIZE(args)); - if (mc->args == NULL) { - Py_DECREF(mc); - return NULL; - } + + mc->vectorcall = (vectorcallfunc)methodcaller_vectorcall; PyObject_GC_Track(mc); return (PyObject *)mc; } -static int +static void methodcaller_clear(methodcallerobject *mc) { Py_CLEAR(mc->name); - Py_CLEAR(mc->args); + Py_CLEAR(mc->xargs); Py_CLEAR(mc->kwds); - return 0; + if (mc->vectorcall_args != NULL) { + PyMem_Free(mc->vectorcall_args); + mc->vectorcall_args = 0; + Py_CLEAR(mc->vectorcall_kwnames); + } } static void @@ -1611,7 +1680,7 @@ methodcaller_dealloc(methodcallerobject *mc) { PyTypeObject *tp = Py_TYPE(mc); PyObject_GC_UnTrack(mc); - (void)methodcaller_clear(mc); + methodcaller_clear(mc); tp->tp_free(mc); Py_DECREF(tp); } @@ -1620,7 +1689,7 @@ static int methodcaller_traverse(methodcallerobject *mc, visitproc visit, void *arg) { Py_VISIT(mc->name); - Py_VISIT(mc->args); + Py_VISIT(mc->xargs); Py_VISIT(mc->kwds); Py_VISIT(Py_TYPE(mc)); return 0; @@ -1639,7 +1708,16 @@ methodcaller_call(methodcallerobject *mc, PyObject *args, PyObject *kw) method = PyObject_GetAttr(obj, mc->name); if (method == NULL) return NULL; - result = PyObject_Call(method, mc->args, mc->kwds); + + + PyObject *cargs = PyTuple_GetSlice(mc->xargs, 1, PyTuple_GET_SIZE(mc->xargs)); + if (cargs == NULL) { + Py_DECREF(method); + return NULL; + } + + result = PyObject_Call(method, cargs, mc->kwds); + Py_DECREF(cargs); Py_DECREF(method); return result; } @@ -1657,7 +1735,7 @@ methodcaller_repr(methodcallerobject *mc) } numkwdargs = mc->kwds != NULL ? 
PyDict_GET_SIZE(mc->kwds) : 0; - numposargs = PyTuple_GET_SIZE(mc->args); + numposargs = PyTuple_GET_SIZE(mc->xargs) - 1; numtotalargs = numposargs + numkwdargs; if (numtotalargs == 0) { @@ -1673,7 +1751,7 @@ methodcaller_repr(methodcallerobject *mc) } for (i = 0; i < numposargs; ++i) { - PyObject *onerepr = PyObject_Repr(PyTuple_GET_ITEM(mc->args, i)); + PyObject *onerepr = PyObject_Repr(PyTuple_GET_ITEM(mc->xargs, i+1)); if (onerepr == NULL) goto done; PyTuple_SET_ITEM(argreprs, i, onerepr); @@ -1723,17 +1801,16 @@ methodcaller_repr(methodcallerobject *mc) static PyObject * methodcaller_reduce(methodcallerobject *mc, PyObject *Py_UNUSED(ignored)) { - PyObject *newargs; if (!mc->kwds || PyDict_GET_SIZE(mc->kwds) == 0) { Py_ssize_t i; - Py_ssize_t callargcount = PyTuple_GET_SIZE(mc->args); - newargs = PyTuple_New(1 + callargcount); + Py_ssize_t newarg_size = PyTuple_GET_SIZE(mc->xargs); + PyObject *newargs = PyTuple_New(newarg_size); if (newargs == NULL) return NULL; PyTuple_SET_ITEM(newargs, 0, Py_NewRef(mc->name)); - for (i = 0; i < callargcount; ++i) { - PyObject *arg = PyTuple_GET_ITEM(mc->args, i); - PyTuple_SET_ITEM(newargs, i + 1, Py_NewRef(arg)); + for (i = 1; i < newarg_size; ++i) { + PyObject *arg = PyTuple_GET_ITEM(mc->xargs, i); + PyTuple_SET_ITEM(newargs, i, Py_NewRef(arg)); } return Py_BuildValue("ON", Py_TYPE(mc), newargs); } @@ -1751,7 +1828,12 @@ methodcaller_reduce(methodcallerobject *mc, PyObject *Py_UNUSED(ignored)) constructor = PyObject_VectorcallDict(partial, newargs, 2, mc->kwds); Py_DECREF(partial); - return Py_BuildValue("NO", constructor, mc->args); + PyObject *args = PyTuple_GetSlice(mc->xargs, 1, PyTuple_GET_SIZE(mc->xargs)); + if (!args) { + Py_DECREF(constructor); + return NULL; + } + return Py_BuildValue("NO", constructor, args); } } @@ -1760,6 +1842,12 @@ static PyMethodDef methodcaller_methods[] = { reduce_doc}, {NULL} }; + +static PyMemberDef methodcaller_members[] = { + {"__vectorcalloffset__", Py_T_PYSSIZET, offsetof(methodcallerobject, vectorcall), Py_READONLY}, + {NULL} +}; + PyDoc_STRVAR(methodcaller_doc, "methodcaller(name, /, *args, **kwargs)\n--\n\n\ Return a callable object that calls the given method on its operand.\n\ @@ -1774,6 +1862,7 @@ static PyType_Slot methodcaller_type_slots[] = { {Py_tp_traverse, methodcaller_traverse}, {Py_tp_clear, methodcaller_clear}, {Py_tp_methods, methodcaller_methods}, + {Py_tp_members, methodcaller_members}, {Py_tp_new, methodcaller_new}, {Py_tp_getattro, PyObject_GenericGetAttr}, {Py_tp_repr, methodcaller_repr}, @@ -1785,7 +1874,7 @@ static PyType_Spec methodcaller_type_spec = { .basicsize = sizeof(methodcallerobject), .itemsize = 0, .flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC | - Py_TPFLAGS_IMMUTABLETYPE), + Py_TPFLAGS_HAVE_VECTORCALL | Py_TPFLAGS_IMMUTABLETYPE), .slots = methodcaller_type_slots, }; diff --git a/Modules/_pickle.c b/Modules/_pickle.c index ea44b494cdd7cd..c2b04cc513a664 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -12,9 +12,11 @@ #include "pycore_bytesobject.h" // _PyBytesWriter #include "pycore_ceval.h" // _Py_EnterRecursiveCall() #include "pycore_moduleobject.h" // _PyModule_GetState() +#include "pycore_object.h" // _PyNone_Type #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_runtime.h" // _Py_ID() -#include "structmember.h" // PyMemberDef +#include "pycore_setobject.h" // _PySet_NextEntry() + #include // strtol() @@ -2027,8 +2029,7 @@ whichmodule(PyObject *global, PyObject *dotted_path) } /* If no module is found, use __main__. 
*/ - module_name = &_Py_ID(__main__); - return Py_NewRef(module_name); + return &_Py_ID(__main__); } /* fast_save_enter() and fast_save_leave() are guards against recursive @@ -5072,9 +5073,9 @@ Pickler_set_persid(PicklerObject *self, PyObject *value, void *Py_UNUSED(ignored } static PyMemberDef Pickler_members[] = { - {"bin", T_INT, offsetof(PicklerObject, bin)}, - {"fast", T_INT, offsetof(PicklerObject, fast)}, - {"dispatch_table", T_OBJECT_EX, offsetof(PicklerObject, dispatch_table)}, + {"bin", Py_T_INT, offsetof(PicklerObject, bin)}, + {"fast", Py_T_INT, offsetof(PicklerObject, fast)}, + {"dispatch_table", Py_T_OBJECT_EX, offsetof(PicklerObject, dispatch_table)}, {NULL} }; diff --git a/Modules/_queuemodule.c b/Modules/_queuemodule.c index 69cc05135c2a72..b0a36f03694507 100644 --- a/Modules/_queuemodule.c +++ b/Modules/_queuemodule.c @@ -5,7 +5,7 @@ #include "Python.h" #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_time.h" // _PyTime_t -#include "structmember.h" // PyMemberDef + #include // offsetof() typedef struct { @@ -373,7 +373,7 @@ static PyMethodDef simplequeue_methods[] = { }; static struct PyMemberDef simplequeue_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(simplequeueobject, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(simplequeueobject, weakreflist), Py_READONLY}, {NULL}, }; diff --git a/Modules/_sqlite/blob.c b/Modules/_sqlite/blob.c index 989d9a83b590ca..f099020c5f4e6f 100644 --- a/Modules/_sqlite/blob.c +++ b/Modules/_sqlite/blob.c @@ -577,7 +577,7 @@ static PyMethodDef blob_methods[] = { }; static struct PyMemberDef blob_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(pysqlite_Blob, in_weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(pysqlite_Blob, in_weakreflist), Py_READONLY}, {NULL}, }; diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c index bab743674b666d..ddd7ace81198bb 100644 --- a/Modules/_sqlite/connection.c +++ b/Modules/_sqlite/connection.c @@ -26,7 +26,7 @@ #endif #include "module.h" -#include "structmember.h" // PyMemberDef + #include "connection.h" #include "statement.h" #include "cursor.h" @@ -2511,18 +2511,18 @@ static PyMethodDef connection_methods[] = { static struct PyMemberDef connection_members[] = { - {"Warning", T_OBJECT, offsetof(pysqlite_Connection, Warning), READONLY}, - {"Error", T_OBJECT, offsetof(pysqlite_Connection, Error), READONLY}, - {"InterfaceError", T_OBJECT, offsetof(pysqlite_Connection, InterfaceError), READONLY}, - {"DatabaseError", T_OBJECT, offsetof(pysqlite_Connection, DatabaseError), READONLY}, - {"DataError", T_OBJECT, offsetof(pysqlite_Connection, DataError), READONLY}, - {"OperationalError", T_OBJECT, offsetof(pysqlite_Connection, OperationalError), READONLY}, - {"IntegrityError", T_OBJECT, offsetof(pysqlite_Connection, IntegrityError), READONLY}, - {"InternalError", T_OBJECT, offsetof(pysqlite_Connection, InternalError), READONLY}, - {"ProgrammingError", T_OBJECT, offsetof(pysqlite_Connection, ProgrammingError), READONLY}, - {"NotSupportedError", T_OBJECT, offsetof(pysqlite_Connection, NotSupportedError), READONLY}, - {"row_factory", T_OBJECT, offsetof(pysqlite_Connection, row_factory)}, - {"text_factory", T_OBJECT, offsetof(pysqlite_Connection, text_factory)}, + {"Warning", _Py_T_OBJECT, offsetof(pysqlite_Connection, Warning), Py_READONLY}, + {"Error", _Py_T_OBJECT, offsetof(pysqlite_Connection, Error), Py_READONLY}, + {"InterfaceError", _Py_T_OBJECT, offsetof(pysqlite_Connection, InterfaceError), 
Py_READONLY}, + {"DatabaseError", _Py_T_OBJECT, offsetof(pysqlite_Connection, DatabaseError), Py_READONLY}, + {"DataError", _Py_T_OBJECT, offsetof(pysqlite_Connection, DataError), Py_READONLY}, + {"OperationalError", _Py_T_OBJECT, offsetof(pysqlite_Connection, OperationalError), Py_READONLY}, + {"IntegrityError", _Py_T_OBJECT, offsetof(pysqlite_Connection, IntegrityError), Py_READONLY}, + {"InternalError", _Py_T_OBJECT, offsetof(pysqlite_Connection, InternalError), Py_READONLY}, + {"ProgrammingError", _Py_T_OBJECT, offsetof(pysqlite_Connection, ProgrammingError), Py_READONLY}, + {"NotSupportedError", _Py_T_OBJECT, offsetof(pysqlite_Connection, NotSupportedError), Py_READONLY}, + {"row_factory", _Py_T_OBJECT, offsetof(pysqlite_Connection, row_factory)}, + {"text_factory", _Py_T_OBJECT, offsetof(pysqlite_Connection, text_factory)}, {NULL} }; diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c index dba8ab61e41e70..618ce532b2518d 100644 --- a/Modules/_sqlite/cursor.c +++ b/Modules/_sqlite/cursor.c @@ -1325,13 +1325,13 @@ static PyMethodDef cursor_methods[] = { static struct PyMemberDef cursor_members[] = { - {"connection", T_OBJECT, offsetof(pysqlite_Cursor, connection), READONLY}, - {"description", T_OBJECT, offsetof(pysqlite_Cursor, description), READONLY}, - {"arraysize", T_INT, offsetof(pysqlite_Cursor, arraysize), 0}, - {"lastrowid", T_OBJECT, offsetof(pysqlite_Cursor, lastrowid), READONLY}, - {"rowcount", T_LONG, offsetof(pysqlite_Cursor, rowcount), READONLY}, - {"row_factory", T_OBJECT, offsetof(pysqlite_Cursor, row_factory), 0}, - {"__weaklistoffset__", T_PYSSIZET, offsetof(pysqlite_Cursor, in_weakreflist), READONLY}, + {"connection", _Py_T_OBJECT, offsetof(pysqlite_Cursor, connection), Py_READONLY}, + {"description", _Py_T_OBJECT, offsetof(pysqlite_Cursor, description), Py_READONLY}, + {"arraysize", Py_T_INT, offsetof(pysqlite_Cursor, arraysize), 0}, + {"lastrowid", _Py_T_OBJECT, offsetof(pysqlite_Cursor, lastrowid), Py_READONLY}, + {"rowcount", Py_T_LONG, offsetof(pysqlite_Cursor, rowcount), Py_READONLY}, + {"row_factory", _Py_T_OBJECT, offsetof(pysqlite_Cursor, row_factory), 0}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(pysqlite_Cursor, in_weakreflist), Py_READONLY}, {NULL} }; diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c index ddbdc9f478aab3..c4e43a0db0f5d3 100644 --- a/Modules/_sre/sre.c +++ b/Modules/_sre/sre.c @@ -41,7 +41,7 @@ static const char copyright[] = #include "Python.h" #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_moduleobject.h" // _PyModule_GetState() -#include "structmember.h" // PyMemberDef + #include "sre.h" @@ -2994,13 +2994,13 @@ static PyGetSetDef pattern_getset[] = { #define PAT_OFF(x) offsetof(PatternObject, x) static PyMemberDef pattern_members[] = { - {"pattern", T_OBJECT, PAT_OFF(pattern), READONLY, + {"pattern", _Py_T_OBJECT, PAT_OFF(pattern), Py_READONLY, "The pattern string from which the RE object was compiled."}, - {"flags", T_INT, PAT_OFF(flags), READONLY, + {"flags", Py_T_INT, PAT_OFF(flags), Py_READONLY, "The regex matching flags."}, - {"groups", T_PYSSIZET, PAT_OFF(groups), READONLY, + {"groups", Py_T_PYSSIZET, PAT_OFF(groups), Py_READONLY, "The number of capturing groups in the pattern."}, - {"__weaklistoffset__", T_PYSSIZET, offsetof(PatternObject, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(PatternObject, weakreflist), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -3053,13 +3053,13 @@ static PyGetSetDef match_getset[] = { #define MATCH_OFF(x) offsetof(MatchObject, x) static 
PyMemberDef match_members[] = { - {"string", T_OBJECT, MATCH_OFF(string), READONLY, + {"string", _Py_T_OBJECT, MATCH_OFF(string), Py_READONLY, "The string passed to match() or search()."}, - {"re", T_OBJECT, MATCH_OFF(pattern), READONLY, + {"re", _Py_T_OBJECT, MATCH_OFF(pattern), Py_READONLY, "The regular expression object."}, - {"pos", T_PYSSIZET, MATCH_OFF(pos), READONLY, + {"pos", Py_T_PYSSIZET, MATCH_OFF(pos), Py_READONLY, "The index into the string at which the RE engine started looking for a match."}, - {"endpos", T_PYSSIZET, MATCH_OFF(endpos), READONLY, + {"endpos", Py_T_PYSSIZET, MATCH_OFF(endpos), Py_READONLY, "The index into the string beyond which the RE engine will not go."}, {NULL} }; @@ -3103,7 +3103,7 @@ static PyMethodDef scanner_methods[] = { #define SCAN_OFF(x) offsetof(ScannerObject, x) static PyMemberDef scanner_members[] = { - {"pattern", T_OBJECT, SCAN_OFF(pattern), READONLY}, + {"pattern", _Py_T_OBJECT, SCAN_OFF(pattern), Py_READONLY}, {NULL} /* Sentinel */ }; diff --git a/Modules/_ssl.c b/Modules/_ssl.c index ed720b4295f8ec..c001b875906d44 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -26,6 +26,7 @@ #define OPENSSL_NO_DEPRECATED 1 #include "Python.h" +#include "pycore_fileutils.h" // _PyIsSelectable_fd() #include "pycore_weakref.h" // _PyWeakref_GET_REF() /* Include symbols from _socket module */ @@ -666,6 +667,10 @@ PySSL_SetError(PySSLSocket *sslsock, int ret, const char *filename, int lineno) errstr = "Some I/O error occurred"; } } else { + if (ERR_GET_LIB(e) == ERR_LIB_SSL && + ERR_GET_REASON(e) == SSL_R_CERTIFICATE_VERIFY_FAILED) { + type = state->PySSLCertVerificationErrorObject; + } p = PY_SSL_ERROR_SYSCALL; } break; @@ -5985,7 +5990,7 @@ sslmodule_init_constants(PyObject *m) #define addbool(m, key, value) \ do { \ PyObject *bool_obj = (value) ? 
Py_True : Py_False; \ - PyModule_AddObject((m), (key), Py_NewRef(bool_obj)); \ + PyModule_AddObjectRef((m), (key), bool_obj); \ } while (0) addbool(m, "HAS_SNI", 1); diff --git a/Modules/_struct.c b/Modules/_struct.c index 31c94927e91d68..425715ad030d4d 100644 --- a/Modules/_struct.c +++ b/Modules/_struct.c @@ -10,8 +10,9 @@ #include "Python.h" #include "pycore_bytesobject.h" // _PyBytesWriter #include "pycore_moduleobject.h" // _PyModule_GetState() -#include "structmember.h" // PyMemberDef + #include +#include // offsetof() /*[clinic input] class Struct "PyStructObject *" "&PyStructType" @@ -2176,7 +2177,7 @@ static struct PyMethodDef s_methods[] = { }; static PyMemberDef s_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(PyStructObject, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(PyStructObject, weakreflist), Py_READONLY}, {NULL} /* sentinel */ }; diff --git a/Modules/_testcapi/abstract.c b/Modules/_testcapi/abstract.c new file mode 100644 index 00000000000000..10d7ff8d4a7bc8 --- /dev/null +++ b/Modules/_testcapi/abstract.c @@ -0,0 +1,640 @@ +#include // ptrdiff_t + +#include "parts.h" + +#define NULLABLE(x) do { if (x == Py_None) x = NULL; } while (0); + +#define RETURN_INT(value) do { \ + int _ret = (value); \ + if (_ret == -1) { \ + return NULL; \ + } \ + return PyLong_FromLong(_ret); \ + } while (0) + +#define RETURN_SIZE(value) do { \ + Py_ssize_t _ret = (value); \ + if (_ret == -1) { \ + return NULL; \ + } \ + return PyLong_FromSsize_t(_ret); \ + } while (0) + + +static PyObject * +object_getattr(PyObject *self, PyObject *args) +{ + PyObject *obj, *attr_name; + if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(attr_name); + return PyObject_GetAttr(obj, attr_name); +} + +static PyObject * +object_getattrstring(PyObject *self, PyObject *args) +{ + PyObject *obj; + const char *attr_name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { + return NULL; + } + NULLABLE(obj); + return PyObject_GetAttrString(obj, attr_name); +} + +static PyObject * +object_getoptionalattr(PyObject *self, PyObject *args) +{ + PyObject *obj, *attr_name, *value; + if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(attr_name); + + switch (PyObject_GetOptionalAttr(obj, attr_name, &value)) { + case -1: + assert(value == NULL); + return NULL; + case 0: + assert(value == NULL); + return Py_NewRef(PyExc_AttributeError); + case 1: + return value; + default: + Py_FatalError("PyObject_GetOptionalAttr() returned invalid code"); + Py_UNREACHABLE(); + } +} + +static PyObject * +object_getoptionalattrstring(PyObject *self, PyObject *args) +{ + PyObject *obj, *value; + const char *attr_name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { + return NULL; + } + NULLABLE(obj); + + switch (PyObject_GetOptionalAttrString(obj, attr_name, &value)) { + case -1: + assert(value == NULL); + return NULL; + case 0: + assert(value == NULL); + return Py_NewRef(PyExc_AttributeError); + case 1: + return value; + default: + Py_FatalError("PyObject_GetOptionalAttrString() returned invalid code"); + Py_UNREACHABLE(); + } +} + +static PyObject * +object_hasattr(PyObject *self, PyObject *args) +{ + PyObject *obj, *attr_name; + if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(attr_name); + return PyLong_FromLong(PyObject_HasAttr(obj, attr_name)); +} + +static PyObject * 
+object_hasattrstring(PyObject *self, PyObject *args)
+{
+    PyObject *obj;
+    const char *attr_name;
+    Py_ssize_t size;
+    if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) {
+        return NULL;
+    }
+    NULLABLE(obj);
+    return PyLong_FromLong(PyObject_HasAttrString(obj, attr_name));
+}
+
+static PyObject *
+object_setattr(PyObject *self, PyObject *args)
+{
+    PyObject *obj, *attr_name, *value;
+    if (!PyArg_ParseTuple(args, "OOO", &obj, &attr_name, &value)) {
+        return NULL;
+    }
+    NULLABLE(obj);
+    NULLABLE(attr_name);
+    NULLABLE(value);
+    RETURN_INT(PyObject_SetAttr(obj, attr_name, value));
+}
+
+static PyObject *
+object_setattrstring(PyObject *self, PyObject *args)
+{
+    PyObject *obj, *value;
+    const char *attr_name;
+    Py_ssize_t size;
+    if (!PyArg_ParseTuple(args, "Oz#O", &obj, &attr_name, &size, &value)) {
+        return NULL;
+    }
+    NULLABLE(obj);
+    NULLABLE(value);
+    RETURN_INT(PyObject_SetAttrString(obj, attr_name, value));
+}
+
+static PyObject *
+object_delattr(PyObject *self, PyObject *args)
+{
+    PyObject *obj, *attr_name;
+    if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) {
+        return NULL;
+    }
+    NULLABLE(obj);
+    NULLABLE(attr_name);
+    RETURN_INT(PyObject_DelAttr(obj, attr_name));
+}
+
+static PyObject *
+object_delattrstring(PyObject *self, PyObject *args)
+{
+    PyObject *obj;
+    const char *attr_name;
+    Py_ssize_t size;
+    if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) {
+        return NULL;
+    }
+    NULLABLE(obj);
+    RETURN_INT(PyObject_DelAttrString(obj, attr_name));
+}
+
+
+static PyObject *
+mapping_check(PyObject *self, PyObject *obj)
+{
+    NULLABLE(obj);
+    return PyLong_FromLong(PyMapping_Check(obj));
+}
+
+static PyObject *
+mapping_size(PyObject *self, PyObject *obj)
+{
+    NULLABLE(obj);
+    RETURN_SIZE(PyMapping_Size(obj));
+}
+
+static PyObject *
+mapping_length(PyObject *self, PyObject *obj)
+{
+    NULLABLE(obj);
+    RETURN_SIZE(PyMapping_Length(obj));
+}
+
+static PyObject *
+object_getitem(PyObject *self, PyObject *args)
+{
+    PyObject *mapping, *key;
+    if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) {
+        return NULL;
+    }
+    NULLABLE(mapping);
+    NULLABLE(key);
+    return PyObject_GetItem(mapping, key);
+}
+
+static PyObject *
+mapping_getitemstring(PyObject *self, PyObject *args)
+{
+    PyObject *mapping;
+    const char *key;
+    Py_ssize_t size;
+    if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) {
+        return NULL;
+    }
+    NULLABLE(mapping);
+    return PyMapping_GetItemString(mapping, key);
+}
+
+static PyObject *
+mapping_getoptionalitem(PyObject *self, PyObject *args)
+{
+    PyObject *obj, *attr_name, *value;
+    if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) {
+        return NULL;
+    }
+    NULLABLE(obj);
+    NULLABLE(attr_name);
+
+    switch (PyMapping_GetOptionalItem(obj, attr_name, &value)) {
+        case -1:
+            assert(value == NULL);
+            return NULL;
+        case 0:
+            assert(value == NULL);
+            return Py_NewRef(PyExc_KeyError);
+        case 1:
+            return value;
+        default:
+            Py_FatalError("PyMapping_GetOptionalItem() returned invalid code");
+            Py_UNREACHABLE();
+    }
+}
+
+static PyObject *
+mapping_getoptionalitemstring(PyObject *self, PyObject *args)
+{
+    PyObject *obj, *value;
+    const char *attr_name;
+    Py_ssize_t size;
+    if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) {
+        return NULL;
+    }
+    NULLABLE(obj);
+
+    switch (PyMapping_GetOptionalItemString(obj, attr_name, &value)) {
+        case -1:
+            assert(value == NULL);
+            return NULL;
+        case 0:
+            assert(value == NULL);
+            return Py_NewRef(PyExc_KeyError);
+        case 1:
+            return value;
+        default:
+            Py_FatalError("PyMapping_GetOptionalItemString() returned invalid 
code"); + Py_UNREACHABLE(); + } +} + +static PyObject * +mapping_haskey(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + return PyLong_FromLong(PyMapping_HasKey(mapping, key)); +} + +static PyObject * +mapping_haskeystring(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + return PyLong_FromLong(PyMapping_HasKeyString(mapping, key)); +} + +static PyObject * +object_setitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key, *value; + if (!PyArg_ParseTuple(args, "OOO", &mapping, &key, &value)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + NULLABLE(value); + RETURN_INT(PyObject_SetItem(mapping, key, value)); +} + +static PyObject * +mapping_setitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping, *value; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#O", &mapping, &key, &size, &value)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(value); + RETURN_INT(PyMapping_SetItemString(mapping, key, value)); +} + +static PyObject * +object_delitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + RETURN_INT(PyObject_DelItem(mapping, key)); +} + +static PyObject * +mapping_delitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + RETURN_INT(PyMapping_DelItem(mapping, key)); +} + +static PyObject * +mapping_delitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + RETURN_INT(PyMapping_DelItemString(mapping, key)); +} + +static PyObject * +mapping_keys(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyMapping_Keys(obj); +} + +static PyObject * +mapping_values(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyMapping_Values(obj); +} + +static PyObject * +mapping_items(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyMapping_Items(obj); +} + + +static PyObject * +sequence_check(PyObject* self, PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PySequence_Check(obj)); +} + +static PyObject * +sequence_size(PyObject* self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PySequence_Size(obj)); +} + +static PyObject * +sequence_length(PyObject* self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PySequence_Length(obj)); +} + +static PyObject * +sequence_concat(PyObject *self, PyObject *args) +{ + PyObject *seq1, *seq2; + if (!PyArg_ParseTuple(args, "OO", &seq1, &seq2)) { + return NULL; + } + NULLABLE(seq1); + NULLABLE(seq2); + + return PySequence_Concat(seq1, seq2); +} + +static PyObject * +sequence_repeat(PyObject *self, PyObject *args) +{ + PyObject *seq; + Py_ssize_t count; + if (!PyArg_ParseTuple(args, "On", &seq, &count)) { + return NULL; + } + NULLABLE(seq); + + return PySequence_Repeat(seq, count); +} + +static PyObject * +sequence_inplaceconcat(PyObject *self, PyObject *args) +{ + PyObject *seq1, *seq2; + if (!PyArg_ParseTuple(args, "OO", &seq1, &seq2)) { + return NULL; + } + NULLABLE(seq1); + NULLABLE(seq2); + + 
return PySequence_InPlaceConcat(seq1, seq2); +} + +static PyObject * +sequence_inplacerepeat(PyObject *self, PyObject *args) +{ + PyObject *seq; + Py_ssize_t count; + if (!PyArg_ParseTuple(args, "On", &seq, &count)) { + return NULL; + } + NULLABLE(seq); + + return PySequence_InPlaceRepeat(seq, count); +} + +static PyObject * +sequence_getitem(PyObject *self, PyObject *args) +{ + PyObject *seq; + Py_ssize_t i; + if (!PyArg_ParseTuple(args, "On", &seq, &i)) { + return NULL; + } + NULLABLE(seq); + + return PySequence_GetItem(seq, i); +} + +static PyObject * +sequence_setitem(PyObject *self, PyObject *args) +{ + Py_ssize_t i; + PyObject *seq, *val; + if (!PyArg_ParseTuple(args, "OnO", &seq, &i, &val)) { + return NULL; + } + NULLABLE(seq); + NULLABLE(val); + + RETURN_INT(PySequence_SetItem(seq, i, val)); +} + + +static PyObject * +sequence_delitem(PyObject *self, PyObject *args) +{ + Py_ssize_t i; + PyObject *seq; + if (!PyArg_ParseTuple(args, "On", &seq, &i)) { + return NULL; + } + NULLABLE(seq); + + RETURN_INT(PySequence_DelItem(seq, i)); +} + +static PyObject * +sequence_setslice(PyObject* self, PyObject *args) +{ + PyObject *sequence, *obj; + Py_ssize_t i1, i2; + if (!PyArg_ParseTuple(args, "OnnO", &sequence, &i1, &i2, &obj)) { + return NULL; + } + NULLABLE(sequence); + NULLABLE(obj); + + RETURN_INT(PySequence_SetSlice(sequence, i1, i2, obj)); +} + +static PyObject * +sequence_delslice(PyObject *self, PyObject *args) +{ + PyObject *sequence; + Py_ssize_t i1, i2; + if (!PyArg_ParseTuple(args, "Onn", &sequence, &i1, &i2)) { + return NULL; + } + NULLABLE(sequence); + + RETURN_INT(PySequence_DelSlice(sequence, i1, i2)); +} + +static PyObject * +sequence_count(PyObject *self, PyObject *args) +{ + PyObject *seq, *value; + if (!PyArg_ParseTuple(args, "OO", &seq, &value)) { + return NULL; + } + NULLABLE(seq); + NULLABLE(value); + + RETURN_SIZE(PySequence_Count(seq, value)); +} + +static PyObject * +sequence_contains(PyObject *self, PyObject *args) +{ + PyObject *seq, *value; + if (!PyArg_ParseTuple(args, "OO", &seq, &value)) { + return NULL; + } + NULLABLE(seq); + NULLABLE(value); + + RETURN_INT(PySequence_Contains(seq, value)); +} + +static PyObject * +sequence_index(PyObject *self, PyObject *args) +{ + PyObject *seq, *value; + if (!PyArg_ParseTuple(args, "OO", &seq, &value)) { + return NULL; + } + NULLABLE(seq); + NULLABLE(value); + + RETURN_SIZE(PySequence_Index(seq, value)); +} + +static PyObject * +sequence_list(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PySequence_List(obj); +} + +static PyObject * +sequence_tuple(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PySequence_Tuple(obj); +} + + +static PyMethodDef test_methods[] = { + {"object_getattr", object_getattr, METH_VARARGS}, + {"object_getattrstring", object_getattrstring, METH_VARARGS}, + {"object_getoptionalattr", object_getoptionalattr, METH_VARARGS}, + {"object_getoptionalattrstring", object_getoptionalattrstring, METH_VARARGS}, + {"object_hasattr", object_hasattr, METH_VARARGS}, + {"object_hasattrstring", object_hasattrstring, METH_VARARGS}, + {"object_setattr", object_setattr, METH_VARARGS}, + {"object_setattrstring", object_setattrstring, METH_VARARGS}, + {"object_delattr", object_delattr, METH_VARARGS}, + {"object_delattrstring", object_delattrstring, METH_VARARGS}, + + {"mapping_check", mapping_check, METH_O}, + {"mapping_size", mapping_size, METH_O}, + {"mapping_length", mapping_length, METH_O}, + {"object_getitem", object_getitem, METH_VARARGS}, + {"mapping_getitemstring", mapping_getitemstring, 
METH_VARARGS}, + {"mapping_getoptionalitem", mapping_getoptionalitem, METH_VARARGS}, + {"mapping_getoptionalitemstring", mapping_getoptionalitemstring, METH_VARARGS}, + {"mapping_haskey", mapping_haskey, METH_VARARGS}, + {"mapping_haskeystring", mapping_haskeystring, METH_VARARGS}, + {"object_setitem", object_setitem, METH_VARARGS}, + {"mapping_setitemstring", mapping_setitemstring, METH_VARARGS}, + {"object_delitem", object_delitem, METH_VARARGS}, + {"mapping_delitem", mapping_delitem, METH_VARARGS}, + {"mapping_delitemstring", mapping_delitemstring, METH_VARARGS}, + {"mapping_keys", mapping_keys, METH_O}, + {"mapping_values", mapping_values, METH_O}, + {"mapping_items", mapping_items, METH_O}, + + {"sequence_check", sequence_check, METH_O}, + {"sequence_size", sequence_size, METH_O}, + {"sequence_length", sequence_length, METH_O}, + {"sequence_concat", sequence_concat, METH_VARARGS}, + {"sequence_repeat", sequence_repeat, METH_VARARGS}, + {"sequence_inplaceconcat", sequence_inplaceconcat, METH_VARARGS}, + {"sequence_inplacerepeat", sequence_inplacerepeat, METH_VARARGS}, + {"sequence_getitem", sequence_getitem, METH_VARARGS}, + {"sequence_setitem", sequence_setitem, METH_VARARGS}, + {"sequence_delitem", sequence_delitem, METH_VARARGS}, + {"sequence_setslice", sequence_setslice, METH_VARARGS}, + {"sequence_delslice", sequence_delslice, METH_VARARGS}, + {"sequence_count", sequence_count, METH_VARARGS}, + {"sequence_contains", sequence_contains, METH_VARARGS}, + {"sequence_index", sequence_index, METH_VARARGS}, + {"sequence_list", sequence_list, METH_O}, + {"sequence_tuple", sequence_tuple, METH_O}, + + {NULL}, +}; + +int +_PyTestCapi_Init_Abstract(PyObject *m) +{ + if (PyModule_AddFunctions(m, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/buffer.c b/Modules/_testcapi/buffer.c index aff9a477eff57e..942774156c6c47 100644 --- a/Modules/_testcapi/buffer.c +++ b/Modules/_testcapi/buffer.c @@ -2,7 +2,7 @@ #include "parts.h" -#include "structmember.h" // PyMemberDef + #include // offsetof typedef struct { @@ -72,7 +72,7 @@ static PyBufferProcs testbuf_as_buffer = { }; static struct PyMemberDef testbuf_members[] = { - {"references", T_PYSSIZET, offsetof(testBufObject, references), READONLY}, + {"references", Py_T_PYSSIZET, offsetof(testBufObject, references), Py_READONLY}, {NULL}, }; diff --git a/Modules/_testcapi/dict.c b/Modules/_testcapi/dict.c new file mode 100644 index 00000000000000..b1dfcf4c707da7 --- /dev/null +++ b/Modules/_testcapi/dict.c @@ -0,0 +1,376 @@ +#include // ptrdiff_t + +#include "parts.h" + +#define NULLABLE(x) do { if (x == Py_None) x = NULL; } while (0); + +#define RETURN_INT(value) do { \ + int _ret = (value); \ + if (_ret == -1) { \ + return NULL; \ + } \ + return PyLong_FromLong(_ret); \ + } while (0) + +#define RETURN_SIZE(value) do { \ + Py_ssize_t _ret = (value); \ + if (_ret == -1) { \ + return NULL; \ + } \ + return PyLong_FromSsize_t(_ret); \ + } while (0) + + +static PyObject * +dict_check(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyDict_Check(obj)); +} + +static PyObject * +dict_checkexact(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyLong_FromLong(PyDict_CheckExact(obj)); +} + +static PyObject * +dict_new(PyObject *self, PyObject *Py_UNUSED(ignored)) +{ + return PyDict_New(); +} + +static PyObject * +dictproxy_new(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDictProxy_New(obj); +} + +static PyObject * +dict_clear(PyObject *self, PyObject *obj) +{ + 
PyDict_Clear(obj); + Py_RETURN_NONE; +} + +static PyObject * +dict_copy(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDict_Copy(obj); +} + +static PyObject * +dict_contains(PyObject *self, PyObject *args) +{ + PyObject *obj, *key; + if (!PyArg_ParseTuple(args, "OO", &obj, &key)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(key); + RETURN_INT(PyDict_Contains(obj, key)); +} + +static PyObject * +dict_size(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + RETURN_SIZE(PyDict_Size(obj)); +} + +static PyObject * +dict_getitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + PyObject *value = PyDict_GetItem(mapping, key); + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } + return Py_NewRef(PyExc_KeyError); + } + return Py_NewRef(value); +} + +static PyObject * +dict_getitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + PyObject *value = PyDict_GetItemString(mapping, key); + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } + return Py_NewRef(PyExc_KeyError); + } + return Py_NewRef(value); +} + +static PyObject * +dict_getitemwitherror(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + PyObject *value = PyDict_GetItemWithError(mapping, key); + if (value == NULL) { + if (PyErr_Occurred()) { + return NULL; + } + return Py_NewRef(PyExc_KeyError); + } + return Py_NewRef(value); +} + + +static PyObject * +dict_getitemref(PyObject *self, PyObject *args) +{ + PyObject *obj, *attr_name, *value; + if (!PyArg_ParseTuple(args, "OO", &obj, &attr_name)) { + return NULL; + } + NULLABLE(obj); + NULLABLE(attr_name); + + switch (PyDict_GetItemRef(obj, attr_name, &value)) { + case -1: + assert(value == NULL); + return NULL; + case 0: + assert(value == NULL); + return Py_NewRef(PyExc_KeyError); + case 1: + return value; + default: + Py_FatalError("PyMapping_GetItemRef() returned invalid code"); + Py_UNREACHABLE(); + } +} + +static PyObject * +dict_getitemstringref(PyObject *self, PyObject *args) +{ + PyObject *obj, *value; + const char *attr_name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &obj, &attr_name, &size)) { + return NULL; + } + NULLABLE(obj); + + switch (PyDict_GetItemStringRef(obj, attr_name, &value)) { + case -1: + assert(value == NULL); + return NULL; + case 0: + assert(value == NULL); + return Py_NewRef(PyExc_KeyError); + case 1: + return value; + default: + Py_FatalError("PyDict_GetItemStringRef() returned invalid code"); + Py_UNREACHABLE(); + } +} + +static PyObject * +dict_setitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key, *value; + if (!PyArg_ParseTuple(args, "OOO", &mapping, &key, &value)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + NULLABLE(value); + RETURN_INT(PyDict_SetItem(mapping, key, value)); +} + +static PyObject * +dict_setitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping, *value; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#O", &mapping, &key, &size, &value)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(value); + RETURN_INT(PyDict_SetItemString(mapping, key, value)); +} + +static PyObject * +dict_setdefault(PyObject *self, 
PyObject *args) +{ + PyObject *mapping, *key, *defaultobj; + if (!PyArg_ParseTuple(args, "OOO", &mapping, &key, &defaultobj)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + NULLABLE(defaultobj); + return PyDict_SetDefault(mapping, key, defaultobj); +} + +static PyObject * +dict_delitem(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key; + if (!PyArg_ParseTuple(args, "OO", &mapping, &key)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(key); + RETURN_INT(PyDict_DelItem(mapping, key)); +} + +static PyObject * +dict_delitemstring(PyObject *self, PyObject *args) +{ + PyObject *mapping; + const char *key; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "Oz#", &mapping, &key, &size)) { + return NULL; + } + NULLABLE(mapping); + RETURN_INT(PyDict_DelItemString(mapping, key)); +} + +static PyObject * +dict_keys(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDict_Keys(obj); +} + +static PyObject * +dict_values(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDict_Values(obj); +} + +static PyObject * +dict_items(PyObject *self, PyObject *obj) +{ + NULLABLE(obj); + return PyDict_Items(obj); +} + +static PyObject * +dict_next(PyObject *self, PyObject *args) +{ + PyObject *mapping, *key, *value; + Py_ssize_t pos; + if (!PyArg_ParseTuple(args, "On", &mapping, &pos)) { + return NULL; + } + NULLABLE(mapping); + int rc = PyDict_Next(mapping, &pos, &key, &value); + if (rc != 0) { + return Py_BuildValue("inOO", rc, pos, key, value); + } + if (PyErr_Occurred()) { + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +dict_merge(PyObject *self, PyObject *args) +{ + PyObject *mapping, *mapping2; + int override; + if (!PyArg_ParseTuple(args, "OOi", &mapping, &mapping2, &override)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(mapping2); + RETURN_INT(PyDict_Merge(mapping, mapping2, override)); +} + +static PyObject * +dict_update(PyObject *self, PyObject *args) +{ + PyObject *mapping, *mapping2; + if (!PyArg_ParseTuple(args, "OO", &mapping, &mapping2)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(mapping2); + RETURN_INT(PyDict_Update(mapping, mapping2)); +} + +static PyObject * +dict_mergefromseq2(PyObject *self, PyObject *args) +{ + PyObject *mapping, *seq; + int override; + if (!PyArg_ParseTuple(args, "OOi", &mapping, &seq, &override)) { + return NULL; + } + NULLABLE(mapping); + NULLABLE(seq); + RETURN_INT(PyDict_MergeFromSeq2(mapping, seq, override)); +} + + +static PyMethodDef test_methods[] = { + {"dict_check", dict_check, METH_O}, + {"dict_checkexact", dict_checkexact, METH_O}, + {"dict_new", dict_new, METH_NOARGS}, + {"dictproxy_new", dictproxy_new, METH_O}, + {"dict_clear", dict_clear, METH_O}, + {"dict_copy", dict_copy, METH_O}, + {"dict_size", dict_size, METH_O}, + {"dict_getitem", dict_getitem, METH_VARARGS}, + {"dict_getitemwitherror", dict_getitemwitherror, METH_VARARGS}, + {"dict_getitemstring", dict_getitemstring, METH_VARARGS}, + {"dict_getitemref", dict_getitemref, METH_VARARGS}, + {"dict_getitemstringref", dict_getitemstringref, METH_VARARGS}, + {"dict_contains", dict_contains, METH_VARARGS}, + {"dict_setitem", dict_setitem, METH_VARARGS}, + {"dict_setitemstring", dict_setitemstring, METH_VARARGS}, + {"dict_delitem", dict_delitem, METH_VARARGS}, + {"dict_delitemstring", dict_delitemstring, METH_VARARGS}, + {"dict_setdefault", dict_setdefault, METH_VARARGS}, + {"dict_keys", dict_keys, METH_O}, + {"dict_values", dict_values, METH_O}, + {"dict_items", dict_items, METH_O}, + {"dict_next", dict_next, METH_VARARGS}, + 
{"dict_merge", dict_merge, METH_VARARGS}, + {"dict_update", dict_update, METH_VARARGS}, + {"dict_mergefromseq2", dict_mergefromseq2, METH_VARARGS}, + + {NULL}, +}; + +int +_PyTestCapi_Init_Dict(PyObject *m) +{ + if (PyModule_AddFunctions(m, test_methods) < 0) { + return -1; + } + + return 0; +} diff --git a/Modules/_testcapi/docstring.c b/Modules/_testcapi/docstring.c index a997c54a8a69c4..b680171cc1437a 100644 --- a/Modules/_testcapi/docstring.c +++ b/Modules/_testcapi/docstring.c @@ -66,42 +66,88 @@ test_with_docstring(PyObject *self, PyObject *Py_UNUSED(ignored)) static PyMethodDef test_methods[] = { {"docstring_empty", - (PyCFunction)test_with_docstring, METH_NOARGS, + (PyCFunction)test_with_docstring, METH_VARARGS, docstring_empty}, {"docstring_no_signature", + (PyCFunction)test_with_docstring, METH_VARARGS, + docstring_no_signature}, + {"docstring_no_signature_noargs", (PyCFunction)test_with_docstring, METH_NOARGS, docstring_no_signature}, + {"docstring_no_signature_o", + (PyCFunction)test_with_docstring, METH_O, + docstring_no_signature}, {"docstring_with_invalid_signature", - (PyCFunction)test_with_docstring, METH_NOARGS, + (PyCFunction)test_with_docstring, METH_VARARGS, docstring_with_invalid_signature}, {"docstring_with_invalid_signature2", - (PyCFunction)test_with_docstring, METH_NOARGS, + (PyCFunction)test_with_docstring, METH_VARARGS, docstring_with_invalid_signature2}, {"docstring_with_signature", - (PyCFunction)test_with_docstring, METH_NOARGS, + (PyCFunction)test_with_docstring, METH_VARARGS, docstring_with_signature}, {"docstring_with_signature_and_extra_newlines", - (PyCFunction)test_with_docstring, METH_NOARGS, + (PyCFunction)test_with_docstring, METH_VARARGS, docstring_with_signature_and_extra_newlines}, {"docstring_with_signature_but_no_doc", - (PyCFunction)test_with_docstring, METH_NOARGS, + (PyCFunction)test_with_docstring, METH_VARARGS, docstring_with_signature_but_no_doc}, {"docstring_with_signature_with_defaults", - (PyCFunction)test_with_docstring, METH_NOARGS, + (PyCFunction)test_with_docstring, METH_VARARGS, docstring_with_signature_with_defaults}, {"no_docstring", - (PyCFunction)test_with_docstring, METH_NOARGS}, + (PyCFunction)test_with_docstring, METH_VARARGS}, {"test_with_docstring", - test_with_docstring, METH_NOARGS, + test_with_docstring, METH_VARARGS, PyDoc_STR("This is a pretty normal docstring.")}, {NULL}, }; +static PyMethodDef DocStringNoSignatureTest_methods[] = { + {"meth_noargs", + (PyCFunction)test_with_docstring, METH_NOARGS, + docstring_no_signature}, + {"meth_o", + (PyCFunction)test_with_docstring, METH_O, + docstring_no_signature}, + {"meth_noargs_class", + (PyCFunction)test_with_docstring, METH_NOARGS|METH_CLASS, + docstring_no_signature}, + {"meth_o_class", + (PyCFunction)test_with_docstring, METH_O|METH_CLASS, + docstring_no_signature}, + {"meth_noargs_static", + (PyCFunction)test_with_docstring, METH_NOARGS|METH_STATIC, + docstring_no_signature}, + {"meth_o_static", + (PyCFunction)test_with_docstring, METH_O|METH_STATIC, + docstring_no_signature}, + {"meth_noargs_coexist", + (PyCFunction)test_with_docstring, METH_NOARGS|METH_COEXIST, + docstring_no_signature}, + {"meth_o_coexist", + (PyCFunction)test_with_docstring, METH_O|METH_COEXIST, + docstring_no_signature}, + {NULL}, +}; + +static PyTypeObject DocStringNoSignatureTest = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_name = "_testcapi.DocStringNoSignatureTest", + .tp_basicsize = sizeof(PyObject), + .tp_flags = Py_TPFLAGS_DEFAULT, + .tp_methods = DocStringNoSignatureTest_methods, + .tp_new 
= PyType_GenericNew, +}; + int _PyTestCapi_Init_Docstring(PyObject *mod) { if (PyModule_AddFunctions(mod, test_methods) < 0) { return -1; } + if (PyModule_AddType(mod, &DocStringNoSignatureTest) < 0) { + return -1; + } return 0; } diff --git a/Modules/_testcapi/heaptype.c b/Modules/_testcapi/heaptype.c index c124871e433431..d14a1763184207 100644 --- a/Modules/_testcapi/heaptype.c +++ b/Modules/_testcapi/heaptype.c @@ -1,5 +1,6 @@ #include "parts.h" -#include "structmember.h" // PyMemberDef +#include // offsetof() + static struct PyModuleDef *_testcapimodule = NULL; // set at initialization @@ -332,7 +333,7 @@ typedef struct { static struct PyMemberDef members_to_repeat[] = { - {"T_INT", T_INT, offsetof(HeapCTypeWithDataObject, data), 0, NULL}, + {"Py_T_INT", Py_T_INT, offsetof(HeapCTypeWithDataObject, data), 0, NULL}, {NULL} }; @@ -477,7 +478,7 @@ typedef struct { } HeapCTypeObject; static struct PyMemberDef heapctype_members[] = { - {"value", T_INT, offsetof(HeapCTypeObject, value)}, + {"value", Py_T_INT, offsetof(HeapCTypeObject, value)}, {NULL} /* Sentinel */ }; @@ -571,7 +572,7 @@ heapctypesubclass_init(PyObject *self, PyObject *args, PyObject *kwargs) } static struct PyMemberDef heapctypesubclass_members[] = { - {"value2", T_INT, offsetof(HeapCTypeSubclassObject, value2)}, + {"value2", Py_T_INT, offsetof(HeapCTypeSubclassObject, value2)}, {NULL} /* Sentinel */ }; @@ -772,8 +773,8 @@ static PyGetSetDef heapctypewithdict_getsetlist[] = { }; static struct PyMemberDef heapctypewithdict_members[] = { - {"dictobj", T_OBJECT, offsetof(HeapCTypeWithDictObject, dict)}, - {"__dictoffset__", T_PYSSIZET, offsetof(HeapCTypeWithDictObject, dict), READONLY}, + {"dictobj", _Py_T_OBJECT, offsetof(HeapCTypeWithDictObject, dict)}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(HeapCTypeWithDictObject, dict), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -867,8 +868,8 @@ static PyType_Spec HeapCTypeWithManagedWeakref_spec = { }; static struct PyMemberDef heapctypewithnegativedict_members[] = { - {"dictobj", T_OBJECT, offsetof(HeapCTypeWithDictObject, dict)}, - {"__dictoffset__", T_PYSSIZET, -(Py_ssize_t)sizeof(void*), READONLY}, + {"dictobj", _Py_T_OBJECT, offsetof(HeapCTypeWithDictObject, dict)}, + {"__dictoffset__", Py_T_PYSSIZET, -(Py_ssize_t)sizeof(void*), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -893,9 +894,9 @@ typedef struct { } HeapCTypeWithWeakrefObject; static struct PyMemberDef heapctypewithweakref_members[] = { - {"weakreflist", T_OBJECT, offsetof(HeapCTypeWithWeakrefObject, weakreflist)}, - {"__weaklistoffset__", T_PYSSIZET, - offsetof(HeapCTypeWithWeakrefObject, weakreflist), READONLY}, + {"weakreflist", _Py_T_OBJECT, offsetof(HeapCTypeWithWeakrefObject, weakreflist)}, + {"__weaklistoffset__", Py_T_PYSSIZET, + offsetof(HeapCTypeWithWeakrefObject, weakreflist), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -943,7 +944,7 @@ typedef struct { } HeapCTypeSetattrObject; static struct PyMemberDef heapctypesetattr_members[] = { - {"pvalue", T_LONG, offsetof(HeapCTypeSetattrObject, value)}, + {"pvalue", Py_T_LONG, offsetof(HeapCTypeSetattrObject, value)}, {NULL} /* Sentinel */ }; @@ -1122,94 +1123,62 @@ _PyTestCapi_Init_Heaptype(PyObject *m) { return -1; } +#define ADD(name, value) do { \ + if (PyModule_Add(m, name, value) < 0) { \ + return -1; \ + } \ + } while (0) + PyObject *HeapDocCType = PyType_FromSpec(&HeapDocCType_spec); - if (HeapDocCType == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapDocCType", HeapDocCType); + ADD("HeapDocCType", HeapDocCType); /* bpo-41832: Add a new type to test 
PyType_FromSpec() now can accept a NULL tp_doc slot. */ PyObject *NullTpDocType = PyType_FromSpec(&NullTpDocType_spec); - if (NullTpDocType == NULL) { - return -1; - } - PyModule_AddObject(m, "NullTpDocType", NullTpDocType); + ADD("NullTpDocType", NullTpDocType); PyObject *HeapGcCType = PyType_FromSpec(&HeapGcCType_spec); - if (HeapGcCType == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapGcCType", HeapGcCType); + ADD("HeapGcCType", HeapGcCType); PyObject *HeapCType = PyType_FromSpec(&HeapCType_spec); if (HeapCType == NULL) { return -1; } PyObject *subclass_bases = PyTuple_Pack(1, HeapCType); + Py_DECREF(HeapCType); if (subclass_bases == NULL) { return -1; } PyObject *HeapCTypeSubclass = PyType_FromSpecWithBases(&HeapCTypeSubclass_spec, subclass_bases); - if (HeapCTypeSubclass == NULL) { - return -1; - } Py_DECREF(subclass_bases); - PyModule_AddObject(m, "HeapCTypeSubclass", HeapCTypeSubclass); + ADD("HeapCTypeSubclass", HeapCTypeSubclass); PyObject *HeapCTypeWithDict = PyType_FromSpec(&HeapCTypeWithDict_spec); - if (HeapCTypeWithDict == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeWithDict", HeapCTypeWithDict); + ADD("HeapCTypeWithDict", HeapCTypeWithDict); PyObject *HeapCTypeWithDict2 = PyType_FromSpec(&HeapCTypeWithDict2_spec); - if (HeapCTypeWithDict2 == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeWithDict2", HeapCTypeWithDict2); + ADD("HeapCTypeWithDict2", HeapCTypeWithDict2); PyObject *HeapCTypeWithNegativeDict = PyType_FromSpec(&HeapCTypeWithNegativeDict_spec); - if (HeapCTypeWithNegativeDict == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeWithNegativeDict", HeapCTypeWithNegativeDict); + ADD("HeapCTypeWithNegativeDict", HeapCTypeWithNegativeDict); PyObject *HeapCTypeWithManagedDict = PyType_FromSpec(&HeapCTypeWithManagedDict_spec); - if (HeapCTypeWithManagedDict == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeWithManagedDict", HeapCTypeWithManagedDict); + ADD("HeapCTypeWithManagedDict", HeapCTypeWithManagedDict); PyObject *HeapCTypeWithManagedWeakref = PyType_FromSpec(&HeapCTypeWithManagedWeakref_spec); - if (HeapCTypeWithManagedWeakref == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeWithManagedWeakref", HeapCTypeWithManagedWeakref); + ADD("HeapCTypeWithManagedWeakref", HeapCTypeWithManagedWeakref); PyObject *HeapCTypeWithWeakref = PyType_FromSpec(&HeapCTypeWithWeakref_spec); - if (HeapCTypeWithWeakref == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeWithWeakref", HeapCTypeWithWeakref); + ADD("HeapCTypeWithWeakref", HeapCTypeWithWeakref); PyObject *HeapCTypeWithWeakref2 = PyType_FromSpec(&HeapCTypeWithWeakref2_spec); - if (HeapCTypeWithWeakref2 == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeWithWeakref2", HeapCTypeWithWeakref2); + ADD("HeapCTypeWithWeakref2", HeapCTypeWithWeakref2); PyObject *HeapCTypeWithBuffer = PyType_FromSpec(&HeapCTypeWithBuffer_spec); - if (HeapCTypeWithBuffer == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeWithBuffer", HeapCTypeWithBuffer); + ADD("HeapCTypeWithBuffer", HeapCTypeWithBuffer); PyObject *HeapCTypeSetattr = PyType_FromSpec(&HeapCTypeSetattr_spec); - if (HeapCTypeSetattr == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeSetattr", HeapCTypeSetattr); + ADD("HeapCTypeSetattr", HeapCTypeSetattr); PyObject *subclass_with_finalizer_bases = PyTuple_Pack(1, HeapCTypeSubclass); if (subclass_with_finalizer_bases == NULL) { @@ -1217,32 +1186,20 @@ _PyTestCapi_Init_Heaptype(PyObject *m) { } PyObject *HeapCTypeSubclassWithFinalizer 
= PyType_FromSpecWithBases( &HeapCTypeSubclassWithFinalizer_spec, subclass_with_finalizer_bases); - if (HeapCTypeSubclassWithFinalizer == NULL) { - return -1; - } Py_DECREF(subclass_with_finalizer_bases); - PyModule_AddObject(m, "HeapCTypeSubclassWithFinalizer", HeapCTypeSubclassWithFinalizer); + ADD("HeapCTypeSubclassWithFinalizer", HeapCTypeSubclassWithFinalizer); PyObject *HeapCTypeMetaclass = PyType_FromMetaclass( &PyType_Type, m, &HeapCTypeMetaclass_spec, (PyObject *) &PyType_Type); - if (HeapCTypeMetaclass == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeMetaclass", HeapCTypeMetaclass); + ADD("HeapCTypeMetaclass", HeapCTypeMetaclass); PyObject *HeapCTypeMetaclassCustomNew = PyType_FromMetaclass( &PyType_Type, m, &HeapCTypeMetaclassCustomNew_spec, (PyObject *) &PyType_Type); - if (HeapCTypeMetaclassCustomNew == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeMetaclassCustomNew", HeapCTypeMetaclassCustomNew); + ADD("HeapCTypeMetaclassCustomNew", HeapCTypeMetaclassCustomNew); PyObject *HeapCTypeMetaclassNullNew = PyType_FromMetaclass( &PyType_Type, m, &HeapCTypeMetaclassNullNew_spec, (PyObject *) &PyType_Type); - if (HeapCTypeMetaclassNullNew == NULL) { - return -1; - } - PyModule_AddObject(m, "HeapCTypeMetaclassNullNew", HeapCTypeMetaclassNullNew); + ADD("HeapCTypeMetaclassNullNew", HeapCTypeMetaclassNullNew); PyObject *HeapCCollection = PyType_FromMetaclass( NULL, m, &HeapCCollection_spec, NULL); diff --git a/Modules/_testcapi/mem.c b/Modules/_testcapi/mem.c index 979b3a4b2b1af6..ef9234d7f8955f 100644 --- a/Modules/_testcapi/mem.c +++ b/Modules/_testcapi/mem.c @@ -440,17 +440,6 @@ test_pymem_alloc0(PyObject *self, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } -static PyObject * -test_pymem_getallocatorsname(PyObject *self, PyObject *args) -{ - const char *name = _PyMem_GetCurrentAllocatorName(); - if (name == NULL) { - PyErr_SetString(PyExc_RuntimeError, "cannot get allocators name"); - return NULL; - } - return PyUnicode_FromString(name); -} - static PyObject * test_pymem_setrawallocators(PyObject *self, PyObject *Py_UNUSED(ignored)) { @@ -526,75 +515,6 @@ pymem_malloc_without_gil(PyObject *self, PyObject *args) Py_RETURN_NONE; } -static PyObject * -test_pyobject_is_freed(const char *test_name, PyObject *op) -{ - if (!_PyObject_IsFreed(op)) { - PyErr_SetString(PyExc_AssertionError, - "object is not seen as freed"); - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -check_pyobject_null_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *op = NULL; - return test_pyobject_is_freed("check_pyobject_null_is_freed", op); -} - - -static PyObject * -check_pyobject_uninitialized_is_freed(PyObject *self, - PyObject *Py_UNUSED(args)) -{ - PyObject *op = (PyObject *)PyObject_Malloc(sizeof(PyObject)); - if (op == NULL) { - return NULL; - } - /* Initialize reference count to avoid early crash in ceval or GC */ - Py_SET_REFCNT(op, 1); - /* object fields like ob_type are uninitialized! 
*/ - return test_pyobject_is_freed("check_pyobject_uninitialized_is_freed", op); -} - - -static PyObject * -check_pyobject_forbidden_bytes_is_freed(PyObject *self, - PyObject *Py_UNUSED(args)) -{ - /* Allocate an incomplete PyObject structure: truncate 'ob_type' field */ - PyObject *op = (PyObject *)PyObject_Malloc(offsetof(PyObject, ob_type)); - if (op == NULL) { - return NULL; - } - /* Initialize reference count to avoid early crash in ceval or GC */ - Py_SET_REFCNT(op, 1); - /* ob_type field is after the memory block: part of "forbidden bytes" - when using debug hooks on memory allocators! */ - return test_pyobject_is_freed("check_pyobject_forbidden_bytes_is_freed", op); -} - - -static PyObject * -check_pyobject_freed_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) -{ - /* This test would fail if run with the address sanitizer */ -#ifdef _Py_ADDRESS_SANITIZER - Py_RETURN_NONE; -#else - PyObject *op = PyObject_CallNoArgs((PyObject *)&PyBaseObject_Type); - if (op == NULL) { - return NULL; - } - Py_TYPE(op)->tp_dealloc(op); - /* Reset reference count to avoid early crash in ceval or GC */ - Py_SET_REFCNT(op, 1); - /* object memory is freed! */ - return test_pyobject_is_freed("check_pyobject_freed_is_freed", op); -#endif -} // Tracemalloc tests static PyObject * @@ -656,15 +576,8 @@ tracemalloc_untrack(PyObject *self, PyObject *args) } static PyMethodDef test_methods[] = { - {"check_pyobject_forbidden_bytes_is_freed", - check_pyobject_forbidden_bytes_is_freed, METH_NOARGS}, - {"check_pyobject_freed_is_freed", check_pyobject_freed_is_freed, METH_NOARGS}, - {"check_pyobject_null_is_freed", check_pyobject_null_is_freed, METH_NOARGS}, - {"check_pyobject_uninitialized_is_freed", - check_pyobject_uninitialized_is_freed, METH_NOARGS}, {"pymem_api_misuse", pymem_api_misuse, METH_NOARGS}, {"pymem_buffer_overflow", pymem_buffer_overflow, METH_NOARGS}, - {"pymem_getallocatorsname", test_pymem_getallocatorsname, METH_NOARGS}, {"pymem_malloc_without_gil", pymem_malloc_without_gil, METH_NOARGS}, {"pyobject_malloc_without_gil", pyobject_malloc_without_gil, METH_NOARGS}, {"remove_mem_hooks", remove_mem_hooks, METH_NOARGS, diff --git a/Modules/_testcapi/parts.h b/Modules/_testcapi/parts.h index aaec0a61916948..65ebf80bcd1e95 100644 --- a/Modules/_testcapi/parts.h +++ b/Modules/_testcapi/parts.h @@ -26,6 +26,7 @@ int _PyTestCapi_Init_Vectorcall(PyObject *module); int _PyTestCapi_Init_Heaptype(PyObject *module); +int _PyTestCapi_Init_Abstract(PyObject *module); int _PyTestCapi_Init_Unicode(PyObject *module); int _PyTestCapi_Init_GetArgs(PyObject *module); int _PyTestCapi_Init_DateTime(PyObject *module); @@ -34,6 +35,7 @@ int _PyTestCapi_Init_Mem(PyObject *module); int _PyTestCapi_Init_Watchers(PyObject *module); int _PyTestCapi_Init_Long(PyObject *module); int _PyTestCapi_Init_Float(PyObject *module); +int _PyTestCapi_Init_Dict(PyObject *module); int _PyTestCapi_Init_Structmember(PyObject *module); int _PyTestCapi_Init_Exceptions(PyObject *module); int _PyTestCapi_Init_Code(PyObject *module); diff --git a/Modules/_testcapi/structmember.c b/Modules/_testcapi/structmember.c index 8522dc962efa40..096eaecd40855f 100644 --- a/Modules/_testcapi/structmember.c +++ b/Modules/_testcapi/structmember.c @@ -193,7 +193,7 @@ _PyTestCapi_Init_Structmember(PyObject *m) if (res < 0) { return -1; } - res = PyModule_AddObject( + res = PyModule_AddObjectRef( m, "_test_structmembersType_OldAPI", (PyObject *)&test_structmembersType_OldAPI); diff --git a/Modules/_testcapi/vectorcall.c b/Modules/_testcapi/vectorcall.c index 
5ee468bd28c853..2b5110fcba2c91 100644 --- a/Modules/_testcapi/vectorcall.c +++ b/Modules/_testcapi/vectorcall.c @@ -1,7 +1,7 @@ #include "parts.h" #include "clinic/vectorcall.c.h" -#include "structmember.h" // PyMemberDef + #include // offsetof /*[clinic input] @@ -155,10 +155,9 @@ VectorCallClass_vectorcall(PyObject *callable, } /*[clinic input] -module _testcapi class _testcapi.VectorCallClass "PyObject *" "&PyType_Type" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=8423a8e919f2f0df]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=95c63c1a47f9a995]*/ /*[clinic input] _testcapi.VectorCallClass.set_vectorcall @@ -197,7 +196,7 @@ PyMethodDef VectorCallClass_methods[] = { }; PyMemberDef VectorCallClass_members[] = { - {"__vectorcalloffset__", T_PYSSIZET, 0/* set later */, READONLY}, + {"__vectorcalloffset__", Py_T_PYSSIZET, 0/* set later */, Py_READONLY}, {NULL} }; diff --git a/Modules/_testcapi/vectorcall_limited.c b/Modules/_testcapi/vectorcall_limited.c index a69f1d3f2a79b5..a96925e840121a 100644 --- a/Modules/_testcapi/vectorcall_limited.c +++ b/Modules/_testcapi/vectorcall_limited.c @@ -3,7 +3,7 @@ #ifdef LIMITED_API_AVAILABLE -#include "structmember.h" // PyMemberDef + /* Test Vectorcall in the limited API */ @@ -132,7 +132,7 @@ call_vectorcall_method(PyObject* self, PyObject *callable) } static PyMemberDef LimitedVectorCallClass_members[] = { - {"__vectorcalloffset__", T_PYSSIZET, sizeof(PyObject), READONLY}, + {"__vectorcalloffset__", Py_T_PYSSIZET, sizeof(PyObject), Py_READONLY}, {NULL} }; diff --git a/Modules/_testcapi/watchers.c b/Modules/_testcapi/watchers.c index 4cf567b3314980..8a264bba4ed6ed 100644 --- a/Modules/_testcapi/watchers.c +++ b/Modules/_testcapi/watchers.c @@ -295,6 +295,7 @@ _testcapi_unwatch_type_impl(PyObject *module, int watcher_id, PyObject *type) // Test code object watching #define NUM_CODE_WATCHERS 2 +static int code_watcher_ids[NUM_CODE_WATCHERS] = {-1, -1}; static int num_code_object_created_events[NUM_CODE_WATCHERS] = {0, 0}; static int num_code_object_destroyed_events[NUM_CODE_WATCHERS] = {0, 0}; @@ -345,11 +346,13 @@ add_code_watcher(PyObject *self, PyObject *which_watcher) long which_l = PyLong_AsLong(which_watcher); if (which_l == 0) { watcher_id = PyCode_AddWatcher(first_code_object_callback); + code_watcher_ids[0] = watcher_id; num_code_object_created_events[0] = 0; num_code_object_destroyed_events[0] = 0; } else if (which_l == 1) { watcher_id = PyCode_AddWatcher(second_code_object_callback); + code_watcher_ids[1] = watcher_id; num_code_object_created_events[1] = 0; num_code_object_destroyed_events[1] = 0; } @@ -375,9 +378,14 @@ clear_code_watcher(PyObject *self, PyObject *watcher_id) return NULL; } // reset static events counters - if (watcher_id_l >= 0 && watcher_id_l < NUM_CODE_WATCHERS) { - num_code_object_created_events[watcher_id_l] = 0; - num_code_object_destroyed_events[watcher_id_l] = 0; + if (watcher_id_l >= 0) { + for (int i = 0; i < NUM_CODE_WATCHERS; i++) { + if (watcher_id_l == code_watcher_ids[i]) { + code_watcher_ids[i] = -1; + num_code_object_created_events[i] = 0; + num_code_object_destroyed_events[i] = 0; + } + } } Py_RETURN_NONE; } diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index dd2c9c72e53787..35599f8baa204d 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -20,15 +20,16 @@ #include "Python.h" #include "frameobject.h" // PyFrame_New #include "marshal.h" // PyMarshal_WriteLongToFile -#include "structmember.h" // for 
offsetof(), T_OBJECT + #include // FLT_MAX #include +#include // offsetof() #ifndef MS_WINDOWS -#include +# include #endif #ifdef HAVE_SYS_WAIT_H -#include // W_STOPCODE +# include // W_STOPCODE #endif #ifdef Py_BUILD_CORE @@ -43,6 +44,16 @@ // Include definitions from there. #include "_testcapi/parts.h" +#define NULLABLE(x) do { if (x == Py_None) x = NULL; } while (0); + +#define RETURN_INT(value) do { \ + int _ret = (value); \ + if (_ret == -1) { \ + return NULL; \ + } \ + return PyLong_FromLong(_ret); \ + } while (0) + // Forward declarations static struct PyModuleDef _testcapimodule; static PyObject *TestError; /* set to exception object in init */ @@ -1982,7 +1993,7 @@ return_result_with_error(PyObject *self, PyObject *args) Py_RETURN_NONE; } -static PyObject* +static PyObject * getitem_with_error(PyObject *self, PyObject *args) { PyObject *map, *key; @@ -2059,90 +2070,6 @@ py_w_stopcode(PyObject *self, PyObject *args) #endif -static PyObject * -get_mapping_keys(PyObject* self, PyObject *obj) -{ - return PyMapping_Keys(obj); -} - -static PyObject * -get_mapping_values(PyObject* self, PyObject *obj) -{ - return PyMapping_Values(obj); -} - -static PyObject * -get_mapping_items(PyObject* self, PyObject *obj) -{ - return PyMapping_Items(obj); -} - -static PyObject * -test_mapping_has_key_string(PyObject *self, PyObject *Py_UNUSED(args)) -{ - PyObject *context = PyDict_New(); - PyObject *val = PyLong_FromLong(1); - - // Since this uses `const char*` it is easier to test this in C: - PyDict_SetItemString(context, "a", val); - if (!PyMapping_HasKeyString(context, "a")) { - PyErr_SetString(PyExc_RuntimeError, - "Existing mapping key does not exist"); - return NULL; - } - if (PyMapping_HasKeyString(context, "b")) { - PyErr_SetString(PyExc_RuntimeError, - "Missing mapping key exists"); - return NULL; - } - - Py_DECREF(val); - Py_DECREF(context); - Py_RETURN_NONE; -} - -static PyObject * -mapping_has_key(PyObject* self, PyObject *args) -{ - PyObject *context, *key; - if (!PyArg_ParseTuple(args, "OO", &context, &key)) { - return NULL; - } - return PyLong_FromLong(PyMapping_HasKey(context, key)); -} - -static PyObject * -sequence_set_slice(PyObject* self, PyObject *args) -{ - PyObject *sequence, *obj; - Py_ssize_t i1, i2; - if (!PyArg_ParseTuple(args, "OnnO", &sequence, &i1, &i2, &obj)) { - return NULL; - } - - int res = PySequence_SetSlice(sequence, i1, i2, obj); - if (res == -1) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -sequence_del_slice(PyObject* self, PyObject *args) -{ - PyObject *sequence; - Py_ssize_t i1, i2; - if (!PyArg_ParseTuple(args, "Onn", &sequence, &i1, &i2)) { - return NULL; - } - - int res = PySequence_DelSlice(sequence, i1, i2); - if (res == -1) { - return NULL; - } - Py_RETURN_NONE; -} - static PyObject * test_pythread_tss_key_state(PyObject *self, PyObject *args) { @@ -2246,72 +2173,6 @@ negative_refcount(PyObject *self, PyObject *Py_UNUSED(args)) #endif -static PyObject * -sequence_getitem(PyObject *self, PyObject *args) -{ - PyObject *seq; - Py_ssize_t i; - if (!PyArg_ParseTuple(args, "On", &seq, &i)) { - return NULL; - } - return PySequence_GetItem(seq, i); -} - - -static PyObject * -sequence_setitem(PyObject *self, PyObject *args) -{ - Py_ssize_t i; - PyObject *seq, *val; - if (!PyArg_ParseTuple(args, "OnO", &seq, &i, &val)) { - return NULL; - } - if (PySequence_SetItem(seq, i, val)) { - return NULL; - } - Py_RETURN_NONE; -} - - -static PyObject * -sequence_delitem(PyObject *self, PyObject *args) -{ - Py_ssize_t i; - PyObject *seq; - if 
(!PyArg_ParseTuple(args, "On", &seq, &i)) { - return NULL; - } - if (PySequence_DelItem(seq, i)) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject * -hasattr_string(PyObject *self, PyObject* args) -{ - PyObject* obj; - PyObject* attr_name; - - if (!PyArg_UnpackTuple(args, "hasattr_string", 2, 2, &obj, &attr_name)) { - return NULL; - } - - if (!PyUnicode_Check(attr_name)) { - PyErr_SetString(PyExc_TypeError, "attribute name must a be string"); - return PyErr_Occurred(); - } - - const char *name_str = PyUnicode_AsUTF8(attr_name); - if (PyObject_HasAttrString(obj, name_str)) { - Py_RETURN_TRUE; - } - else { - Py_RETURN_FALSE; - } -} - - /* Functions for testing C calling conventions (METH_*) are named meth_*, * e.g. "meth_varargs" for METH_VARARGS. * @@ -3464,6 +3325,225 @@ test_weakref_capi(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) } +static PyObject * +test_dict_capi(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) +{ + assert(!PyErr_Occurred()); + + PyObject *dict= NULL, *key = NULL, *missing_key = NULL, *value = NULL; + PyObject *invalid_key = NULL; + int res; + + // test PyDict_New() + dict = PyDict_New(); + if (dict == NULL) { + goto error; + } + + key = PyUnicode_FromString("key"); + if (key == NULL) { + goto error; + } + + missing_key = PyUnicode_FromString("missing_key"); + if (missing_key == NULL) { + goto error; + } + + value = PyUnicode_FromString("value"); + if (value == NULL) { + goto error; + } + + // test PyDict_SetItem() + Py_ssize_t key_refcnt = Py_REFCNT(key); + Py_ssize_t value_refcnt = Py_REFCNT(value); + res = PyDict_SetItem(dict, key, value); + if (res < 0) { + goto error; + } + assert(res == 0); + assert(Py_REFCNT(key) == (key_refcnt + 1)); + assert(Py_REFCNT(value) == (value_refcnt + 1)); + + // test PyDict_SetItemString() + res = PyDict_SetItemString(dict, "key", value); + if (res < 0) { + goto error; + } + assert(res == 0); + assert(Py_REFCNT(key) == (key_refcnt + 1)); + assert(Py_REFCNT(value) == (value_refcnt + 1)); + + // test PyDict_Size() + assert(PyDict_Size(dict) == 1); + + // test PyDict_Contains(), key is present + assert(PyDict_Contains(dict, key) == 1); + + // test PyDict_GetItem(), key is present + assert(PyDict_GetItem(dict, key) == value); + + // test PyDict_GetItemString(), key is present + assert(PyDict_GetItemString(dict, "key") == value); + + // test PyDict_GetItemWithError(), key is present + assert(PyDict_GetItemWithError(dict, key) == value); + assert(!PyErr_Occurred()); + + // test PyDict_GetItemRef(), key is present + PyObject *get_value = Py_Ellipsis; // marker value + assert(PyDict_GetItemRef(dict, key, &get_value) == 1); + assert(get_value == value); + Py_DECREF(get_value); + + // test PyDict_GetItemStringRef(), key is present + get_value = Py_Ellipsis; // marker value + assert(PyDict_GetItemStringRef(dict, "key", &get_value) == 1); + assert(get_value == value); + Py_DECREF(get_value); + + // test PyDict_Contains(), missing key + assert(PyDict_Contains(dict, missing_key) == 0); + + // test PyDict_GetItem(), missing key + assert(PyDict_GetItem(dict, missing_key) == NULL); + assert(!PyErr_Occurred()); + + // test PyDict_GetItemString(), missing key + assert(PyDict_GetItemString(dict, "missing_key") == NULL); + assert(!PyErr_Occurred()); + + // test PyDict_GetItemWithError(), missing key + assert(PyDict_GetItem(dict, missing_key) == NULL); + assert(!PyErr_Occurred()); + + // test PyDict_GetItemRef(), missing key + get_value = Py_Ellipsis; // marker value + assert(PyDict_GetItemRef(dict, missing_key, &get_value) 
== 0); + assert(!PyErr_Occurred()); + assert(get_value == NULL); + + // test PyDict_GetItemStringRef(), missing key + get_value = Py_Ellipsis; // marker value + assert(PyDict_GetItemStringRef(dict, "missing_key", &get_value) == 0); + assert(!PyErr_Occurred()); + assert(get_value == NULL); + + // test PyDict_GetItem(), invalid dict + PyObject *invalid_dict = key; // borrowed reference + assert(PyDict_GetItem(invalid_dict, key) == NULL); + assert(!PyErr_Occurred()); + + // test PyDict_GetItemWithError(), invalid dict + assert(PyDict_GetItemWithError(invalid_dict, key) == NULL); + assert(PyErr_ExceptionMatches(PyExc_SystemError)); + PyErr_Clear(); + + // test PyDict_GetItemRef(), invalid dict + get_value = Py_Ellipsis; // marker value + assert(PyDict_GetItemRef(invalid_dict, key, &get_value) == -1); + assert(PyErr_ExceptionMatches(PyExc_SystemError)); + PyErr_Clear(); + assert(get_value == NULL); + + // test PyDict_GetItemStringRef(), invalid dict + get_value = Py_Ellipsis; // marker value + assert(PyDict_GetItemStringRef(invalid_dict, "key", &get_value) == -1); + assert(PyErr_ExceptionMatches(PyExc_SystemError)); + PyErr_Clear(); + assert(get_value == NULL); + + invalid_key = PyList_New(0); + if (invalid_key == NULL) { + goto error; + } + + // test PyDict_Contains(), invalid key + assert(PyDict_Contains(dict, invalid_key) == -1); + assert(PyErr_ExceptionMatches(PyExc_TypeError)); + PyErr_Clear(); + + // test PyDict_GetItem(), invalid key + assert(PyDict_GetItem(dict, invalid_key) == NULL); + assert(!PyErr_Occurred()); + + // test PyDict_GetItemWithError(), invalid key + assert(PyDict_GetItemWithError(dict, invalid_key) == NULL); + assert(PyErr_ExceptionMatches(PyExc_TypeError)); + PyErr_Clear(); + + // test PyDict_GetItemRef(), invalid key + get_value = Py_Ellipsis; // marker value + assert(PyDict_GetItemRef(dict, invalid_key, &get_value) == -1); + assert(PyErr_ExceptionMatches(PyExc_TypeError)); + PyErr_Clear(); + assert(get_value == NULL); + + // test PyDict_DelItem(), key is present + assert(PyDict_DelItem(dict, key) == 0); + assert(PyDict_Size(dict) == 0); + + // test PyDict_DelItem(), missing key + assert(PyDict_DelItem(dict, missing_key) == -1); + assert(PyErr_ExceptionMatches(PyExc_KeyError)); + PyErr_Clear(); + + // test PyDict_DelItem(), invalid key + assert(PyDict_DelItem(dict, invalid_key) == -1); + assert(PyErr_ExceptionMatches(PyExc_TypeError)); + PyErr_Clear(); + + // test PyDict_Clear() + PyDict_Clear(dict); + + Py_DECREF(dict); + Py_DECREF(key); + Py_DECREF(missing_key); + Py_DECREF(value); + Py_DECREF(invalid_key); + + Py_RETURN_NONE; + +error: + Py_XDECREF(dict); + Py_XDECREF(key); + Py_XDECREF(missing_key); + Py_XDECREF(value); + Py_XDECREF(invalid_key); + return NULL; +} + + +static PyObject * +sys_getobject(PyObject *Py_UNUSED(module), PyObject *arg) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_Parse(arg, "z#", &name, &size)) { + return NULL; + } + PyObject *result = PySys_GetObject(name); + if (result == NULL) { + result = PyExc_AttributeError; + } + return Py_NewRef(result); +} + +static PyObject * +sys_setobject(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *value; + if (!PyArg_ParseTuple(args, "z#O", &name, &size, &value)) { + return NULL; + } + NULLABLE(value); + RETURN_INT(PySys_SetObject(name, value)); +} + + static PyMethodDef TestMethods[] = { {"set_errno", set_errno, METH_VARARGS}, {"test_config", test_config, METH_NOARGS}, @@ -3545,23 +3625,12 @@ static PyMethodDef TestMethods[] = { #ifdef 
W_STOPCODE {"W_STOPCODE", py_w_stopcode, METH_VARARGS}, #endif - {"get_mapping_keys", get_mapping_keys, METH_O}, - {"get_mapping_values", get_mapping_values, METH_O}, - {"get_mapping_items", get_mapping_items, METH_O}, - {"test_mapping_has_key_string", test_mapping_has_key_string, METH_NOARGS}, - {"mapping_has_key", mapping_has_key, METH_VARARGS}, - {"sequence_set_slice", sequence_set_slice, METH_VARARGS}, - {"sequence_del_slice", sequence_del_slice, METH_VARARGS}, {"test_pythread_tss_key_state", test_pythread_tss_key_state, METH_VARARGS}, {"hamt", new_hamt, METH_NOARGS}, {"bad_get", _PyCFunction_CAST(bad_get), METH_FASTCALL}, #ifdef Py_REF_DEBUG {"negative_refcount", negative_refcount, METH_NOARGS}, #endif - {"sequence_getitem", sequence_getitem, METH_VARARGS}, - {"sequence_setitem", sequence_setitem, METH_VARARGS}, - {"sequence_delitem", sequence_delitem, METH_VARARGS}, - {"hasattr_string", hasattr_string, METH_VARARGS}, {"meth_varargs", meth_varargs, METH_VARARGS}, {"meth_varargs_keywords", _PyCFunction_CAST(meth_varargs_keywords), METH_VARARGS|METH_KEYWORDS}, {"meth_o", meth_o, METH_O}, @@ -3609,6 +3678,9 @@ static PyMethodDef TestMethods[] = { {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, {"check_pyimport_addmodule", check_pyimport_addmodule, METH_VARARGS}, {"test_weakref_capi", test_weakref_capi, METH_NOARGS}, + {"test_dict_capi", test_dict_capi, METH_NOARGS}, + {"sys_getobject", sys_getobject, METH_O}, + {"sys_setobject", sys_setobject, METH_VARARGS}, {NULL, NULL} /* sentinel */ }; @@ -4066,7 +4138,7 @@ ContainerNoGC_dealloc(ContainerNoGCobject *self) } static PyMemberDef ContainerNoGC_members[] = { - {"value", T_OBJECT, offsetof(ContainerNoGCobject, value), READONLY, + {"value", _Py_T_OBJECT, offsetof(ContainerNoGCobject, value), Py_READONLY, PyDoc_STR("a container value for test purposes")}, {0} }; @@ -4202,6 +4274,9 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Heaptype(m) < 0) { return NULL; } + if (_PyTestCapi_Init_Abstract(m) < 0) { + return NULL; + } if (_PyTestCapi_Init_Unicode(m) < 0) { return NULL; } @@ -4226,6 +4301,9 @@ PyInit__testcapi(void) if (_PyTestCapi_Init_Float(m) < 0) { return NULL; } + if (_PyTestCapi_Init_Dict(m) < 0) { + return NULL; + } if (_PyTestCapi_Init_Structmember(m) < 0) { return NULL; } diff --git a/Modules/_testclinic.c b/Modules/_testclinic.c index 26cdb4371ca24c..8fa3cc83d871b1 100644 --- a/Modules/_testclinic.c +++ b/Modules/_testclinic.c @@ -6,6 +6,7 @@ #undef NDEBUG #include "Python.h" +#include "pycore_object.h" // _PyObject_IsFreed() // Used for clone_with_conv_f1 and clone_with_conv_v2 @@ -1192,6 +1193,240 @@ clone_with_conv_f2_impl(PyObject *module, custom_t path) } +/*[clinic input] +output push +destination deprstar new file '{dirname}/clinic/_testclinic_depr_star.c.h' +output everything deprstar +#output methoddef_ifndef buffer 1 +output docstring_prototype suppress +output parser_prototype suppress +output impl_definition block +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=f88f37038e00fb0a]*/ + + +// Mock Python version 3.8 +#define _SAVED_PY_VERSION PY_VERSION_HEX +#undef PY_VERSION_HEX +#define PY_VERSION_HEX 0x03080000 + + +#include "clinic/_testclinic_depr_star.c.h" + + +/*[clinic input] +class _testclinic.DeprStarNew "PyObject *" "PyObject" +@classmethod +_testclinic.DeprStarNew.__new__ as depr_star_new + * [from 3.14] + a: object +The deprecation message should use the class name instead of __new__. 
+[clinic start generated code]*/ + +static PyObject * +depr_star_new_impl(PyTypeObject *type, PyObject *a) +/*[clinic end generated code: output=bdbb36244f90cf46 input=f4ae7dafbc23c378]*/ +{ + return type->tp_alloc(type, 0); +} + +static PyTypeObject DeprStarNew = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_name = "_testclinic.DeprStarNew", + .tp_basicsize = sizeof(PyObject), + .tp_new = depr_star_new, + .tp_flags = Py_TPFLAGS_DEFAULT, +}; + + +/*[clinic input] +class _testclinic.DeprStarInit "PyObject *" "PyObject" +_testclinic.DeprStarInit.__init__ as depr_star_init + * [from 3.14] + a: object +The deprecation message should use the class name instead of __init__. +[clinic start generated code]*/ + +static int +depr_star_init_impl(PyObject *self, PyObject *a) +/*[clinic end generated code: output=8d27b43c286d3ecc input=659ebc748d87fa86]*/ +{ + return 0; +} + +static PyTypeObject DeprStarInit = { + PyVarObject_HEAD_INIT(NULL, 0) + .tp_name = "_testclinic.DeprStarInit", + .tp_basicsize = sizeof(PyObject), + .tp_new = PyType_GenericNew, + .tp_init = depr_star_init, + .tp_flags = Py_TPFLAGS_DEFAULT, +}; + + +/*[clinic input] +depr_star_pos0_len1 + * [from 3.14] + a: object +[clinic start generated code]*/ + +static PyObject * +depr_star_pos0_len1_impl(PyObject *module, PyObject *a) +/*[clinic end generated code: output=e1c6c2b423129499 input=089b9aee25381b69]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +depr_star_pos0_len2 + * [from 3.14] + a: object + b: object +[clinic start generated code]*/ + +static PyObject * +depr_star_pos0_len2_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=96df9be39859c7e4 input=65c83a32e01495c6]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +depr_star_pos0_len3_with_kwd + * [from 3.14] + a: object + b: object + c: object + * + d: object +[clinic start generated code]*/ + +static PyObject * +depr_star_pos0_len3_with_kwd_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=7f2531eda837052f input=b33f620f57d9270f]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +depr_star_pos1_len1_opt + a: object + * [from 3.14] + b: object = None +[clinic start generated code]*/ + +static PyObject * +depr_star_pos1_len1_opt_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=b5b4e326ee3b216f input=4a4b8ff72eae9ff7]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +depr_star_pos1_len1 + a: object + * [from 3.14] + b: object +[clinic start generated code]*/ + +static PyObject * +depr_star_pos1_len1_impl(PyObject *module, PyObject *a, PyObject *b) +/*[clinic end generated code: output=eab92e37d5b0a480 input=1e7787a9fe5f62a0]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +depr_star_pos1_len2_with_kwd + a: object + * [from 3.14] + b: object + c: object + * + d: object +[clinic start generated code]*/ + +static PyObject * +depr_star_pos1_len2_with_kwd_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=3bccab672b7cfbb8 input=6bc7bd742fa8be15]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +depr_star_pos2_len1 + a: object + b: object + * [from 3.14] + c: object +[clinic start generated code]*/ + +static PyObject * +depr_star_pos2_len1_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c) +/*[clinic end generated code: output=20f5b230e9beeb70 input=5fc3e1790dec00d5]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +depr_star_pos2_len2 + a: object + b: object + * [from 3.14] + c: 
object + d: object +[clinic start generated code]*/ + +static PyObject * +depr_star_pos2_len2_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d) +/*[clinic end generated code: output=9f90ed8fbce27d7a input=9cc8003b89d38779]*/ +{ + Py_RETURN_NONE; +} + + +/*[clinic input] +depr_star_pos2_len2_with_kwd + a: object + b: object + * [from 3.14] + c: object + d: object + * + e: object +[clinic start generated code]*/ + +static PyObject * +depr_star_pos2_len2_with_kwd_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d, PyObject *e) +/*[clinic end generated code: output=05432c4f20527215 input=831832d90534da91]*/ +{ + Py_RETURN_NONE; +} + + +// Reset PY_VERSION_HEX +#undef PY_VERSION_HEX +#define PY_VERSION_HEX _SAVED_PY_VERSION +#undef _SAVED_PY_VERSION + + +/*[clinic input] +output pop +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=e7c7c42daced52b0]*/ + static PyMethodDef tester_methods[] = { TEST_EMPTY_FUNCTION_METHODDEF OBJECTS_CONVERTER_METHODDEF @@ -1247,6 +1482,16 @@ static PyMethodDef tester_methods[] = { CLONE_F2_METHODDEF CLONE_WITH_CONV_F1_METHODDEF CLONE_WITH_CONV_F2_METHODDEF + + DEPR_STAR_POS0_LEN1_METHODDEF + DEPR_STAR_POS0_LEN2_METHODDEF + DEPR_STAR_POS0_LEN3_WITH_KWD_METHODDEF + DEPR_STAR_POS1_LEN1_OPT_METHODDEF + DEPR_STAR_POS1_LEN1_METHODDEF + DEPR_STAR_POS1_LEN2_WITH_KWD_METHODDEF + DEPR_STAR_POS2_LEN1_METHODDEF + DEPR_STAR_POS2_LEN2_METHODDEF + DEPR_STAR_POS2_LEN2_WITH_KWD_METHODDEF {NULL, NULL} }; @@ -1260,7 +1505,21 @@ static struct PyModuleDef _testclinic_module = { PyMODINIT_FUNC PyInit__testclinic(void) { - return PyModule_Create(&_testclinic_module); + PyObject *m = PyModule_Create(&_testclinic_module); + if (m == NULL) { + return NULL; + } + if (PyModule_AddType(m, &DeprStarNew) < 0) { + goto error; + } + if (PyModule_AddType(m, &DeprStarInit) < 0) { + goto error; + } + return m; + +error: + Py_DECREF(m); + return NULL; } #undef RETURN_PACKED_ARGS diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 74c932fa921cd0..d1082c7dae8aee 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -10,28 +10,31 @@ #undef NDEBUG #include "Python.h" -#include "frameobject.h" -#include "interpreteridobject.h" // _PyInterpreterID_LookUp() #include "pycore_atomic_funcs.h" // _Py_atomic_int_get() #include "pycore_bitutils.h" // _Py_bswap32() #include "pycore_bytesobject.h" // _PyBytes_Find() #include "pycore_compile.h" // _PyCompile_CodeGen, _PyCompile_OptimizeCfg, _PyCompile_Assemble, _PyCompile_CleanDoc #include "pycore_ceval.h" // _PyEval_AddPendingCall +#include "pycore_dict.h" // _PyDictOrValues_GetValues #include "pycore_fileutils.h" // _Py_normpath #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_gc.h" // PyGC_Head #include "pycore_hashtable.h" // _Py_hashtable_new() #include "pycore_initconfig.h" // _Py_GetConfigsAsDict() #include "pycore_interp.h" // _PyInterpreterState_GetConfigCopy() +#include "pycore_object.h" // _PyObject_IsFreed() #include "pycore_pathconfig.h" // _PyPathConfig_ClearGlobal() #include "pycore_pyerrors.h" // _Py_UTF8_Edit_Cost() #include "pycore_pystate.h" // _PyThreadState_GET() + +#include "frameobject.h" +#include "interpreteridobject.h" // PyInterpreterID_LookUp() #include "osdefs.h" // MAXPATHLEN #include "clinic/_testinternalcapi.c.h" #ifdef MS_WINDOWS -# include // struct timeval +# include // struct timeval #endif @@ -1081,7 +1084,7 @@ pending_identify(PyObject *self, PyObject *args) if 
(!PyArg_ParseTuple(args, "O:pending_identify", &interpid)) { return NULL; } - PyInterpreterState *interp = _PyInterpreterID_LookUp(interpid); + PyInterpreterState *interp = PyInterpreterID_LookUp(interpid); if (interp == NULL) { if (!PyErr_Occurred()) { PyErr_SetString(PyExc_ValueError, "interpreter not found"); @@ -1431,7 +1434,7 @@ test_atexit(PyObject *self, PyObject *Py_UNUSED(args)) PyThreadState *tstate = Py_NewInterpreter(); struct atexit_data data = {0}; - int res = _Py_AtExit(tstate->interp, callback, (void *)&data); + int res = PyUnstable_AtExit(tstate->interp, callback, (void *)&data); Py_EndInterpreter(tstate); PyThreadState_Swap(oldts); if (res < 0) { @@ -1446,6 +1449,123 @@ test_atexit(PyObject *self, PyObject *Py_UNUSED(args)) } +static PyObject * +test_pyobject_is_freed(const char *test_name, PyObject *op) +{ + if (!_PyObject_IsFreed(op)) { + PyErr_SetString(PyExc_AssertionError, + "object is not seen as freed"); + return NULL; + } + Py_RETURN_NONE; +} + +static PyObject * +check_pyobject_null_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) +{ + PyObject *op = NULL; + return test_pyobject_is_freed("check_pyobject_null_is_freed", op); +} + + +static PyObject * +check_pyobject_uninitialized_is_freed(PyObject *self, + PyObject *Py_UNUSED(args)) +{ + PyObject *op = (PyObject *)PyObject_Malloc(sizeof(PyObject)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_SET_REFCNT(op, 1); + /* object fields like ob_type are uninitialized! */ + return test_pyobject_is_freed("check_pyobject_uninitialized_is_freed", op); +} + + +static PyObject * +check_pyobject_forbidden_bytes_is_freed(PyObject *self, + PyObject *Py_UNUSED(args)) +{ + /* Allocate an incomplete PyObject structure: truncate 'ob_type' field */ + PyObject *op = (PyObject *)PyObject_Malloc(offsetof(PyObject, ob_type)); + if (op == NULL) { + return NULL; + } + /* Initialize reference count to avoid early crash in ceval or GC */ + Py_SET_REFCNT(op, 1); + /* ob_type field is after the memory block: part of "forbidden bytes" + when using debug hooks on memory allocators! */ + return test_pyobject_is_freed("check_pyobject_forbidden_bytes_is_freed", op); +} + + +static PyObject * +check_pyobject_freed_is_freed(PyObject *self, PyObject *Py_UNUSED(args)) +{ + /* This test would fail if run with the address sanitizer */ +#ifdef _Py_ADDRESS_SANITIZER + Py_RETURN_NONE; +#else + PyObject *op = PyObject_CallNoArgs((PyObject *)&PyBaseObject_Type); + if (op == NULL) { + return NULL; + } + Py_TYPE(op)->tp_dealloc(op); + /* Reset reference count to avoid early crash in ceval or GC */ + Py_SET_REFCNT(op, 1); + /* object memory is freed! 
*/ + return test_pyobject_is_freed("check_pyobject_freed_is_freed", op); +#endif +} + + +static PyObject * +test_pymem_getallocatorsname(PyObject *self, PyObject *args) +{ + const char *name = _PyMem_GetCurrentAllocatorName(); + if (name == NULL) { + PyErr_SetString(PyExc_RuntimeError, "cannot get allocators name"); + return NULL; + } + return PyUnicode_FromString(name); +} + +static PyObject * +get_object_dict_values(PyObject *self, PyObject *obj) +{ + PyTypeObject *type = Py_TYPE(obj); + if (!_PyType_HasFeature(type, Py_TPFLAGS_MANAGED_DICT)) { + Py_RETURN_NONE; + } + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(obj); + if (!_PyDictOrValues_IsValues(dorv)) { + Py_RETURN_NONE; + } + PyDictValues *values = _PyDictOrValues_GetValues(dorv); + PyDictKeysObject *keys = ((PyHeapTypeObject *)type)->ht_cached_keys; + assert(keys != NULL); + int size = (int)keys->dk_nentries; + assert(size >= 0); + PyObject *res = PyTuple_New(size); + if (res == NULL) { + return NULL; + } + _Py_DECLARE_STR(anon_null, ""); + for(int i = 0; i < size; i++) { + PyObject *item = values->values[i]; + if (item == NULL) { + item = &_Py_STR(anon_null); + } + else { + Py_INCREF(item); + } + PyTuple_SET_ITEM(res, i, item); + } + return res; +} + + static PyMethodDef module_functions[] = { {"get_configs", get_configs, METH_NOARGS}, {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, @@ -1502,6 +1622,14 @@ static PyMethodDef module_functions[] = { {"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL}, {"_PyUnicode_TransformDecimalAndSpaceToASCII", unicode_transformdecimalandspacetoascii, METH_O}, {"test_atexit", test_atexit, METH_NOARGS}, + {"check_pyobject_forbidden_bytes_is_freed", + check_pyobject_forbidden_bytes_is_freed, METH_NOARGS}, + {"check_pyobject_freed_is_freed", check_pyobject_freed_is_freed, METH_NOARGS}, + {"check_pyobject_null_is_freed", check_pyobject_null_is_freed, METH_NOARGS}, + {"check_pyobject_uninitialized_is_freed", + check_pyobject_uninitialized_is_freed, METH_NOARGS}, + {"pymem_getallocatorsname", test_pymem_getallocatorsname, METH_NOARGS}, + {"get_object_dict_values", get_object_dict_values, METH_O}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index ca71b6156b005d..fdef06168bfc86 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -383,32 +383,20 @@ static int execfunc(PyObject *m) /* Add a custom type */ temp = PyType_FromSpec(&Example_Type_spec); - if (temp == NULL) { - goto fail; - } - if (PyModule_AddObject(m, "Example", temp) != 0) { - Py_DECREF(temp); + if (PyModule_Add(m, "Example", temp) != 0) { goto fail; } /* Add an exception type */ temp = PyErr_NewException("_testimportexec.error", NULL, NULL); - if (temp == NULL) { - goto fail; - } - if (PyModule_AddObject(m, "error", temp) != 0) { - Py_DECREF(temp); + if (PyModule_Add(m, "error", temp) != 0) { goto fail; } /* Add Str */ temp = PyType_FromSpec(&Str_Type_spec); - if (temp == NULL) { - goto fail; - } - if (PyModule_AddObject(m, "Str", temp) != 0) { - Py_DECREF(temp); + if (PyModule_Add(m, "Str", temp) != 0) { goto fail; } @@ -857,11 +845,7 @@ meth_state_access_exec(PyObject *m) } temp = PyType_FromModuleAndSpec(m, &StateAccessType_spec, NULL); - if (temp == NULL) { - return -1; - } - if (PyModule_AddObject(m, "StateAccessType", temp) != 0) { - Py_DECREF(temp); + if (PyModule_Add(m, "StateAccessType", temp) != 0) { return -1; } diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index d8a797f34dbc4b..52f44d04523459 100644 --- 
a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -9,7 +9,7 @@ #include "pycore_pystate.h" // _PyThreadState_SetCurrent() #include "pycore_weakref.h" // _PyWeakref_GET_REF() #include // offsetof() -#include "structmember.h" // PyMemberDef + #ifdef HAVE_SIGNAL_H # include // SIGINT @@ -293,7 +293,7 @@ unlock it. A thread attempting to lock a lock that it has already locked\n\ will block until another thread unlocks it. Deadlocks may ensue."); static PyMemberDef lock_type_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(lockobject, in_weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(lockobject, in_weakreflist), Py_READONLY}, {NULL}, }; @@ -575,7 +575,7 @@ static PyMethodDef rlock_methods[] = { static PyMemberDef rlock_type_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(rlockobject, in_weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(rlockobject, in_weakreflist), Py_READONLY}, {NULL}, }; @@ -679,7 +679,7 @@ localdummy_dealloc(localdummyobject *self) } static PyMemberDef local_dummy_type_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(localdummyobject, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(localdummyobject, weakreflist), Py_READONLY}, {NULL}, }; @@ -959,7 +959,7 @@ local_setattro(localobject *self, PyObject *name, PyObject *v) static PyObject *local_getattro(localobject *, PyObject *); static PyMemberDef local_type_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(localobject, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(localobject, weakreflist), Py_READONLY}, {NULL}, }; diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index 76af803bd6eefb..663b4117683629 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -317,7 +317,6 @@ typedef struct { const Tcl_ObjType *WideIntType; const Tcl_ObjType *BignumType; const Tcl_ObjType *ListType; - const Tcl_ObjType *ProcBodyType; const Tcl_ObjType *StringType; } TkappObject; @@ -595,7 +594,6 @@ Tkapp_New(const char *screenName, const char *className, v->WideIntType = Tcl_GetObjType("wideInt"); v->BignumType = Tcl_GetObjType("bignum"); v->ListType = Tcl_GetObjType("list"); - v->ProcBodyType = Tcl_GetObjType("procbody"); v->StringType = Tcl_GetObjType("string"); /* Delete the 'exit' command, which can screw things up */ @@ -874,8 +872,9 @@ asBignumObj(PyObject *value) return NULL; } hexchars += neg + 2; /* skip sign and "0x" */ - mp_init(&bigValue); - if (mp_read_radix(&bigValue, hexchars, 16) != MP_OKAY) { + if (mp_init(&bigValue) != MP_OKAY || + mp_read_radix(&bigValue, hexchars, 16) != MP_OKAY) + { mp_clear(&bigValue); Py_DECREF(hexstr); PyErr_NoMemory(); @@ -912,16 +911,13 @@ AsObj(PyObject *value) if (PyLong_CheckExact(value)) { int overflow; long longValue; -#ifdef TCL_WIDE_INT_TYPE Tcl_WideInt wideValue; -#endif longValue = PyLong_AsLongAndOverflow(value, &overflow); if (!overflow) { return Tcl_NewLongObj(longValue); } /* If there is an overflow in the long conversion, fall through to wideInt handling. */ -#ifdef TCL_WIDE_INT_TYPE if (_PyLong_AsByteArray((PyLongObject *)value, (unsigned char *)(void *)&wideValue, sizeof(wideValue), @@ -930,7 +926,6 @@ AsObj(PyObject *value) return Tcl_NewWideIntObj(wideValue); } PyErr_Clear(); -#endif /* If there is an overflow in the wideInt conversion, fall through to bignum handling. 
*/ return asBignumObj(value); @@ -1174,10 +1169,6 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) return result; } - if (value->typePtr == tkapp->ProcBodyType) { - /* fall through: return tcl object. */ - } - if (value->typePtr == tkapp->StringType) { return unicodeFromTclObj(value); } @@ -3197,7 +3188,7 @@ static struct PyModuleDef _tkintermodule = { PyMODINIT_FUNC PyInit__tkinter(void) { - PyObject *m, *uexe, *cexe, *o; + PyObject *m, *uexe, *cexe; tcl_lock = PyThread_allocate_lock(); if (tcl_lock == NULL) @@ -3207,17 +3198,11 @@ PyInit__tkinter(void) if (m == NULL) return NULL; - o = PyErr_NewException("_tkinter.TclError", NULL, NULL); - if (o == NULL) { + Tkinter_TclError = PyErr_NewException("_tkinter.TclError", NULL, NULL); + if (PyModule_AddObjectRef(m, "TclError", Tkinter_TclError)) { Py_DECREF(m); return NULL; } - if (PyModule_AddObject(m, "TclError", Py_NewRef(o))) { - Py_DECREF(o); - Py_DECREF(m); - return NULL; - } - Tkinter_TclError = o; if (PyModule_AddIntConstant(m, "READABLE", TCL_READABLE)) { Py_DECREF(m); @@ -3264,41 +3249,23 @@ PyInit__tkinter(void) return NULL; } - o = PyType_FromSpec(&Tkapp_Type_spec); - if (o == NULL) { + Tkapp_Type = PyType_FromSpec(&Tkapp_Type_spec); + if (PyModule_AddObjectRef(m, "TkappType", Tkapp_Type)) { Py_DECREF(m); return NULL; } - if (PyModule_AddObject(m, "TkappType", o)) { - Py_DECREF(o); - Py_DECREF(m); - return NULL; - } - Tkapp_Type = o; - o = PyType_FromSpec(&Tktt_Type_spec); - if (o == NULL) { - Py_DECREF(m); - return NULL; - } - if (PyModule_AddObject(m, "TkttType", o)) { - Py_DECREF(o); + Tktt_Type = PyType_FromSpec(&Tktt_Type_spec); + if (PyModule_AddObjectRef(m, "TkttType", Tktt_Type)) { Py_DECREF(m); return NULL; } - Tktt_Type = o; - o = PyType_FromSpec(&PyTclObject_Type_spec); - if (o == NULL) { - Py_DECREF(m); - return NULL; - } - if (PyModule_AddObject(m, "Tcl_Obj", o)) { - Py_DECREF(o); + PyTclObject_Type = PyType_FromSpec(&PyTclObject_Type_spec); + if (PyModule_AddObjectRef(m, "Tcl_Obj", PyTclObject_Type)) { Py_DECREF(m); return NULL; } - PyTclObject_Type = o; /* This helps the dynamic loader; in Unicode aware Tcl versions diff --git a/Modules/_weakref.c b/Modules/_weakref.c index b5d80cbd731a28..4e2862e7467c3d 100644 --- a/Modules/_weakref.c +++ b/Modules/_weakref.c @@ -1,4 +1,5 @@ #include "Python.h" +#include "pycore_dict.h" // _PyDict_DelItemIf() #include "pycore_object.h" // _PyObject_GET_WEAKREFS_LISTPTR #include "pycore_weakref.h" // _PyWeakref_IS_DEAD() @@ -143,27 +144,19 @@ weakref_functions[] = { static int weakref_exec(PyObject *module) { - Py_INCREF(&_PyWeakref_RefType); - if (PyModule_AddObject(module, "ref", (PyObject *) &_PyWeakref_RefType) < 0) { - Py_DECREF(&_PyWeakref_RefType); + if (PyModule_AddObjectRef(module, "ref", (PyObject *) &_PyWeakref_RefType) < 0) { return -1; } - Py_INCREF(&_PyWeakref_RefType); - if (PyModule_AddObject(module, "ReferenceType", + if (PyModule_AddObjectRef(module, "ReferenceType", (PyObject *) &_PyWeakref_RefType) < 0) { - Py_DECREF(&_PyWeakref_RefType); return -1; } - Py_INCREF(&_PyWeakref_ProxyType); - if (PyModule_AddObject(module, "ProxyType", + if (PyModule_AddObjectRef(module, "ProxyType", (PyObject *) &_PyWeakref_ProxyType) < 0) { - Py_DECREF(&_PyWeakref_ProxyType); return -1; } - Py_INCREF(&_PyWeakref_CallableProxyType); - if (PyModule_AddObject(module, "CallableProxyType", + if (PyModule_AddObjectRef(module, "CallableProxyType", (PyObject *) &_PyWeakref_CallableProxyType) < 0) { - Py_DECREF(&_PyWeakref_CallableProxyType); return -1; } diff --git a/Modules/_winapi.c 
b/Modules/_winapi.c index 313c12a34c6725..eec33499b983fe 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -38,7 +38,7 @@ #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing() #include "pycore_pystate.h" // _PyInterpreterState_GET -#include "structmember.h" // PyMemberDef + #ifndef WINDOWS_LEAN_AND_MEAN @@ -54,13 +54,13 @@ PyLong_FromUnsignedLong((unsigned long) handle) #define PYNUM_TO_HANDLE(obj) ((HANDLE)PyLong_AsUnsignedLong(obj)) #define F_POINTER "k" -#define T_POINTER T_ULONG +#define T_POINTER Py_T_ULONG #else #define HANDLE_TO_PYNUM(handle) \ PyLong_FromUnsignedLongLong((unsigned long long) handle) #define PYNUM_TO_HANDLE(obj) ((HANDLE)PyLong_AsUnsignedLongLong(obj)) #define F_POINTER "K" -#define T_POINTER T_ULONGLONG +#define T_POINTER Py_T_ULONGLONG #endif #define F_HANDLE F_POINTER @@ -322,7 +322,7 @@ static PyMethodDef overlapped_methods[] = { static PyMemberDef overlapped_members[] = { {"event", T_HANDLE, offsetof(OverlappedObject, overlapped) + offsetof(OVERLAPPED, hEvent), - READONLY, "overlapped event handle"}, + Py_READONLY, "overlapped event handle"}, {NULL} }; @@ -1539,40 +1539,56 @@ _winapi.LCMapStringEx locale: LPCWSTR flags: DWORD - src: LPCWSTR + src: unicode [clinic start generated code]*/ static PyObject * _winapi_LCMapStringEx_impl(PyObject *module, LPCWSTR locale, DWORD flags, - LPCWSTR src) -/*[clinic end generated code: output=cf4713d80e2b47c9 input=9fe26f95d5ab0001]*/ + PyObject *src) +/*[clinic end generated code: output=b90e6b26e028ff0a input=3e3dcd9b8164012f]*/ { if (flags & (LCMAP_SORTHANDLE | LCMAP_HASH | LCMAP_BYTEREV | LCMAP_SORTKEY)) { return PyErr_Format(PyExc_ValueError, "unsupported flags"); } - int dest_size = LCMapStringEx(locale, flags, src, -1, NULL, 0, + Py_ssize_t src_size; + wchar_t *src_ = PyUnicode_AsWideCharString(src, &src_size); + if (!src_) { + return NULL; + } + if (src_size > INT_MAX) { + PyMem_Free(src_); + PyErr_SetString(PyExc_OverflowError, "input string is too long"); + return NULL; + } + + int dest_size = LCMapStringEx(locale, flags, src_, (int)src_size, NULL, 0, NULL, NULL, 0); - if (dest_size == 0) { - return PyErr_SetFromWindowsErr(0); + if (dest_size <= 0) { + DWORD error = GetLastError(); + PyMem_Free(src_); + return PyErr_SetFromWindowsErr(error); } wchar_t* dest = PyMem_NEW(wchar_t, dest_size); if (dest == NULL) { + PyMem_Free(src_); return PyErr_NoMemory(); } - int nmapped = LCMapStringEx(locale, flags, src, -1, dest, dest_size, + int nmapped = LCMapStringEx(locale, flags, src_, (int)src_size, dest, dest_size, NULL, NULL, 0); - if (nmapped == 0) { + if (nmapped <= 0) { DWORD error = GetLastError(); + PyMem_Free(src_); PyMem_DEL(dest); return PyErr_SetFromWindowsErr(error); } - PyObject *ret = PyUnicode_FromWideChar(dest, dest_size - 1); + PyMem_Free(src_); + PyObject *ret = PyUnicode_FromWideChar(dest, nmapped); PyMem_DEL(dest); return ret; diff --git a/Modules/_xxinterpchannelsmodule.c b/Modules/_xxinterpchannelsmodule.c index 82472555ec7d62..1e418414767db8 100644 --- a/Modules/_xxinterpchannelsmodule.c +++ b/Modules/_xxinterpchannelsmodule.c @@ -1,13 +1,8 @@ /* interpreters module */ /* low-level access to interpreter primitives */ -#ifndef Py_BUILD_CORE_BUILTIN -# define Py_BUILD_CORE_MODULE 1 -#endif - #include "Python.h" #include "interpreteridobject.h" -#include "pycore_atexit.h" // _Py_AtExit() /* @@ -2140,7 +2135,7 @@ channel_list_interpreters(PyObject *self, PyObject *args, PyObject *kwds) goto except; } if (res) { - id_obj = 
_PyInterpreterState_GetIDObject(interp); + id_obj = PyInterpreterState_GetIDObject(interp); if (id_obj == NULL) { goto except; } @@ -2407,7 +2402,7 @@ module_exec(PyObject *mod) // Make sure chnnels drop objects owned by this interpreter PyInterpreterState *interp = _get_current_interp(); - _Py_AtExit(interp, clear_interpreter, (void *)interp); + PyUnstable_AtExit(interp, clear_interpreter, (void *)interp); return 0; diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index d2e0593872c5f0..31373f8fdf8c71 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1,12 +1,7 @@ /* interpreters module */ /* low-level access to interpreter primitives */ -#ifndef Py_BUILD_CORE_BUILTIN -# define Py_BUILD_CORE_MODULE 1 -#endif - #include "Python.h" -#include "pycore_interp.h" // _PyInterpreterState_GetMainModule() #include "interpreteridobject.h" @@ -400,7 +395,7 @@ _run_script(PyInterpreterState *interp, const char *codestr, _sharedns *shared, _sharedexception *sharedexc) { PyObject *excval = NULL; - PyObject *main_mod = _PyInterpreterState_GetMainModule(interp); + PyObject *main_mod = PyUnstable_InterpreterState_GetMainModule(interp); if (main_mod == NULL) { goto error; } @@ -531,7 +526,7 @@ interp_create(PyObject *self, PyObject *args, PyObject *kwds) } assert(tstate != NULL); PyInterpreterState *interp = PyThreadState_GetInterpreter(tstate); - PyObject *idobj = _PyInterpreterState_GetIDObject(interp); + PyObject *idobj = PyInterpreterState_GetIDObject(interp); if (idobj == NULL) { // XXX Possible GILState issues? save_tstate = PyThreadState_Swap(tstate); @@ -561,7 +556,7 @@ interp_destroy(PyObject *self, PyObject *args, PyObject *kwds) } // Look up the interpreter. - PyInterpreterState *interp = _PyInterpreterID_LookUp(id); + PyInterpreterState *interp = PyInterpreterID_LookUp(id); if (interp == NULL) { return NULL; } @@ -616,7 +611,7 @@ interp_list_all(PyObject *self, PyObject *Py_UNUSED(ignored)) interp = PyInterpreterState_Head(); while (interp != NULL) { - id = _PyInterpreterState_GetIDObject(interp); + id = PyInterpreterState_GetIDObject(interp); if (id == NULL) { Py_DECREF(ids); return NULL; @@ -648,7 +643,7 @@ interp_get_current(PyObject *self, PyObject *Py_UNUSED(ignored)) if (interp == NULL) { return NULL; } - return _PyInterpreterState_GetIDObject(interp); + return PyInterpreterState_GetIDObject(interp); } PyDoc_STRVAR(get_current_doc, @@ -662,7 +657,7 @@ interp_get_main(PyObject *self, PyObject *Py_UNUSED(ignored)) { // Currently, 0 is always the main interpreter. int64_t id = 0; - return _PyInterpreterID_New(id); + return PyInterpreterID_New(id); } PyDoc_STRVAR(get_main_doc, @@ -684,7 +679,7 @@ interp_run_string(PyObject *self, PyObject *args, PyObject *kwds) } // Look up the interpreter. 
- PyInterpreterState *interp = _PyInterpreterID_LookUp(id); + PyInterpreterState *interp = PyInterpreterID_LookUp(id); if (interp == NULL) { return NULL; } @@ -750,7 +745,7 @@ interp_is_running(PyObject *self, PyObject *args, PyObject *kwds) return NULL; } - PyInterpreterState *interp = _PyInterpreterID_LookUp(id); + PyInterpreterState *interp = PyInterpreterID_LookUp(id); if (interp == NULL) { return NULL; } @@ -808,7 +803,7 @@ module_exec(PyObject *mod) } // PyInterpreterID - if (PyModule_AddType(mod, &_PyInterpreterID_Type) < 0) { + if (PyModule_AddType(mod, &PyInterpreterID_Type) < 0) { goto error; } diff --git a/Modules/_xxtestfuzz/fuzzer.c b/Modules/_xxtestfuzz/fuzzer.c index 37d402824853f0..54f8a42273401f 100644 --- a/Modules/_xxtestfuzz/fuzzer.c +++ b/Modules/_xxtestfuzz/fuzzer.c @@ -10,7 +10,12 @@ See the source code for LLVMFuzzerTestOneInput for details. */ +#ifndef Py_BUILD_CORE +# define Py_BUILD_CORE 1 +#endif + #include +#include "pycore_pyhash.h" // _Py_HashBytes() #include #include diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index 0ced9d08b9eb75..fb0b4b40b2ad5d 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -4,7 +4,7 @@ #include "Python.h" #include "pycore_long.h" // _PyLong_GetOne() -#include "structmember.h" + #include #include @@ -2692,13 +2692,13 @@ static PyMethodDef zoneinfo_methods[] = { static PyMemberDef zoneinfo_members[] = { {.name = "key", .offset = offsetof(PyZoneInfo_ZoneInfo, key), - .type = T_OBJECT_EX, - .flags = READONLY, + .type = Py_T_OBJECT_EX, + .flags = Py_READONLY, .doc = NULL}, {.name = "__weaklistoffset__", .offset = offsetof(PyZoneInfo_ZoneInfo, weakreflist), - .type = T_PYSSIZET, - .flags = READONLY}, + .type = Py_T_PYSSIZET, + .flags = Py_READONLY}, {NULL}, /* Sentinel */ }; diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 0000a8d637eb56..a40a5b75b63d4f 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -11,7 +11,7 @@ #include "pycore_call.h" // _PyObject_CallMethod() #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_bytesobject.h" // _PyBytes_Repeat -#include "structmember.h" // PyMemberDef + #include // offsetof() #include @@ -2895,7 +2895,7 @@ itemsize -- the length in bytes of one array item\n\ static PyObject *array_iter(arrayobject *ao); static struct PyMemberDef array_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(arrayobject, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(arrayobject, weakreflist), Py_READONLY}, {NULL}, }; @@ -3163,9 +3163,8 @@ array_modexec(PyObject *m) CREATE_TYPE(m, state->ArrayIterType, &arrayiter_spec); Py_SET_TYPE(state->ArrayIterType, &PyType_Type); - if (PyModule_AddObject(m, "ArrayType", - Py_NewRef((PyObject *)state->ArrayType)) < 0) { - Py_DECREF((PyObject *)state->ArrayType); + if (PyModule_AddObjectRef(m, "ArrayType", + (PyObject *)state->ArrayType) < 0) { return -1; } @@ -3193,8 +3192,7 @@ array_modexec(PyObject *m) *p++ = (char)descr->typecode; } typecodes = PyUnicode_DecodeASCII(buffer, p - buffer, NULL); - if (PyModule_AddObject(m, "typecodes", typecodes) < 0) { - Py_XDECREF(typecodes); + if (PyModule_Add(m, "typecodes", typecodes) < 0) { return -1; } diff --git a/Modules/atexitmodule.c b/Modules/atexitmodule.c index 5882d405636400..cec177cfc2f9c8 100644 --- a/Modules/atexitmodule.c +++ b/Modules/atexitmodule.c @@ -24,8 +24,8 @@ get_atexit_state(void) int -_Py_AtExit(PyInterpreterState *interp, - atexit_datacallbackfunc func, void *data) +PyUnstable_AtExit(PyInterpreterState *interp, 
+ atexit_datacallbackfunc func, void *data) { assert(interp == _PyInterpreterState_GET()); atexit_callback *callback = PyMem_Malloc(sizeof(atexit_callback)); diff --git a/Modules/binascii.c b/Modules/binascii.c index cf9328795c2bcc..a87a2ef2e89927 100644 --- a/Modules/binascii.c +++ b/Modules/binascii.c @@ -1253,32 +1253,20 @@ static struct PyMethodDef binascii_module_methods[] = { PyDoc_STRVAR(doc_binascii, "Conversion between binary data and ASCII"); static int -binascii_exec(PyObject *module) { - int result; +binascii_exec(PyObject *module) +{ binascii_state *state = PyModule_GetState(module); if (state == NULL) { return -1; } state->Error = PyErr_NewException("binascii.Error", PyExc_ValueError, NULL); - if (state->Error == NULL) { - return -1; - } - Py_INCREF(state->Error); - result = PyModule_AddObject(module, "Error", state->Error); - if (result == -1) { - Py_DECREF(state->Error); + if (PyModule_AddObjectRef(module, "Error", state->Error) < 0) { return -1; } state->Incomplete = PyErr_NewException("binascii.Incomplete", NULL, NULL); - if (state->Incomplete == NULL) { - return -1; - } - Py_INCREF(state->Incomplete); - result = PyModule_AddObject(module, "Incomplete", state->Incomplete); - if (result == -1) { - Py_DECREF(state->Incomplete); + if (PyModule_AddObjectRef(module, "Incomplete", state->Incomplete) < 0) { return -1; } diff --git a/Modules/cjkcodecs/cjkcodecs.h b/Modules/cjkcodecs/cjkcodecs.h index ee588785e7403f..766f82983025e4 100644 --- a/Modules/cjkcodecs/cjkcodecs.h +++ b/Modules/cjkcodecs/cjkcodecs.h @@ -398,11 +398,7 @@ register_maps(PyObject *module) strcpy(mhname + sizeof("__map_") - 1, h->charset); PyObject *capsule = PyCapsule_New((void *)h, MAP_CAPSULE, NULL); - if (capsule == NULL) { - return -1; - } - if (PyModule_AddObject(module, mhname, capsule) < 0) { - Py_DECREF(capsule); + if (PyModule_Add(module, mhname, capsule) < 0) { return -1; } } diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c index 3febd1a832f9cc..5d3c16a98423ba 100644 --- a/Modules/cjkcodecs/multibytecodec.c +++ b/Modules/cjkcodecs/multibytecodec.c @@ -9,10 +9,12 @@ #endif #include "Python.h" -#include "structmember.h" // PyMemberDef + #include "multibytecodec.h" #include "clinic/multibytecodec.c.h" +#include // offsetof() + #define MODULE_NAME "_multibytecodec" typedef struct { @@ -1611,9 +1613,9 @@ static struct PyMethodDef mbstreamreader_methods[] = { }; static PyMemberDef mbstreamreader_members[] = { - {"stream", T_OBJECT, + {"stream", _Py_T_OBJECT, offsetof(MultibyteStreamReaderObject, stream), - READONLY, NULL}, + Py_READONLY, NULL}, {NULL,} }; @@ -1919,9 +1921,9 @@ static struct PyMethodDef mbstreamwriter_methods[] = { }; static PyMemberDef mbstreamwriter_members[] = { - {"stream", T_OBJECT, + {"stream", _Py_T_OBJECT, offsetof(MultibyteStreamWriterObject, stream), - READONLY, NULL}, + Py_READONLY, NULL}, {NULL,} }; diff --git a/Modules/clinic/_dbmmodule.c.h b/Modules/clinic/_dbmmodule.c.h index 172dc4b9d5793e..98aac07423c8ab 100644 --- a/Modules/clinic/_dbmmodule.c.h +++ b/Modules/clinic/_dbmmodule.c.h @@ -138,6 +138,28 @@ _dbm_dbm_setdefault(dbmobject *self, PyTypeObject *cls, PyObject *const *args, P return return_value; } +PyDoc_STRVAR(_dbm_dbm_clear__doc__, +"clear($self, /)\n" +"--\n" +"\n" +"Remove all items from the database."); + +#define _DBM_DBM_CLEAR_METHODDEF \ + {"clear", _PyCFunction_CAST(_dbm_dbm_clear), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _dbm_dbm_clear__doc__}, + +static PyObject * +_dbm_dbm_clear_impl(dbmobject *self, PyTypeObject 
*cls); + +static PyObject * +_dbm_dbm_clear(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + if (nargs) { + PyErr_SetString(PyExc_TypeError, "clear() takes no arguments"); + return NULL; + } + return _dbm_dbm_clear_impl(self, cls); +} + PyDoc_STRVAR(dbmopen__doc__, "open($module, filename, flags=\'r\', mode=0o666, /)\n" "--\n" @@ -200,4 +222,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=28dcf736654137c2 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=b3053c67ecfcc29c input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_gdbmmodule.c.h b/Modules/clinic/_gdbmmodule.c.h index 5c6aeeee7789f7..76f6db318f8cc5 100644 --- a/Modules/clinic/_gdbmmodule.c.h +++ b/Modules/clinic/_gdbmmodule.c.h @@ -247,6 +247,28 @@ _gdbm_gdbm_sync(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_s return _gdbm_gdbm_sync_impl(self, cls); } +PyDoc_STRVAR(_gdbm_gdbm_clear__doc__, +"clear($self, /)\n" +"--\n" +"\n" +"Remove all items from the database."); + +#define _GDBM_GDBM_CLEAR_METHODDEF \ + {"clear", _PyCFunction_CAST(_gdbm_gdbm_clear), METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_clear__doc__}, + +static PyObject * +_gdbm_gdbm_clear_impl(gdbmobject *self, PyTypeObject *cls); + +static PyObject * +_gdbm_gdbm_clear(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + if (nargs) { + PyErr_SetString(PyExc_TypeError, "clear() takes no arguments"); + return NULL; + } + return _gdbm_gdbm_clear_impl(self, cls); +} + PyDoc_STRVAR(dbmopen__doc__, "open($module, filename, flags=\'r\', mode=0o666, /)\n" "--\n" @@ -322,4 +344,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=c6e721d82335adb3 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8c613cbd88e57480 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_opcode.c.h b/Modules/clinic/_opcode.c.h index e6381fa73a5506..e1fc5ba17f7078 100644 --- a/Modules/clinic/_opcode.c.h +++ b/Modules/clinic/_opcode.c.h @@ -612,4 +612,40 @@ _opcode_get_specialization_stats(PyObject *module, PyObject *Py_UNUSED(ignored)) { return _opcode_get_specialization_stats_impl(module); } -/*[clinic end generated code: output=e507bf14fb2796f8 input=a9049054013a1b77]*/ + +PyDoc_STRVAR(_opcode_get_intrinsic1_descs__doc__, +"get_intrinsic1_descs($module, /)\n" +"--\n" +"\n" +"Return a list of names of the unary intrinsics."); + +#define _OPCODE_GET_INTRINSIC1_DESCS_METHODDEF \ + {"get_intrinsic1_descs", (PyCFunction)_opcode_get_intrinsic1_descs, METH_NOARGS, _opcode_get_intrinsic1_descs__doc__}, + +static PyObject * +_opcode_get_intrinsic1_descs_impl(PyObject *module); + +static PyObject * +_opcode_get_intrinsic1_descs(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _opcode_get_intrinsic1_descs_impl(module); +} + +PyDoc_STRVAR(_opcode_get_intrinsic2_descs__doc__, +"get_intrinsic2_descs($module, /)\n" +"--\n" +"\n" +"Return a list of names of the binary intrinsics."); + +#define _OPCODE_GET_INTRINSIC2_DESCS_METHODDEF \ + {"get_intrinsic2_descs", (PyCFunction)_opcode_get_intrinsic2_descs, METH_NOARGS, _opcode_get_intrinsic2_descs__doc__}, + +static PyObject * +_opcode_get_intrinsic2_descs_impl(PyObject *module); + +static PyObject * +_opcode_get_intrinsic2_descs(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return _opcode_get_intrinsic2_descs_impl(module); +} 
+/*[clinic end generated code: output=d85de5f2887b3661 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testclinic_depr_star.c.h b/Modules/clinic/_testclinic_depr_star.c.h new file mode 100644 index 00000000000000..0c2fa088268c6d --- /dev/null +++ b/Modules/clinic/_testclinic_depr_star.c.h @@ -0,0 +1,974 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) +# include "pycore_gc.h" // PyGC_Head +# include "pycore_runtime.h" // _Py_ID() +#endif + + +PyDoc_STRVAR(depr_star_new__doc__, +"DeprStarNew(a)\n" +"--\n" +"\n" +"The deprecation message should use the class name instead of __new__.\n" +"\n" +"Note: Passing positional arguments to _testclinic.DeprStarNew() is\n" +"deprecated. Parameter \'a\' will become a keyword-only parameter in\n" +"Python 3.14.\n" +""); + +static PyObject * +depr_star_new_impl(PyTypeObject *type, PyObject *a); + +static PyObject * +depr_star_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "DeprStarNew", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject * const *fastargs; + Py_ssize_t nargs = PyTuple_GET_SIZE(args); + PyObject *a; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'a' in the clinic input of" \ + " '_testclinic.DeprStarNew.__new__' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'a' in the clinic input of" \ + " '_testclinic.DeprStarNew.__new__' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'a' in the clinic input of" \ + " '_testclinic.DeprStarNew.__new__' to be keyword-only." + # endif + #endif + if (nargs == 1) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing positional arguments to _testclinic.DeprStarNew() is " + "deprecated. Parameter 'a' will become a keyword-only parameter " + "in Python 3.14.", 1)) + { + goto exit; + } + } + fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 0, argsbuf); + if (!fastargs) { + goto exit; + } + a = fastargs[0]; + return_value = depr_star_new_impl(type, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_init__doc__, +"DeprStarInit(a)\n" +"--\n" +"\n" +"The deprecation message should use the class name instead of __init__.\n" +"\n" +"Note: Passing positional arguments to _testclinic.DeprStarInit() is\n" +"deprecated. 
Parameter \'a\' will become a keyword-only parameter in\n" +"Python 3.14.\n" +""); + +static int +depr_star_init_impl(PyObject *self, PyObject *a); + +static int +depr_star_init(PyObject *self, PyObject *args, PyObject *kwargs) +{ + int return_value = -1; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "DeprStarInit", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject * const *fastargs; + Py_ssize_t nargs = PyTuple_GET_SIZE(args); + PyObject *a; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'a' in the clinic input of" \ + " '_testclinic.DeprStarInit.__init__' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'a' in the clinic input of" \ + " '_testclinic.DeprStarInit.__init__' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'a' in the clinic input of" \ + " '_testclinic.DeprStarInit.__init__' to be keyword-only." + # endif + #endif + if (nargs == 1) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing positional arguments to _testclinic.DeprStarInit() is " + "deprecated. Parameter 'a' will become a keyword-only parameter " + "in Python 3.14.", 1)) + { + goto exit; + } + } + fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser, 1, 1, 0, argsbuf); + if (!fastargs) { + goto exit; + } + a = fastargs[0]; + return_value = depr_star_init_impl(self, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_pos0_len1__doc__, +"depr_star_pos0_len1($module, /, a)\n" +"--\n" +"\n" +"Note: Passing positional arguments to depr_star_pos0_len1() is\n" +"deprecated. Parameter \'a\' will become a keyword-only parameter in\n" +"Python 3.14.\n" +""); + +#define DEPR_STAR_POS0_LEN1_METHODDEF \ + {"depr_star_pos0_len1", _PyCFunction_CAST(depr_star_pos0_len1), METH_FASTCALL|METH_KEYWORDS, depr_star_pos0_len1__doc__}, + +static PyObject * +depr_star_pos0_len1_impl(PyObject *module, PyObject *a); + +static PyObject * +depr_star_pos0_len1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "depr_star_pos0_len1", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[1]; + PyObject *a; + + // Emit compiler warnings when we get to Python 3.14. 
+ #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'a' in the clinic input of" \ + " 'depr_star_pos0_len1' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'a' in the clinic input of" \ + " 'depr_star_pos0_len1' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'a' in the clinic input of" \ + " 'depr_star_pos0_len1' to be keyword-only." + # endif + #endif + if (nargs == 1) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing positional arguments to depr_star_pos0_len1() is " + "deprecated. Parameter 'a' will become a keyword-only parameter " + "in Python 3.14.", 1)) + { + goto exit; + } + } + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + return_value = depr_star_pos0_len1_impl(module, a); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_pos0_len2__doc__, +"depr_star_pos0_len2($module, /, a, b)\n" +"--\n" +"\n" +"Note: Passing positional arguments to depr_star_pos0_len2() is\n" +"deprecated. Parameters \'a\' and \'b\' will become keyword-only parameters\n" +"in Python 3.14.\n" +""); + +#define DEPR_STAR_POS0_LEN2_METHODDEF \ + {"depr_star_pos0_len2", _PyCFunction_CAST(depr_star_pos0_len2), METH_FASTCALL|METH_KEYWORDS, depr_star_pos0_len2__doc__}, + +static PyObject * +depr_star_pos0_len2_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +depr_star_pos0_len2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "depr_star_pos0_len2", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'a' and 'b' in the clinic " \ + "input of 'depr_star_pos0_len2' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'a' and 'b' in the clinic " \ + "input of 'depr_star_pos0_len2' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'a' and 'b' in the clinic " \ + "input of 'depr_star_pos0_len2' to be keyword-only." + # endif + #endif + if (nargs > 0 && nargs <= 2) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing positional arguments to depr_star_pos0_len2() is " + "deprecated. 
Parameters 'a' and 'b' will become keyword-only " + "parameters in Python 3.14.", 1)) + { + goto exit; + } + } + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = depr_star_pos0_len2_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_pos0_len3_with_kwd__doc__, +"depr_star_pos0_len3_with_kwd($module, /, a, b, c, *, d)\n" +"--\n" +"\n" +"Note: Passing positional arguments to depr_star_pos0_len3_with_kwd()\n" +"is deprecated. Parameters \'a\', \'b\' and \'c\' will become keyword-only\n" +"parameters in Python 3.14.\n" +""); + +#define DEPR_STAR_POS0_LEN3_WITH_KWD_METHODDEF \ + {"depr_star_pos0_len3_with_kwd", _PyCFunction_CAST(depr_star_pos0_len3_with_kwd), METH_FASTCALL|METH_KEYWORDS, depr_star_pos0_len3_with_kwd__doc__}, + +static PyObject * +depr_star_pos0_len3_with_kwd_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +depr_star_pos0_len3_with_kwd(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "depr_star_pos0_len3_with_kwd", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + PyObject *a; + PyObject *b; + PyObject *c; + PyObject *d; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'a', 'b' and 'c' in the " \ + "clinic input of 'depr_star_pos0_len3_with_kwd' to be " \ + "keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'a', 'b' and 'c' in the " \ + "clinic input of 'depr_star_pos0_len3_with_kwd' to be " \ + "keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'a', 'b' and 'c' in the " \ + "clinic input of 'depr_star_pos0_len3_with_kwd' to be " \ + "keyword-only." + # endif + #endif + if (nargs > 0 && nargs <= 3) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing positional arguments to depr_star_pos0_len3_with_kwd() " + "is deprecated. Parameters 'a', 'b' and 'c' will become " + "keyword-only parameters in Python 3.14.", 1)) + { + goto exit; + } + } + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 3, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + c = args[2]; + d = args[3]; + return_value = depr_star_pos0_len3_with_kwd_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_pos1_len1_opt__doc__, +"depr_star_pos1_len1_opt($module, /, a, b=None)\n" +"--\n" +"\n" +"Note: Passing 2 positional arguments to depr_star_pos1_len1_opt() is\n" +"deprecated. 
Parameter \'b\' will become a keyword-only parameter in\n" +"Python 3.14.\n" +""); + +#define DEPR_STAR_POS1_LEN1_OPT_METHODDEF \ + {"depr_star_pos1_len1_opt", _PyCFunction_CAST(depr_star_pos1_len1_opt), METH_FASTCALL|METH_KEYWORDS, depr_star_pos1_len1_opt__doc__}, + +static PyObject * +depr_star_pos1_len1_opt_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +depr_star_pos1_len1_opt(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "depr_star_pos1_len1_opt", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1; + PyObject *a; + PyObject *b = Py_None; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'b' in the clinic input of" \ + " 'depr_star_pos1_len1_opt' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'b' in the clinic input of" \ + " 'depr_star_pos1_len1_opt' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'b' in the clinic input of" \ + " 'depr_star_pos1_len1_opt' to be keyword-only." + # endif + #endif + if (nargs == 2) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing 2 positional arguments to depr_star_pos1_len1_opt() is " + "deprecated. Parameter 'b' will become a keyword-only parameter " + "in Python 3.14.", 1)) + { + goto exit; + } + } + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + if (!noptargs) { + goto skip_optional_pos; + } + b = args[1]; +skip_optional_pos: + return_value = depr_star_pos1_len1_opt_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_pos1_len1__doc__, +"depr_star_pos1_len1($module, /, a, b)\n" +"--\n" +"\n" +"Note: Passing 2 positional arguments to depr_star_pos1_len1() is\n" +"deprecated. 
Parameter \'b\' will become a keyword-only parameter in\n" +"Python 3.14.\n" +""); + +#define DEPR_STAR_POS1_LEN1_METHODDEF \ + {"depr_star_pos1_len1", _PyCFunction_CAST(depr_star_pos1_len1), METH_FASTCALL|METH_KEYWORDS, depr_star_pos1_len1__doc__}, + +static PyObject * +depr_star_pos1_len1_impl(PyObject *module, PyObject *a, PyObject *b); + +static PyObject * +depr_star_pos1_len1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 2 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "depr_star_pos1_len1", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[2]; + PyObject *a; + PyObject *b; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'b' in the clinic input of" \ + " 'depr_star_pos1_len1' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'b' in the clinic input of" \ + " 'depr_star_pos1_len1' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'b' in the clinic input of" \ + " 'depr_star_pos1_len1' to be keyword-only." + # endif + #endif + if (nargs == 2) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing 2 positional arguments to depr_star_pos1_len1() is " + "deprecated. Parameter 'b' will become a keyword-only parameter " + "in Python 3.14.", 1)) + { + goto exit; + } + } + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 2, 2, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + return_value = depr_star_pos1_len1_impl(module, a, b); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_pos1_len2_with_kwd__doc__, +"depr_star_pos1_len2_with_kwd($module, /, a, b, c, *, d)\n" +"--\n" +"\n" +"Note: Passing more than 1 positional argument to\n" +"depr_star_pos1_len2_with_kwd() is deprecated. 
Parameters \'b\' and \'c\'\n" +"will become keyword-only parameters in Python 3.14.\n" +""); + +#define DEPR_STAR_POS1_LEN2_WITH_KWD_METHODDEF \ + {"depr_star_pos1_len2_with_kwd", _PyCFunction_CAST(depr_star_pos1_len2_with_kwd), METH_FASTCALL|METH_KEYWORDS, depr_star_pos1_len2_with_kwd__doc__}, + +static PyObject * +depr_star_pos1_len2_with_kwd_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +depr_star_pos1_len2_with_kwd(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "depr_star_pos1_len2_with_kwd", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + PyObject *a; + PyObject *b; + PyObject *c; + PyObject *d; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'b' and 'c' in the clinic " \ + "input of 'depr_star_pos1_len2_with_kwd' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'b' and 'c' in the clinic " \ + "input of 'depr_star_pos1_len2_with_kwd' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'b' and 'c' in the clinic " \ + "input of 'depr_star_pos1_len2_with_kwd' to be keyword-only." + # endif + #endif + if (nargs > 1 && nargs <= 3) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing more than 1 positional argument to " + "depr_star_pos1_len2_with_kwd() is deprecated. Parameters 'b' and" + " 'c' will become keyword-only parameters in Python 3.14.", 1)) + { + goto exit; + } + } + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 3, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + c = args[2]; + d = args[3]; + return_value = depr_star_pos1_len2_with_kwd_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_pos2_len1__doc__, +"depr_star_pos2_len1($module, /, a, b, c)\n" +"--\n" +"\n" +"Note: Passing 3 positional arguments to depr_star_pos2_len1() is\n" +"deprecated. 
Parameter \'c\' will become a keyword-only parameter in\n" +"Python 3.14.\n" +""); + +#define DEPR_STAR_POS2_LEN1_METHODDEF \ + {"depr_star_pos2_len1", _PyCFunction_CAST(depr_star_pos2_len1), METH_FASTCALL|METH_KEYWORDS, depr_star_pos2_len1__doc__}, + +static PyObject * +depr_star_pos2_len1_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c); + +static PyObject * +depr_star_pos2_len1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 3 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "depr_star_pos2_len1", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + PyObject *a; + PyObject *b; + PyObject *c; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'c' in the clinic input of" \ + " 'depr_star_pos2_len1' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'c' in the clinic input of" \ + " 'depr_star_pos2_len1' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'c' in the clinic input of" \ + " 'depr_star_pos2_len1' to be keyword-only." + # endif + #endif + if (nargs == 3) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing 3 positional arguments to depr_star_pos2_len1() is " + "deprecated. Parameter 'c' will become a keyword-only parameter " + "in Python 3.14.", 1)) + { + goto exit; + } + } + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 3, 3, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + c = args[2]; + return_value = depr_star_pos2_len1_impl(module, a, b, c); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_pos2_len2__doc__, +"depr_star_pos2_len2($module, /, a, b, c, d)\n" +"--\n" +"\n" +"Note: Passing more than 2 positional arguments to\n" +"depr_star_pos2_len2() is deprecated. 
Parameters \'c\' and \'d\' will\n" +"become keyword-only parameters in Python 3.14.\n" +""); + +#define DEPR_STAR_POS2_LEN2_METHODDEF \ + {"depr_star_pos2_len2", _PyCFunction_CAST(depr_star_pos2_len2), METH_FASTCALL|METH_KEYWORDS, depr_star_pos2_len2__doc__}, + +static PyObject * +depr_star_pos2_len2_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d); + +static PyObject * +depr_star_pos2_len2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 4 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", "d", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "depr_star_pos2_len2", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[4]; + PyObject *a; + PyObject *b; + PyObject *c; + PyObject *d; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'c' and 'd' in the clinic " \ + "input of 'depr_star_pos2_len2' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'c' and 'd' in the clinic " \ + "input of 'depr_star_pos2_len2' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'c' and 'd' in the clinic " \ + "input of 'depr_star_pos2_len2' to be keyword-only." + # endif + #endif + if (nargs > 2 && nargs <= 4) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing more than 2 positional arguments to " + "depr_star_pos2_len2() is deprecated. Parameters 'c' and 'd' will" + " become keyword-only parameters in Python 3.14.", 1)) + { + goto exit; + } + } + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 4, 4, 0, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + c = args[2]; + d = args[3]; + return_value = depr_star_pos2_len2_impl(module, a, b, c, d); + +exit: + return return_value; +} + +PyDoc_STRVAR(depr_star_pos2_len2_with_kwd__doc__, +"depr_star_pos2_len2_with_kwd($module, /, a, b, c, d, *, e)\n" +"--\n" +"\n" +"Note: Passing more than 2 positional arguments to\n" +"depr_star_pos2_len2_with_kwd() is deprecated. 
Parameters \'c\' and \'d\'\n" +"will become keyword-only parameters in Python 3.14.\n" +""); + +#define DEPR_STAR_POS2_LEN2_WITH_KWD_METHODDEF \ + {"depr_star_pos2_len2_with_kwd", _PyCFunction_CAST(depr_star_pos2_len2_with_kwd), METH_FASTCALL|METH_KEYWORDS, depr_star_pos2_len2_with_kwd__doc__}, + +static PyObject * +depr_star_pos2_len2_with_kwd_impl(PyObject *module, PyObject *a, PyObject *b, + PyObject *c, PyObject *d, PyObject *e); + +static PyObject * +depr_star_pos2_len2_with_kwd(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 5 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(a), &_Py_ID(b), &_Py_ID(c), &_Py_ID(d), &_Py_ID(e), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"a", "b", "c", "d", "e", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "depr_star_pos2_len2_with_kwd", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[5]; + PyObject *a; + PyObject *b; + PyObject *c; + PyObject *d; + PyObject *e; + + // Emit compiler warnings when we get to Python 3.14. + #if PY_VERSION_HEX >= 0x030e00C0 + # error \ + "In _testclinic.c, update parameter(s) 'c' and 'd' in the clinic " \ + "input of 'depr_star_pos2_len2_with_kwd' to be keyword-only." + #elif PY_VERSION_HEX >= 0x030e00A0 + # ifdef _MSC_VER + # pragma message ( \ + "In _testclinic.c, update parameter(s) 'c' and 'd' in the clinic " \ + "input of 'depr_star_pos2_len2_with_kwd' to be keyword-only.") + # else + # warning \ + "In _testclinic.c, update parameter(s) 'c' and 'd' in the clinic " \ + "input of 'depr_star_pos2_len2_with_kwd' to be keyword-only." + # endif + #endif + if (nargs > 2 && nargs <= 4) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "Passing more than 2 positional arguments to " + "depr_star_pos2_len2_with_kwd() is deprecated. 
Parameters 'c' and" + " 'd' will become keyword-only parameters in Python 3.14.", 1)) + { + goto exit; + } + } + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 4, 4, 1, argsbuf); + if (!args) { + goto exit; + } + a = args[0]; + b = args[1]; + c = args[2]; + d = args[3]; + e = args[4]; + return_value = depr_star_pos2_len2_with_kwd_impl(module, a, b, c, d, e); + +exit: + return return_value; +} +/*[clinic end generated code: output=18ab056f6cc06d7e input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h index 8f46b8f1095e98..35ac053547121c 100644 --- a/Modules/clinic/_winapi.c.h +++ b/Modules/clinic/_winapi.c.h @@ -884,7 +884,7 @@ PyDoc_STRVAR(_winapi_LCMapStringEx__doc__, static PyObject * _winapi_LCMapStringEx_impl(PyObject *module, LPCWSTR locale, DWORD flags, - LPCWSTR src); + PyObject *src); static PyObject * _winapi_LCMapStringEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) @@ -911,16 +911,16 @@ _winapi_LCMapStringEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, static const char * const _keywords[] = {"locale", "flags", "src", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, - .format = "O&kO&:LCMapStringEx", + .format = "O&kU:LCMapStringEx", .kwtuple = KWTUPLE, }; #undef KWTUPLE LPCWSTR locale = NULL; DWORD flags; - LPCWSTR src = NULL; + PyObject *src; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, - _PyUnicode_WideCharString_Converter, &locale, &flags, _PyUnicode_WideCharString_Converter, &src)) { + _PyUnicode_WideCharString_Converter, &locale, &flags, &src)) { goto exit; } return_value = _winapi_LCMapStringEx_impl(module, locale, flags, src); @@ -928,8 +928,6 @@ _winapi_LCMapStringEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, exit: /* Cleanup for locale */ PyMem_Free((void *)locale); - /* Cleanup for src */ - PyMem_Free((void *)src); return return_value; } @@ -1480,4 +1478,4 @@ _winapi_CopyFile2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO return return_value; } -/*[clinic end generated code: output=f32fe6ecdbffd74d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ff91ab5cae8961dd input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index a8f6ce026a331b..81e3162e679d16 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -7366,7 +7366,7 @@ PyDoc_STRVAR(os_pwrite__doc__, "Write bytes to a file descriptor starting at a particular offset.\n" "\n" "Write buffer to fd, starting at offset bytes from the beginning of\n" -"the file. Returns the number of bytes writte. Does not change the\n" +"the file. Returns the number of bytes written. 
Does not change the\n" "current file offset."); #define OS_PWRITE_METHODDEF \ @@ -11981,4 +11981,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=a7e8c3df2db09717 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a85a386b212b0631 input=a9049054013a1b77]*/ diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 97644a7cee774f..35a35091bf4511 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -25,12 +25,13 @@ #include "Python.h" #include "pycore_context.h" +#include "pycore_dict.h" // _PyDict_MaybeUntrack() #include "pycore_initconfig.h" -#include "pycore_interp.h" // PyInterpreterState.gc +#include "pycore_interp.h" // PyInterpreterState.gc #include "pycore_object.h" #include "pycore_pyerrors.h" -#include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_weakref.h" // _PyWeakref_ClearRef() +#include "pycore_pystate.h" // _PyThreadState_GET() +#include "pycore_weakref.h" // _PyWeakref_ClearRef() #include "pydtrace.h" typedef struct _gc_runtime_state GCState; @@ -459,6 +460,7 @@ update_refs(PyGC_Head *containers) static int visit_decref(PyObject *op, void *parent) { + OBJECT_STAT_INC(object_visits); _PyObject_ASSERT(_PyObject_CAST(parent), !_PyObject_IsFreed(op)); if (_PyObject_IS_GC(op)) { @@ -497,6 +499,7 @@ subtract_refs(PyGC_Head *containers) static int visit_reachable(PyObject *op, PyGC_Head *reachable) { + OBJECT_STAT_INC(object_visits); if (!_PyObject_IS_GC(op)) { return 0; } @@ -724,6 +727,7 @@ clear_unreachable_mask(PyGC_Head *unreachable) static int visit_move(PyObject *op, PyGC_Head *tolist) { + OBJECT_STAT_INC(object_visits); if (_PyObject_IS_GC(op)) { PyGC_Head *gc = AS_GC(op); if (gc_is_collecting(gc)) { @@ -1194,6 +1198,12 @@ gc_collect_main(PyThreadState *tstate, int generation, Py_ssize_t *n_collected, Py_ssize_t *n_uncollectable, int nofail) { + GC_STAT_ADD(generation, collections, 1); +#ifdef Py_STATS + if (_py_stats) { + _py_stats->object_stats.object_visits = 0; + } +#endif int i; Py_ssize_t m = 0; /* # objects collected */ Py_ssize_t n = 0; /* # unreachable objects that couldn't be collected */ @@ -1350,6 +1360,15 @@ gc_collect_main(PyThreadState *tstate, int generation, stats->collected += m; stats->uncollectable += n; + GC_STAT_ADD(generation, objects_collected, m); +#ifdef Py_STATS + if (_py_stats) { + GC_STAT_ADD(generation, object_visits, + _py_stats->object_stats.object_visits); + _py_stats->object_stats.object_visits = 0; + } +#endif + if (PyDTrace_GC_DONE_ENABLED()) { PyDTrace_GC_DONE(n + m); } diff --git a/Modules/getpath.c b/Modules/getpath.c index abe7c3c3c30a9a..76e3c7e65249f4 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -1,11 +1,13 @@ /* Return the initial module search path. 
*/ #include "Python.h" +#include "pycore_fileutils.h" // _Py_abspath() +#include "pycore_initconfig.h" // _PyStatus_EXCEPTION() +#include "pycore_pathconfig.h" // _PyPathConfig_ReadGlobal() +#include "pycore_pymem.h" // _PyMem_RawWcsdup() + #include "marshal.h" // PyMarshal_ReadObjectFromString #include "osdefs.h" // DELIM -#include "pycore_initconfig.h" -#include "pycore_fileutils.h" -#include "pycore_pathconfig.h" #include #ifdef MS_WINDOWS diff --git a/Modules/grpmodule.c b/Modules/grpmodule.c index 57cdde6064c24e..f5709296334a8f 100644 --- a/Modules/grpmodule.c +++ b/Modules/grpmodule.c @@ -65,8 +65,14 @@ mkgrent(PyObject *module, struct group *p) Py_DECREF(v); return NULL; } - for (member = p->gr_mem; *member != NULL; member++) { - PyObject *x = PyUnicode_DecodeFSDefault(*member); + for (member = p->gr_mem; ; member++) { + char *group_member; + // member can be misaligned + memcpy(&group_member, member, sizeof(group_member)); + if (group_member == NULL) { + break; + } + PyObject *x = PyUnicode_DecodeFSDefault(group_member); if (x == NULL || PyList_Append(w, x) != 0) { Py_XDECREF(x); Py_DECREF(w); diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index f5f7bf33bf8f4b..0ab6d330e87793 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -5,7 +5,7 @@ #include "pycore_typeobject.h" // _PyType_GetModuleState() #include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_tuple.h" // _PyTuple_ITEMS() -#include "structmember.h" // PyMemberDef + #include // offsetof() /* Itertools module written and maintained @@ -1090,7 +1090,7 @@ static PyMethodDef tee_methods[] = { }; static PyMemberDef tee_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(teeobject, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(teeobject, weakreflist), Py_READONLY}, {NULL}, }; diff --git a/Modules/md5module.c b/Modules/md5module.c index 2122f8b18baf6e..5463effb507de6 100644 --- a/Modules/md5module.c +++ b/Modules/md5module.c @@ -356,13 +356,7 @@ md5_exec(PyObject *m) st->md5_type = (PyTypeObject *)PyType_FromModuleAndSpec( m, &md5_type_spec, NULL); - if (st->md5_type == NULL) { - return -1; - } - - Py_INCREF((PyObject *)st->md5_type); - if (PyModule_AddObject(m, "MD5Type", (PyObject *)st->md5_type) < 0) { - Py_DECREF(st->md5_type); + if (PyModule_AddObjectRef(m, "MD5Type", (PyObject *)st->md5_type) < 0) { return -1; } diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index c1cd5b0efaa3d2..5c131570123560 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -25,7 +25,7 @@ #include #include "pycore_bytesobject.h" // _PyBytes_Find() #include "pycore_fileutils.h" // _Py_stat_struct -#include "structmember.h" // PyMemberDef + #include // offsetof() // to support MS_WINDOWS_SYSTEM OpenFileMappingA / CreateFileMappingA @@ -883,7 +883,7 @@ mmap_madvise_method(mmap_object *self, PyObject *args) #endif // HAVE_MADVISE static struct PyMemberDef mmap_object_members[] = { - {"__weaklistoffset__", T_PYSSIZET, offsetof(mmap_object, weakreflist), READONLY}, + {"__weaklistoffset__", Py_T_PYSSIZET, offsetof(mmap_object, weakreflist), Py_READONLY}, {NULL}, }; @@ -1579,9 +1579,7 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) static int mmap_exec(PyObject *module) { - Py_INCREF(PyExc_OSError); - if (PyModule_AddObject(module, "error", PyExc_OSError) < 0) { - Py_DECREF(PyExc_OSError); + if (PyModule_AddObjectRef(module, "error", PyExc_OSError) < 0) { return -1; } diff --git a/Modules/overlapped.c b/Modules/overlapped.c 
index 18899509c87712..271f6ce7e2d916 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -8,7 +8,7 @@ Check itemsize */ #include "Python.h" -#include "structmember.h" // PyMemberDef + #define WINDOWS_LEAN_AND_MEAN #include @@ -17,10 +17,10 @@ #if defined(MS_WIN32) && !defined(MS_WIN64) # define F_POINTER "k" -# define T_POINTER T_ULONG +# define T_POINTER Py_T_ULONG #else # define F_POINTER "K" -# define T_POINTER T_ULONGLONG +# define T_POINTER Py_T_ULONGLONG #endif #define F_HANDLE F_POINTER @@ -1942,12 +1942,12 @@ static PyMethodDef Overlapped_methods[] = { }; static PyMemberDef Overlapped_members[] = { - {"error", T_ULONG, + {"error", Py_T_ULONG, offsetof(OverlappedObject, error), - READONLY, "Error from last operation"}, + Py_READONLY, "Error from last operation"}, {"event", T_HANDLE, offsetof(OverlappedObject, overlapped) + offsetof(OVERLAPPED, hEvent), - READONLY, "Overlapped event handle"}, + Py_READONLY, "Overlapped event handle"}, {NULL} }; @@ -1996,12 +1996,7 @@ static PyMethodDef overlapped_functions[] = { #define WINAPI_CONSTANT(fmt, con) \ do { \ - PyObject *value = Py_BuildValue(fmt, con); \ - if (value == NULL) { \ - return -1; \ - } \ - if (PyModule_AddObject(module, #con, value) < 0 ) { \ - Py_DECREF(value); \ + if (PyModule_Add(module, #con, Py_BuildValue(fmt, con)) < 0 ) { \ return -1; \ } \ } while (0) diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 23bf978d0cdbf1..a42e41c081e5b7 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -36,7 +36,7 @@ # endif /* MS_WINDOWS_DESKTOP | MS_WINDOWS_SYSTEM */ #endif -#include "structmember.h" // PyMemberDef + #ifndef MS_WINDOWS # include "posixmodule.h" #else @@ -285,7 +285,7 @@ corresponding Unix manual entries for more information on calls."); # undef HAVE_SCHED_SETAFFINITY #endif -#if defined(HAVE_SYS_XATTR_H) && defined(__linux__) && !defined(__FreeBSD_kernel__) && !defined(__GNU__) +#if defined(HAVE_SYS_XATTR_H) && defined(HAVE_LINUX_LIMITS_H) && !defined(__FreeBSD_kernel__) && !defined(__GNU__) # define USE_XATTRS # include // Needed for XATTR_SIZE_MAX on musl libc. #endif @@ -11283,13 +11283,13 @@ os.pwrite -> Py_ssize_t Write bytes to a file descriptor starting at a particular offset. Write buffer to fd, starting at offset bytes from the beginning of -the file. Returns the number of bytes writte. Does not change the +the file. Returns the number of bytes written. Does not change the current file offset. 
[clinic start generated code]*/ static Py_ssize_t os_pwrite_impl(PyObject *module, int fd, Py_buffer *buffer, Py_off_t offset) -/*[clinic end generated code: output=c74da630758ee925 input=19903f1b3dd26377]*/ +/*[clinic end generated code: output=c74da630758ee925 input=614acbc7e5a0339a]*/ { Py_ssize_t size; int async_err = 0; @@ -14822,9 +14822,9 @@ os_DirEntry___fspath___impl(DirEntry *self) } static PyMemberDef DirEntry_members[] = { - {"name", T_OBJECT_EX, offsetof(DirEntry, name), READONLY, + {"name", Py_T_OBJECT_EX, offsetof(DirEntry, name), Py_READONLY, "the entry's base filename, relative to scandir() \"path\" argument"}, - {"path", T_OBJECT_EX, offsetof(DirEntry, path), READONLY, + {"path", Py_T_OBJECT_EX, offsetof(DirEntry, path), Py_READONLY, "the entry's full path name; equivalent to os.path.join(scandir_path, entry.name)"}, {NULL} }; diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c index bd8a98a46579a3..a8ce84c0bb9f05 100644 --- a/Modules/pyexpat.c +++ b/Modules/pyexpat.c @@ -4,11 +4,11 @@ #include "Python.h" #include "pycore_import.h" // _PyImport_SetModule() -#include +#include "pycore_pyhash.h" // _Py_HashSecret -#include "structmember.h" // PyMemberDef +#include +#include // offsetof() #include "expat.h" - #include "pyexpat.h" /* Do not emit Clinic output to a file as that wreaks havoc with conditionally @@ -1470,7 +1470,7 @@ xmlparse_specified_attributes_setter(xmlparseobject *self, PyObject *v, void *cl } static PyMemberDef xmlparse_members[] = { - {"intern", T_OBJECT, offsetof(xmlparseobject, intern), READONLY, NULL}, + {"intern", _Py_T_OBJECT, offsetof(xmlparseobject, intern), Py_READONLY, NULL}, {NULL} }; @@ -1655,8 +1655,7 @@ add_submodule(PyObject *mod, const char *fullname) Py_DECREF(mod_name); /* gives away the reference to the submodule */ - if (PyModule_AddObject(mod, name, submodule) < 0) { - Py_DECREF(submodule); + if (PyModule_Add(mod, name, submodule) < 0) { return NULL; } @@ -1886,10 +1885,7 @@ add_features(PyObject *mod) goto error; } } - if (PyModule_AddObject(mod, "features", list) < 0) { - goto error; - } - return 0; + return PyModule_Add(mod, "features", list); error: Py_DECREF(list); @@ -1958,8 +1954,7 @@ pyexpat_exec(PyObject *mod) info.major, info.minor, info.micro); - if (PyModule_AddObject(mod, "version_info", versionInfo) < 0) { - Py_DECREF(versionInfo); + if (PyModule_Add(mod, "version_info", versionInfo) < 0) { return -1; } } @@ -2039,8 +2034,7 @@ pyexpat_exec(PyObject *mod) return -1; } - if (PyModule_AddObject(mod, "expat_CAPI", capi_object) < 0) { - Py_DECREF(capi_object); + if (PyModule_Add(mod, "expat_CAPI", capi_object) < 0) { return -1; } diff --git a/Modules/readline.c b/Modules/readline.c index a592919692cb83..6729a09cb0da5e 100644 --- a/Modules/readline.c +++ b/Modules/readline.c @@ -1313,6 +1313,9 @@ rlhandler(char *text) static char * readline_until_enter_or_signal(const char *prompt, int *signal) { + // Defined in Parser/myreadline.c + extern PyThreadState *_PyOS_ReadlineTState; + char * not_done_reading = ""; fd_set selectset; diff --git a/Modules/resource.c b/Modules/resource.c index 3c89468c48c56e..4614f5e98cc888 100644 --- a/Modules/resource.c +++ b/Modules/resource.c @@ -372,9 +372,7 @@ resource_exec(PyObject *module) } while (0) /* Add some symbolic constants to the module */ - Py_INCREF(PyExc_OSError); - if (PyModule_AddObject(module, "error", PyExc_OSError) < 0) { - Py_DECREF(PyExc_OSError); + if (PyModule_AddObjectRef(module, "error", PyExc_OSError) < 0) { return -1; } @@ -502,12 +500,7 @@ resource_exec(PyObject *module) { 
v = PyLong_FromLong((long) RLIM_INFINITY); } - if (!v) { - return -1; - } - - if (PyModule_AddObject(module, "RLIM_INFINITY", v) < 0) { - Py_DECREF(v); + if (PyModule_Add(module, "RLIM_INFINITY", v) < 0) { return -1; } return 0; diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 7ab0804ad27233..94d246960f4410 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -15,7 +15,8 @@ #include "Python.h" #include "pycore_fileutils.h" // _Py_set_inheritable() #include "pycore_time.h" // _PyTime_t -#include "structmember.h" // PyMemberDef + +#include // offsetof() #ifdef HAVE_SYS_DEVPOLL_H #include @@ -1758,18 +1759,18 @@ typedef struct { #if (SIZEOF_UINTPTR_T != SIZEOF_VOID_P) # error uintptr_t does not match void *! #elif (SIZEOF_UINTPTR_T == SIZEOF_LONG_LONG) -# define T_UINTPTRT T_ULONGLONG -# define T_INTPTRT T_LONGLONG +# define T_UINTPTRT Py_T_ULONGLONG +# define T_INTPTRT Py_T_LONGLONG # define UINTPTRT_FMT_UNIT "K" # define INTPTRT_FMT_UNIT "L" #elif (SIZEOF_UINTPTR_T == SIZEOF_LONG) -# define T_UINTPTRT T_ULONG -# define T_INTPTRT T_LONG +# define T_UINTPTRT Py_T_ULONG +# define T_INTPTRT Py_T_LONG # define UINTPTRT_FMT_UNIT "k" # define INTPTRT_FMT_UNIT "l" #elif (SIZEOF_UINTPTR_T == SIZEOF_INT) -# define T_UINTPTRT T_UINT -# define T_INTPTRT T_INT +# define T_UINTPTRT Py_T_UINT +# define T_INTPTRT Py_T_INT # define UINTPTRT_FMT_UNIT "I" # define INTPTRT_FMT_UNIT "i" #else @@ -1777,26 +1778,26 @@ typedef struct { #endif #if SIZEOF_LONG_LONG == 8 -# define T_INT64 T_LONGLONG +# define T_INT64 Py_T_LONGLONG # define INT64_FMT_UNIT "L" #elif SIZEOF_LONG == 8 -# define T_INT64 T_LONG +# define T_INT64 Py_T_LONG # define INT64_FMT_UNIT "l" #elif SIZEOF_INT == 8 -# define T_INT64 T_INT +# define T_INT64 Py_T_INT # define INT64_FMT_UNIT "i" #else # define INT64_FMT_UNIT "_" #endif #if SIZEOF_LONG_LONG == 4 -# define T_UINT32 T_ULONGLONG +# define T_UINT32 Py_T_ULONGLONG # define UINT32_FMT_UNIT "K" #elif SIZEOF_LONG == 4 -# define T_UINT32 T_ULONG +# define T_UINT32 Py_T_ULONG # define UINT32_FMT_UNIT "k" #elif SIZEOF_INT == 4 -# define T_UINT32 T_UINT +# define T_UINT32 Py_T_UINT # define UINT32_FMT_UNIT "I" #else # define UINT32_FMT_UNIT "_" @@ -1813,11 +1814,11 @@ typedef struct { # define FFLAGS_TYPE T_UINT32 # define FFLAGS_FMT_UNIT UINT32_FMT_UNIT #else -# define FILTER_TYPE T_SHORT +# define FILTER_TYPE Py_T_SHORT # define FILTER_FMT_UNIT "h" -# define FLAGS_TYPE T_USHORT +# define FLAGS_TYPE Py_T_USHORT # define FLAGS_FMT_UNIT "H" -# define FFLAGS_TYPE T_UINT +# define FFLAGS_TYPE Py_T_UINT # define FFLAGS_FMT_UNIT "I" #endif @@ -1839,7 +1840,7 @@ static struct PyMemberDef kqueue_event_members[] = { {"ident", T_UINTPTRT, KQ_OFF(e.ident)}, {"filter", FILTER_TYPE, KQ_OFF(e.filter)}, {"flags", FLAGS_TYPE, KQ_OFF(e.flags)}, - {"fflags", T_UINT, KQ_OFF(e.fflags)}, + {"fflags", Py_T_UINT, KQ_OFF(e.fflags)}, {"data", DATA_TYPE, KQ_OFF(e.data)}, {"udata", T_UINTPTRT, KQ_OFF(e.udata)}, {NULL} /* Sentinel */ diff --git a/Modules/sha1module.c b/Modules/sha1module.c index ef8e067dd337b3..3fd53123229ac4 100644 --- a/Modules/sha1module.c +++ b/Modules/sha1module.c @@ -357,16 +357,9 @@ _sha1_exec(PyObject *module) st->sha1_type = (PyTypeObject *)PyType_FromModuleAndSpec( module, &sha1_type_spec, NULL); - - if (st->sha1_type == NULL) { - return -1; - } - - Py_INCREF(st->sha1_type); - if (PyModule_AddObject(module, + if (PyModule_AddObjectRef(module, "SHA1Type", (PyObject *)st->sha1_type) < 0) { - Py_DECREF(st->sha1_type); return -1; } diff --git a/Modules/sha2module.c 
b/Modules/sha2module.c index db3774c81e2d92..6ad1ff2e05bfd8 100644 --- a/Modules/sha2module.c +++ b/Modules/sha2module.c @@ -25,7 +25,7 @@ #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_typeobject.h" // _PyType_GetModuleState() #include "pycore_strhex.h" // _Py_strhex() -#include "structmember.h" // PyMemberDef + #include "hashlib.h" /*[clinic input] diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index 39bbc911712376..bb5edc368decb3 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -108,7 +108,7 @@ Local naming conventions: #include "Python.h" #include "pycore_fileutils.h" // _Py_set_inheritable() #include "pycore_moduleobject.h" // _PyModule_GetState -#include "structmember.h" // PyMemberDef + #ifdef _Py_MEMORY_SANITIZER # include @@ -5205,9 +5205,9 @@ static PyMethodDef sock_methods[] = { /* SockObject members */ static PyMemberDef sock_memberlist[] = { - {"family", T_INT, offsetof(PySocketSockObject, sock_family), READONLY, "the socket family"}, - {"type", T_INT, offsetof(PySocketSockObject, sock_type), READONLY, "the socket type"}, - {"proto", T_INT, offsetof(PySocketSockObject, sock_proto), READONLY, "the socket protocol"}, + {"family", Py_T_INT, offsetof(PySocketSockObject, sock_family), Py_READONLY, "the socket family"}, + {"type", Py_T_INT, offsetof(PySocketSockObject, sock_type), Py_READONLY, "the socket type"}, + {"proto", Py_T_INT, offsetof(PySocketSockObject, sock_proto), Py_READONLY, "the socket protocol"}, {0}, }; @@ -5779,9 +5779,15 @@ gethost_common(socket_state *state, struct hostent *h, struct sockaddr *addr, /* SF #1511317: h_aliases can be NULL */ if (h->h_aliases) { - for (pch = h->h_aliases; *pch != NULL; pch++) { + for (pch = h->h_aliases; ; pch++) { int status; - tmp = PyUnicode_FromString(*pch); + char *host_alias; + // pch can be misaligned + memcpy(&host_alias, pch, sizeof(host_alias)); + if (host_alias == NULL) { + break; + } + tmp = PyUnicode_FromString(host_alias); if (tmp == NULL) goto err; @@ -5793,8 +5799,14 @@ gethost_common(socket_state *state, struct hostent *h, struct sockaddr *addr, } } - for (pch = h->h_addr_list; *pch != NULL; pch++) { + for (pch = h->h_addr_list; ; pch++) { int status; + char *host_address; + // pch can be misaligned + memcpy(&host_address, pch, sizeof(host_address)); + if (host_address == NULL) { + break; + } switch (af) { @@ -5806,7 +5818,7 @@ gethost_common(socket_state *state, struct hostent *h, struct sockaddr *addr, #ifdef HAVE_SOCKADDR_SA_LEN sin.sin_len = sizeof(sin); #endif - memcpy(&sin.sin_addr, *pch, sizeof(sin.sin_addr)); + memcpy(&sin.sin_addr, host_address, sizeof(sin.sin_addr)); tmp = make_ipv4_addr(&sin); if (pch == h->h_addr_list && alen >= sizeof(sin)) @@ -5823,7 +5835,7 @@ gethost_common(socket_state *state, struct hostent *h, struct sockaddr *addr, #ifdef HAVE_SOCKADDR_SA_LEN sin6.sin6_len = sizeof(sin6); #endif - memcpy(&sin6.sin6_addr, *pch, sizeof(sin6.sin6_addr)); + memcpy(&sin6.sin6_addr, host_address, sizeof(sin6.sin6_addr)); tmp = make_ipv6_addr(&sin6); if (pch == h->h_addr_list && alen >= sizeof(sin6)) diff --git a/Modules/socketmodule.h b/Modules/socketmodule.h index 663ae3d6e0dd6c..47146a28e02c8f 100644 --- a/Modules/socketmodule.h +++ b/Modules/socketmodule.h @@ -100,6 +100,8 @@ typedef int socklen_t; # include # endif # include +#elif defined(HAVE_NETLINK_NETLINK_H) +# include #else # undef AF_NETLINK #endif diff --git a/Modules/termios.c b/Modules/termios.c index 6dc8200572bc0c..6b254104ed1e21 100644 --- a/Modules/termios.c +++ 
b/Modules/termios.c @@ -1232,12 +1232,7 @@ termios_exec(PyObject *mod) struct constant *constant = termios_constants; termiosmodulestate *state = get_termios_state(mod); state->TermiosError = PyErr_NewException("termios.error", NULL, NULL); - if (state->TermiosError == NULL) { - return -1; - } - Py_INCREF(state->TermiosError); - if (PyModule_AddObject(mod, "error", state->TermiosError) < 0) { - Py_DECREF(state->TermiosError); + if (PyModule_AddObjectRef(mod, "error", state->TermiosError) < 0) { return -1; } diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c index 966123f4624c08..c1e22f3868931f 100644 --- a/Modules/unicodedata.c +++ b/Modules/unicodedata.c @@ -18,9 +18,9 @@ #include "Python.h" #include "pycore_ucnhash.h" // _PyUnicode_Name_CAPI -#include "structmember.h" // PyMemberDef #include +#include // offsetof() /*[clinic input] module unicodedata @@ -82,7 +82,7 @@ typedef struct previous_version { #define get_old_record(self, v) ((((PreviousDBVersion*)self)->getrecord)(v)) static PyMemberDef DB_members[] = { - {"unidata_version", T_STRING, offsetof(PreviousDBVersion, name), READONLY}, + {"unidata_version", Py_T_STRING, offsetof(PreviousDBVersion, name), Py_READONLY}, {NULL} }; @@ -1487,11 +1487,7 @@ unicodedata_exec(PyObject *module) v = new_previous_version(ucd_type, "3.2.0", get_change_3_2_0, normalization_3_2_0); Py_DECREF(ucd_type); - if (v == NULL) { - return -1; - } - if (PyModule_AddObject(module, "ucd_3_2_0", v) < 0) { - Py_DECREF(v); + if (PyModule_Add(module, "ucd_3_2_0", v) < 0) { return -1; } diff --git a/Modules/xxsubtype.c b/Modules/xxsubtype.c index 9e4a3d66ef41bd..63b22268c597b6 100644 --- a/Modules/xxsubtype.c +++ b/Modules/xxsubtype.c @@ -1,5 +1,6 @@ #include "Python.h" -#include "structmember.h" // PyMemberDef +#include // offsetof() + PyDoc_STRVAR(xxsubtype__doc__, "xxsubtype is an example module showing how to subtype builtin types from C.\n" @@ -181,7 +182,7 @@ spamdict_init(spamdictobject *self, PyObject *args, PyObject *kwds) } static PyMemberDef spamdict_members[] = { - {"state", T_INT, offsetof(spamdictobject, state), READONLY, + {"state", Py_T_INT, offsetof(spamdictobject, state), Py_READONLY, PyDoc_STR("an int variable for demonstration purposes")}, {0} }; diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c index c0f6b96f51baba..a98a37adadcff0 100644 --- a/Modules/zlibmodule.c +++ b/Modules/zlibmodule.c @@ -4,9 +4,10 @@ /* Windows users: read Python's PCbuild\readme.txt */ #include "Python.h" -#include "structmember.h" // PyMemberDef + #include "zlib.h" #include "stdbool.h" +#include // offsetof() #if defined(ZLIB_VERNUM) && ZLIB_VERNUM < 0x1221 #error "At least zlib version 1.2.2.1 is required" @@ -1344,7 +1345,7 @@ typedef struct { decompress_buf() */ Py_ssize_t avail_in_real; bool is_initialised; - char eof; /* T_BOOL expects a char */ + char eof; /* Py_T_BOOL expects a char */ char needs_input; } ZlibDecompressor; @@ -1801,9 +1802,9 @@ static PyMethodDef ZlibDecompressor_methods[] = { #define COMP_OFF(x) offsetof(compobject, x) static PyMemberDef Decomp_members[] = { - {"unused_data", T_OBJECT, COMP_OFF(unused_data), READONLY}, - {"unconsumed_tail", T_OBJECT, COMP_OFF(unconsumed_tail), READONLY}, - {"eof", T_BOOL, COMP_OFF(eof), READONLY}, + {"unused_data", _Py_T_OBJECT, COMP_OFF(unused_data), Py_READONLY}, + {"unconsumed_tail", _Py_T_OBJECT, COMP_OFF(unconsumed_tail), Py_READONLY}, + {"eof", Py_T_BOOL, COMP_OFF(eof), Py_READONLY}, {NULL}, }; @@ -1817,11 +1818,11 @@ PyDoc_STRVAR(ZlibDecompressor_needs_input_doc, "True if more input is 
needed before more decompressed data can be produced."); static PyMemberDef ZlibDecompressor_members[] = { - {"eof", T_BOOL, offsetof(ZlibDecompressor, eof), - READONLY, ZlibDecompressor_eof__doc__}, - {"unused_data", T_OBJECT_EX, offsetof(ZlibDecompressor, unused_data), - READONLY, ZlibDecompressor_unused_data__doc__}, - {"needs_input", T_BOOL, offsetof(ZlibDecompressor, needs_input), READONLY, + {"eof", Py_T_BOOL, offsetof(ZlibDecompressor, eof), + Py_READONLY, ZlibDecompressor_eof__doc__}, + {"unused_data", Py_T_OBJECT_EX, offsetof(ZlibDecompressor, unused_data), + Py_READONLY, ZlibDecompressor_unused_data__doc__}, + {"needs_input", Py_T_BOOL, offsetof(ZlibDecompressor, needs_input), Py_READONLY, ZlibDecompressor_needs_input_doc}, {NULL}, }; @@ -2030,17 +2031,11 @@ zlib_exec(PyObject *mod) } state->ZlibError = PyErr_NewException("zlib.error", NULL, NULL); - if (state->ZlibError == NULL) { - return -1; - } - - if (PyModule_AddObject(mod, "error", Py_NewRef(state->ZlibError)) < 0) { - Py_DECREF(state->ZlibError); + if (PyModule_AddObjectRef(mod, "error", state->ZlibError) < 0) { return -1; } - if (PyModule_AddObject(mod, "_ZlibDecompressor", - Py_NewRef(state->ZlibDecompressorType)) < 0) { - Py_DECREF(state->ZlibDecompressorType); + if (PyModule_AddObjectRef(mod, "_ZlibDecompressor", + (PyObject *)state->ZlibDecompressorType) < 0) { return -1; } @@ -2082,26 +2077,14 @@ zlib_exec(PyObject *mod) #ifdef Z_TREES // 1.2.3.4, only for inflate ZLIB_ADD_INT_MACRO(Z_TREES); #endif - PyObject *ver = PyUnicode_FromString(ZLIB_VERSION); - if (ver == NULL) { - return -1; - } - - if (PyModule_AddObject(mod, "ZLIB_VERSION", ver) < 0) { - Py_DECREF(ver); - return -1; - } - - ver = PyUnicode_FromString(zlibVersion()); - if (ver == NULL) { + if (PyModule_Add(mod, "ZLIB_VERSION", + PyUnicode_FromString(ZLIB_VERSION)) < 0) { return -1; } - - if (PyModule_AddObject(mod, "ZLIB_RUNTIME_VERSION", ver) < 0) { - Py_DECREF(ver); + if (PyModule_Add(mod, "ZLIB_RUNTIME_VERSION", + PyUnicode_FromString(zlibVersion())) < 0) { return -1; } - if (PyModule_AddStringConstant(mod, "__version__", "1.0") < 0) { return -1; } diff --git a/Objects/abstract.c b/Objects/abstract.c index b4edcec6007710..c113364a88a26a 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -2812,7 +2812,7 @@ object_issubclass(PyThreadState *tstate, PyObject *derived, PyObject *cls) return -1; } - /* Probably never reached anymore. */ + /* Can be reached when infinite recursion happens. */ return recursive_issubclass(derived, cls); } diff --git a/Objects/boolobject.c b/Objects/boolobject.c index bbb187cb7121e7..e2e359437f0edf 100644 --- a/Objects/boolobject.c +++ b/Objects/boolobject.c @@ -13,8 +13,7 @@ static PyObject * bool_repr(PyObject *self) { - PyObject *res = self == Py_True ? &_Py_ID(True) : &_Py_ID(False); - return Py_NewRef(res); + return self == Py_True ? &_Py_ID(True) : &_Py_ID(False); } /* Function to return a bool from a C long */ diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 6b9231a9fa7693..afe9192720c628 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -41,17 +41,12 @@ Py_LOCAL_INLINE(Py_ssize_t) _PyBytesWriter_GetSize(_PyBytesWriter *writer, #define EMPTY (&_Py_SINGLETON(bytes_empty)) -// Return a borrowed reference to the empty bytes string singleton. +// Return a reference to the immortal empty bytes string singleton. static inline PyObject* bytes_get_empty(void) { - return &EMPTY->ob_base.ob_base; -} - - -// Return a strong reference to the empty bytes string singleton. 
-static inline PyObject* bytes_new_empty(void) -{ - return Py_NewRef(EMPTY); + PyObject *empty = &EMPTY->ob_base.ob_base; + assert(_Py_IsImmortal(empty)); + return empty; } @@ -84,7 +79,7 @@ _PyBytes_FromSize(Py_ssize_t size, int use_calloc) assert(size >= 0); if (size == 0) { - return bytes_new_empty(); + return bytes_get_empty(); } if ((size_t)size > (size_t)PY_SSIZE_T_MAX - PyBytesObject_SIZE) { @@ -123,10 +118,11 @@ PyBytes_FromStringAndSize(const char *str, Py_ssize_t size) } if (size == 1 && str != NULL) { op = CHARACTER(*str & 255); - return Py_NewRef(op); + assert(_Py_IsImmortal(op)); + return (PyObject *)op; } if (size == 0) { - return bytes_new_empty(); + return bytes_get_empty(); } op = (PyBytesObject *)_PyBytes_FromSize(size, 0); @@ -154,11 +150,12 @@ PyBytes_FromString(const char *str) } if (size == 0) { - return bytes_new_empty(); + return bytes_get_empty(); } else if (size == 1) { op = CHARACTER(*str & 255); - return Py_NewRef(op); + assert(_Py_IsImmortal(op)); + return (PyObject *)op; } /* Inline PyObject_NewVar */ @@ -3065,7 +3062,7 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize) goto error; } if (newsize == 0) { - *pv = bytes_new_empty(); + *pv = bytes_get_empty(); Py_DECREF(v); return 0; } diff --git a/Objects/call.c b/Objects/call.c index 396552d85cfca0..b1610dababd466 100644 --- a/Objects/call.c +++ b/Objects/call.c @@ -2,6 +2,7 @@ #include "pycore_call.h" // _PyObject_CallNoArgsTstate() #include "pycore_ceval.h" // _Py_EnterRecursiveCallTstate() #include "pycore_dict.h" // _PyDict_FromItems() +#include "pycore_function.h" // _PyFunction_Vectorcall() definition #include "pycore_modsupport.h" // _Py_VaBuildStack() #include "pycore_object.h" // _PyCFunctionWithKeywords_TrampolineCall() #include "pycore_pyerrors.h" // _PyErr_Occurred() diff --git a/Objects/classobject.c b/Objects/classobject.c index 548b8672f86321..5471045d777c9d 100644 --- a/Objects/classobject.c +++ b/Objects/classobject.c @@ -5,7 +5,7 @@ #include "pycore_object.h" #include "pycore_pyerrors.h" #include "pycore_pystate.h" // _PyThreadState_GET() -#include "structmember.h" // PyMemberDef + #include "clinic/classobject.c.h" @@ -48,6 +48,7 @@ method_vectorcall(PyObject *method, PyObject *const *args, PyObject *self = PyMethod_GET_SELF(method); PyObject *func = PyMethod_GET_FUNCTION(method); Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); + assert(nargs == 0 || args[nargs-1]); PyObject *result; if (nargsf & PY_VECTORCALL_ARGUMENTS_OFFSET) { @@ -56,6 +57,7 @@ method_vectorcall(PyObject *method, PyObject *const *args, nargs += 1; PyObject *tmp = newargs[0]; newargs[0] = self; + assert(newargs[nargs-1]); result = _PyObject_VectorcallTstate(tstate, func, newargs, nargs, kwnames); newargs[0] = tmp; @@ -150,9 +152,9 @@ static PyMethodDef method_methods[] = { #define MO_OFF(x) offsetof(PyMethodObject, x) static PyMemberDef method_memberlist[] = { - {"__func__", T_OBJECT, MO_OFF(im_func), READONLY, + {"__func__", _Py_T_OBJECT, MO_OFF(im_func), Py_READONLY, "the function (or other callable) implementing a method"}, - {"__self__", T_OBJECT, MO_OFF(im_self), READONLY, + {"__self__", _Py_T_OBJECT, MO_OFF(im_self), Py_READONLY, "the instance to which a method is bound"}, {NULL} /* Sentinel */ }; @@ -372,7 +374,7 @@ PyInstanceMethod_Function(PyObject *im) #define IMO_OFF(x) offsetof(PyInstanceMethodObject, x) static PyMemberDef instancemethod_memberlist[] = { - {"__func__", T_OBJECT, IMO_OFF(func), READONLY, + {"__func__", _Py_T_OBJECT, IMO_OFF(func), Py_READONLY, "the function (or other callable) implementing a 
method"}, {NULL} /* Sentinel */ }; diff --git a/Objects/clinic/codeobject.c.h b/Objects/clinic/codeobject.c.h index 1f2ab56775a1ee..511a8e4aaffea4 100644 --- a/Objects/clinic/codeobject.c.h +++ b/Objects/clinic/codeobject.c.h @@ -154,12 +154,7 @@ code_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) } PyDoc_STRVAR(code_replace__doc__, -"replace($self, /, *, co_argcount=-1, co_posonlyargcount=-1,\n" -" co_kwonlyargcount=-1, co_nlocals=-1, co_stacksize=-1,\n" -" co_flags=-1, co_firstlineno=-1, co_code=None, co_consts=None,\n" -" co_names=None, co_varnames=None, co_freevars=None,\n" -" co_cellvars=None, co_filename=None, co_name=None,\n" -" co_qualname=None, co_linetable=None, co_exceptiontable=None)\n" +"replace($self, /, **changes)\n" "--\n" "\n" "Return a copy of the code object with new values for the specified fields."); @@ -171,13 +166,12 @@ static PyObject * code_replace_impl(PyCodeObject *self, int co_argcount, int co_posonlyargcount, int co_kwonlyargcount, int co_nlocals, int co_stacksize, int co_flags, - int co_firstlineno, PyBytesObject *co_code, - PyObject *co_consts, PyObject *co_names, - PyObject *co_varnames, PyObject *co_freevars, - PyObject *co_cellvars, PyObject *co_filename, - PyObject *co_name, PyObject *co_qualname, - PyBytesObject *co_linetable, - PyBytesObject *co_exceptiontable); + int co_firstlineno, PyObject *co_code, PyObject *co_consts, + PyObject *co_names, PyObject *co_varnames, + PyObject *co_freevars, PyObject *co_cellvars, + PyObject *co_filename, PyObject *co_name, + PyObject *co_qualname, PyObject *co_linetable, + PyObject *co_exceptiontable); static PyObject * code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) @@ -217,7 +211,7 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje int co_stacksize = self->co_stacksize; int co_flags = self->co_flags; int co_firstlineno = self->co_firstlineno; - PyBytesObject *co_code = NULL; + PyObject *co_code = NULL; PyObject *co_consts = self->co_consts; PyObject *co_names = self->co_names; PyObject *co_varnames = NULL; @@ -226,8 +220,8 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje PyObject *co_filename = self->co_filename; PyObject *co_name = self->co_name; PyObject *co_qualname = self->co_qualname; - PyBytesObject *co_linetable = (PyBytesObject *)self->co_linetable; - PyBytesObject *co_exceptiontable = (PyBytesObject *)self->co_exceptiontable; + PyObject *co_linetable = self->co_linetable; + PyObject *co_exceptiontable = self->co_exceptiontable; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); if (!args) { @@ -304,7 +298,7 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje _PyArg_BadArgument("replace", "argument 'co_code'", "bytes", args[7]); goto exit; } - co_code = (PyBytesObject *)args[7]; + co_code = args[7]; if (!--noptargs) { goto skip_optional_kwonly; } @@ -394,7 +388,7 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje _PyArg_BadArgument("replace", "argument 'co_linetable'", "bytes", args[16]); goto exit; } - co_linetable = (PyBytesObject *)args[16]; + co_linetable = args[16]; if (!--noptargs) { goto skip_optional_kwonly; } @@ -403,7 +397,7 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje _PyArg_BadArgument("replace", "argument 'co_exceptiontable'", "bytes", args[17]); goto exit; } - co_exceptiontable = (PyBytesObject *)args[17]; + co_exceptiontable = args[17]; 
skip_optional_kwonly: return_value = code_replace_impl(self, co_argcount, co_posonlyargcount, co_kwonlyargcount, co_nlocals, co_stacksize, co_flags, co_firstlineno, co_code, co_consts, co_names, co_varnames, co_freevars, co_cellvars, co_filename, co_name, co_qualname, co_linetable, co_exceptiontable); @@ -470,4 +464,4 @@ code__varname_from_oparg(PyCodeObject *self, PyObject *const *args, Py_ssize_t n exit: return return_value; } -/*[clinic end generated code: output=4ca4c0c403dbfa71 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=16c95266bbc4bc03 input=a9049054013a1b77]*/ diff --git a/Objects/codeobject.c b/Objects/codeobject.c index d2670c71caa44a..6987a2382d81c2 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -2,12 +2,13 @@ #include "Python.h" #include "opcode.h" -#include "structmember.h" // PyMemberDef + #include "pycore_code.h" // _PyCodeConstructor #include "pycore_frame.h" // FRAME_SPECIALS_SIZE #include "pycore_interp.h" // PyInterpreterState.co_extra_freefuncs #include "pycore_opcode.h" // _PyOpcode_Deopt #include "pycore_pystate.h" // _PyInterpreterState_GET() +#include "pycore_setobject.h" // _PySet_NextEntry() #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "clinic/codeobject.c.h" @@ -1876,20 +1877,20 @@ code_hash(PyCodeObject *co) #define OFF(x) offsetof(PyCodeObject, x) static PyMemberDef code_memberlist[] = { - {"co_argcount", T_INT, OFF(co_argcount), READONLY}, - {"co_posonlyargcount", T_INT, OFF(co_posonlyargcount), READONLY}, - {"co_kwonlyargcount", T_INT, OFF(co_kwonlyargcount), READONLY}, - {"co_stacksize", T_INT, OFF(co_stacksize), READONLY}, - {"co_flags", T_INT, OFF(co_flags), READONLY}, - {"co_nlocals", T_INT, OFF(co_nlocals), READONLY}, - {"co_consts", T_OBJECT, OFF(co_consts), READONLY}, - {"co_names", T_OBJECT, OFF(co_names), READONLY}, - {"co_filename", T_OBJECT, OFF(co_filename), READONLY}, - {"co_name", T_OBJECT, OFF(co_name), READONLY}, - {"co_qualname", T_OBJECT, OFF(co_qualname), READONLY}, - {"co_firstlineno", T_INT, OFF(co_firstlineno), READONLY}, - {"co_linetable", T_OBJECT, OFF(co_linetable), READONLY}, - {"co_exceptiontable", T_OBJECT, OFF(co_exceptiontable), READONLY}, + {"co_argcount", Py_T_INT, OFF(co_argcount), Py_READONLY}, + {"co_posonlyargcount", Py_T_INT, OFF(co_posonlyargcount), Py_READONLY}, + {"co_kwonlyargcount", Py_T_INT, OFF(co_kwonlyargcount), Py_READONLY}, + {"co_stacksize", Py_T_INT, OFF(co_stacksize), Py_READONLY}, + {"co_flags", Py_T_INT, OFF(co_flags), Py_READONLY}, + {"co_nlocals", Py_T_INT, OFF(co_nlocals), Py_READONLY}, + {"co_consts", _Py_T_OBJECT, OFF(co_consts), Py_READONLY}, + {"co_names", _Py_T_OBJECT, OFF(co_names), Py_READONLY}, + {"co_filename", _Py_T_OBJECT, OFF(co_filename), Py_READONLY}, + {"co_name", _Py_T_OBJECT, OFF(co_name), Py_READONLY}, + {"co_qualname", _Py_T_OBJECT, OFF(co_qualname), Py_READONLY}, + {"co_firstlineno", Py_T_INT, OFF(co_firstlineno), Py_READONLY}, + {"co_linetable", _Py_T_OBJECT, OFF(co_linetable), Py_READONLY}, + {"co_exceptiontable", _Py_T_OBJECT, OFF(co_exceptiontable), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -1967,27 +1968,28 @@ code_linesiterator(PyCodeObject *code, PyObject *Py_UNUSED(args)) } /*[clinic input] +@text_signature "($self, /, **changes)" code.replace * - co_argcount: int(c_default="self->co_argcount") = -1 - co_posonlyargcount: int(c_default="self->co_posonlyargcount") = -1 - co_kwonlyargcount: int(c_default="self->co_kwonlyargcount") = -1 - co_nlocals: int(c_default="self->co_nlocals") = -1 - co_stacksize: 
int(c_default="self->co_stacksize") = -1 - co_flags: int(c_default="self->co_flags") = -1 - co_firstlineno: int(c_default="self->co_firstlineno") = -1 - co_code: PyBytesObject(c_default="NULL") = None - co_consts: object(subclass_of="&PyTuple_Type", c_default="self->co_consts") = None - co_names: object(subclass_of="&PyTuple_Type", c_default="self->co_names") = None - co_varnames: object(subclass_of="&PyTuple_Type", c_default="NULL") = None - co_freevars: object(subclass_of="&PyTuple_Type", c_default="NULL") = None - co_cellvars: object(subclass_of="&PyTuple_Type", c_default="NULL") = None - co_filename: unicode(c_default="self->co_filename") = None - co_name: unicode(c_default="self->co_name") = None - co_qualname: unicode(c_default="self->co_qualname") = None - co_linetable: PyBytesObject(c_default="(PyBytesObject *)self->co_linetable") = None - co_exceptiontable: PyBytesObject(c_default="(PyBytesObject *)self->co_exceptiontable") = None + co_argcount: int(c_default="self->co_argcount") = unchanged + co_posonlyargcount: int(c_default="self->co_posonlyargcount") = unchanged + co_kwonlyargcount: int(c_default="self->co_kwonlyargcount") = unchanged + co_nlocals: int(c_default="self->co_nlocals") = unchanged + co_stacksize: int(c_default="self->co_stacksize") = unchanged + co_flags: int(c_default="self->co_flags") = unchanged + co_firstlineno: int(c_default="self->co_firstlineno") = unchanged + co_code: object(subclass_of="&PyBytes_Type", c_default="NULL") = unchanged + co_consts: object(subclass_of="&PyTuple_Type", c_default="self->co_consts") = unchanged + co_names: object(subclass_of="&PyTuple_Type", c_default="self->co_names") = unchanged + co_varnames: object(subclass_of="&PyTuple_Type", c_default="NULL") = unchanged + co_freevars: object(subclass_of="&PyTuple_Type", c_default="NULL") = unchanged + co_cellvars: object(subclass_of="&PyTuple_Type", c_default="NULL") = unchanged + co_filename: unicode(c_default="self->co_filename") = unchanged + co_name: unicode(c_default="self->co_name") = unchanged + co_qualname: unicode(c_default="self->co_qualname") = unchanged + co_linetable: object(subclass_of="&PyBytes_Type", c_default="self->co_linetable") = unchanged + co_exceptiontable: object(subclass_of="&PyBytes_Type", c_default="self->co_exceptiontable") = unchanged Return a copy of the code object with new values for the specified fields. 
[clinic start generated code]*/ @@ -1996,14 +1998,13 @@ static PyObject * code_replace_impl(PyCodeObject *self, int co_argcount, int co_posonlyargcount, int co_kwonlyargcount, int co_nlocals, int co_stacksize, int co_flags, - int co_firstlineno, PyBytesObject *co_code, - PyObject *co_consts, PyObject *co_names, - PyObject *co_varnames, PyObject *co_freevars, - PyObject *co_cellvars, PyObject *co_filename, - PyObject *co_name, PyObject *co_qualname, - PyBytesObject *co_linetable, - PyBytesObject *co_exceptiontable) -/*[clinic end generated code: output=b6cd9988391d5711 input=f6f68e03571f8d7c]*/ + int co_firstlineno, PyObject *co_code, PyObject *co_consts, + PyObject *co_names, PyObject *co_varnames, + PyObject *co_freevars, PyObject *co_cellvars, + PyObject *co_filename, PyObject *co_name, + PyObject *co_qualname, PyObject *co_linetable, + PyObject *co_exceptiontable) +/*[clinic end generated code: output=e75c48a15def18b9 input=18e280e07846c122]*/ { #define CHECK_INT_ARG(ARG) \ if (ARG < 0) { \ @@ -2028,7 +2029,7 @@ code_replace_impl(PyCodeObject *self, int co_argcount, if (code == NULL) { return NULL; } - co_code = (PyBytesObject *)code; + co_code = code; } if (PySys_Audit("code.__new__", "OOOiiiiii", @@ -2067,10 +2068,10 @@ code_replace_impl(PyCodeObject *self, int co_argcount, co = PyCode_NewWithPosOnlyArgs( co_argcount, co_posonlyargcount, co_kwonlyargcount, co_nlocals, - co_stacksize, co_flags, (PyObject*)co_code, co_consts, co_names, + co_stacksize, co_flags, co_code, co_consts, co_names, co_varnames, co_freevars, co_cellvars, co_filename, co_name, co_qualname, co_firstlineno, - (PyObject*)co_linetable, (PyObject*)co_exceptiontable); + co_linetable, co_exceptiontable); error: Py_XDECREF(code); diff --git a/Objects/complexobject.c b/Objects/complexobject.c index 12968a63cd6fdd..0e96f54584677c 100644 --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -11,7 +11,7 @@ #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_object.h" // _PyObject_Init() #include "pycore_pymath.h" // _Py_ADJUST_ERANGE2() -#include "structmember.h" // PyMemberDef + /*[clinic input] @@ -720,9 +720,9 @@ static PyMethodDef complex_methods[] = { }; static PyMemberDef complex_members[] = { - {"real", T_DOUBLE, offsetof(PyComplexObject, cval.real), READONLY, + {"real", Py_T_DOUBLE, offsetof(PyComplexObject, cval.real), Py_READONLY, "the real part of a complex number"}, - {"imag", T_DOUBLE, offsetof(PyComplexObject, cval.imag), READONLY, + {"imag", Py_T_DOUBLE, offsetof(PyComplexObject, cval.imag), Py_READONLY, "the imaginary part of a complex number"}, {0}, }; diff --git a/Objects/descrobject.c b/Objects/descrobject.c index 810bd196e8f7e7..a744c3d1e58658 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -4,11 +4,11 @@ #include "pycore_abstract.h" // _PyObject_RealIsSubclass() #include "pycore_call.h" // _PyStack_AsDict() #include "pycore_ceval.h" // _Py_EnterRecursiveCallTstate() +#include "pycore_descrobject.h" // _PyMethodWrapper_Type #include "pycore_object.h" // _PyObject_GC_UNTRACK() #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_tuple.h" // _PyTuple_ITEMS() -#include "structmember.h" // PyMemberDef -#include "pycore_descrobject.h" + /*[clinic input] class mappingproxy "mappingproxyobject *" "&PyDictProxy_Type" @@ -182,7 +182,7 @@ member_get(PyMemberDescrObject *descr, PyObject *obj, PyObject *type) return NULL; } - if (descr->d_member->flags & PY_AUDIT_READ) { + if (descr->d_member->flags & Py_AUDIT_READ) { if (PySys_Audit("object.__getattr__", "Os", 
obj ? obj : Py_None, descr->d_member->name) < 0) { return NULL; @@ -588,7 +588,9 @@ method_get_doc(PyMethodDescrObject *descr, void *closure) static PyObject * method_get_text_signature(PyMethodDescrObject *descr, void *closure) { - return _PyType_GetTextSignatureFromInternalDoc(descr->d_method->ml_name, descr->d_method->ml_doc); + return _PyType_GetTextSignatureFromInternalDoc(descr->d_method->ml_name, + descr->d_method->ml_doc, + descr->d_method->ml_flags); } static PyObject * @@ -640,8 +642,8 @@ static PyMethodDef descr_methods[] = { }; static PyMemberDef descr_members[] = { - {"__objclass__", T_OBJECT, offsetof(PyDescrObject, d_type), READONLY}, - {"__name__", T_OBJECT, offsetof(PyDescrObject, d_name), READONLY}, + {"__objclass__", _Py_T_OBJECT, offsetof(PyDescrObject, d_type), Py_READONLY}, + {"__name__", _Py_T_OBJECT, offsetof(PyDescrObject, d_name), Py_READONLY}, {0} }; @@ -691,7 +693,8 @@ wrapperdescr_get_doc(PyWrapperDescrObject *descr, void *closure) static PyObject * wrapperdescr_get_text_signature(PyWrapperDescrObject *descr, void *closure) { - return _PyType_GetTextSignatureFromInternalDoc(descr->d_base->name, descr->d_base->doc); + return _PyType_GetTextSignatureFromInternalDoc(descr->d_base->name, + descr->d_base->doc, 0); } static PyGetSetDef wrapperdescr_getset[] = { @@ -1355,7 +1358,7 @@ static PyMethodDef wrapper_methods[] = { }; static PyMemberDef wrapper_members[] = { - {"__self__", T_OBJECT, offsetof(wrapperobject, self), READONLY}, + {"__self__", _Py_T_OBJECT, offsetof(wrapperobject, self), Py_READONLY}, {0} }; @@ -1384,7 +1387,8 @@ wrapper_doc(wrapperobject *wp, void *Py_UNUSED(ignored)) static PyObject * wrapper_text_signature(wrapperobject *wp, void *Py_UNUSED(ignored)) { - return _PyType_GetTextSignatureFromInternalDoc(wp->descr->d_base->name, wp->descr->d_base->doc); + return _PyType_GetTextSignatureFromInternalDoc(wp->descr->d_base->name, + wp->descr->d_base->doc, 0); } static PyObject * @@ -1515,10 +1519,10 @@ static PyObject * property_copy(PyObject *, PyObject *, PyObject *, PyObject *); static PyMemberDef property_members[] = { - {"fget", T_OBJECT, offsetof(propertyobject, prop_get), READONLY}, - {"fset", T_OBJECT, offsetof(propertyobject, prop_set), READONLY}, - {"fdel", T_OBJECT, offsetof(propertyobject, prop_del), READONLY}, - {"__doc__", T_OBJECT, offsetof(propertyobject, prop_doc), 0}, + {"fget", _Py_T_OBJECT, offsetof(propertyobject, prop_get), Py_READONLY}, + {"fset", _Py_T_OBJECT, offsetof(propertyobject, prop_set), Py_READONLY}, + {"fdel", _Py_T_OBJECT, offsetof(propertyobject, prop_del), Py_READONLY}, + {"__doc__", _Py_T_OBJECT, offsetof(propertyobject, prop_doc), 0}, {0} }; diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 013c21884032aa..f9701f6b4b09ad 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -118,9 +118,10 @@ As a consequence of this, split keys have a maximum size of 16. 
#include "pycore_code.h" // stats #include "pycore_dict.h" // PyDictKeysObject #include "pycore_gc.h" // _PyObject_GC_IS_TRACKED() -#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_object.h" // _PyObject_GC_TRACK(), _PyDebugAllocatorStats() #include "pycore_pyerrors.h" // _PyErr_GetRaisedException() #include "pycore_pystate.h" // _PyThreadState_GET() +#include "pycore_setobject.h" // _PySet_NextEntry() #include "stringlib/eq.h" // unicode_eq() #include @@ -1696,9 +1697,8 @@ PyDict_GetItem(PyObject *op, PyObject *key) /* Ignore any exception raised by the lookup */ _PyErr_SetRaisedException(tstate, exc); - assert(ix >= 0 || value == NULL); - return value; + return value; // borrowed reference } Py_ssize_t @@ -1737,9 +1737,46 @@ _PyDict_GetItem_KnownHash(PyObject *op, PyObject *key, Py_hash_t hash) ix = _Py_dict_lookup(mp, key, hash, &value); assert(ix >= 0 || value == NULL); - return value; + return value; // borrowed reference +} + + +int +PyDict_GetItemRef(PyObject *op, PyObject *key, PyObject **result) +{ + if (!PyDict_Check(op)) { + PyErr_BadInternalCall(); + *result = NULL; + return -1; + } + PyDictObject*mp = (PyDictObject *)op; + + Py_hash_t hash; + if (!PyUnicode_CheckExact(key) || (hash = unicode_get_hash(key)) == -1) + { + hash = PyObject_Hash(key); + if (hash == -1) { + *result = NULL; + return -1; + } + } + + PyObject *value; + Py_ssize_t ix = _Py_dict_lookup(mp, key, hash, &value); + assert(ix >= 0 || value == NULL); + if (ix == DKIX_ERROR) { + *result = NULL; + return -1; + } + if (value == NULL) { + *result = NULL; + return 0; // missing key + } + *result = Py_NewRef(value); + return 1; // key is present } + /* Variant of PyDict_GetItem() that doesn't suppress exceptions. This returns NULL *with* an exception set if an exception occurred. It returns NULL *without* an exception set if the key wasn't present. @@ -1766,7 +1803,7 @@ PyDict_GetItemWithError(PyObject *op, PyObject *key) ix = _Py_dict_lookup(mp, key, hash, &value); assert(ix >= 0 || value == NULL); - return value; + return value; // borrowed reference } PyObject * @@ -1777,7 +1814,7 @@ _PyDict_GetItemWithError(PyObject *dp, PyObject *kv) if (hash == -1) { return NULL; } - return _PyDict_GetItem_KnownHash(dp, kv, hash); + return _PyDict_GetItem_KnownHash(dp, kv, hash); // borrowed reference } PyObject * @@ -1789,7 +1826,7 @@ _PyDict_GetItemIdWithError(PyObject *dp, _Py_Identifier *key) return NULL; Py_hash_t hash = unicode_get_hash(kv); assert (hash != -1); /* interned strings have their hash value initialised */ - return _PyDict_GetItem_KnownHash(dp, kv, hash); + return _PyDict_GetItem_KnownHash(dp, kv, hash); // borrowed reference } PyObject * @@ -1802,7 +1839,7 @@ _PyDict_GetItemStringWithError(PyObject *v, const char *key) } rv = PyDict_GetItemWithError(v, kv); Py_DECREF(kv); - return rv; + return rv; // borrowed reference } /* Fast version of global value lookup (LOAD_GLOBAL). 
@@ -3894,7 +3931,20 @@ PyDict_GetItemString(PyObject *v, const char *key) } rv = PyDict_GetItem(v, kv); Py_DECREF(kv); - return rv; + return rv; // borrowed reference +} + +int +PyDict_GetItemStringRef(PyObject *v, const char *key, PyObject **result) +{ + PyObject *key_obj = PyUnicode_FromString(key); + if (key_obj == NULL) { + *result = NULL; + return -1; + } + int res = PyDict_GetItemRef(v, key_obj, result); + Py_DECREF(key_obj); + return res; } int @@ -5146,15 +5196,11 @@ dictitems_contains(_PyDictViewObject *dv, PyObject *obj) return 0; key = PyTuple_GET_ITEM(obj, 0); value = PyTuple_GET_ITEM(obj, 1); - found = PyDict_GetItemWithError((PyObject *)dv->dv_dict, key); - if (found == NULL) { - if (PyErr_Occurred()) - return -1; - return 0; + result = PyDict_GetItemRef((PyObject *)dv->dv_dict, key, &found); + if (result == 1) { + result = PyObject_RichCompareBool(found, value, Py_EQ); + Py_DECREF(found); } - Py_INCREF(found); - result = PyObject_RichCompareBool(found, value, Py_EQ); - Py_DECREF(found); return result; } @@ -5418,6 +5464,37 @@ _PyObject_MakeDictFromInstanceAttributes(PyObject *obj, PyDictValues *values) return make_dict_from_instance_attributes(interp, keys, values); } +// Return true if the dict was dematerialized, false otherwise. +bool +_PyObject_MakeInstanceAttributesFromDict(PyObject *obj, PyDictOrValues *dorv) +{ + assert(_PyObject_DictOrValuesPointer(obj) == dorv); + assert(!_PyDictOrValues_IsValues(*dorv)); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(*dorv); + if (dict == NULL) { + return false; + } + // It's likely that this dict still shares its keys (if it was materialized + // on request and not heavily modified): + if (!PyDict_CheckExact(dict)) { + return false; + } + assert(_PyType_HasFeature(Py_TYPE(obj), Py_TPFLAGS_HEAPTYPE)); + if (dict->ma_keys != CACHED_KEYS(Py_TYPE(obj)) || Py_REFCNT(dict) != 1) { + return false; + } + assert(dict->ma_values); + // We have an opportunity to do something *really* cool: dematerialize it! + _PyDictKeys_DecRef(dict->ma_keys); + _PyDictOrValues_SetValues(dorv, dict->ma_values); + OBJECT_STAT_INC(dict_dematerialized); + // Don't try this at home, kids: + dict->ma_keys = NULL; + dict->ma_values = NULL; + Py_DECREF(dict); + return true; +} + int _PyObject_StoreInstanceAttribute(PyObject *obj, PyDictValues *values, PyObject *name, PyObject *value) @@ -5642,6 +5719,7 @@ PyObject_GenericGetDict(PyObject *obj, void *context) dict = _PyDictOrValues_GetDict(*dorv_ptr); if (dict == NULL) { dictkeys_incref(CACHED_KEYS(tp)); + OBJECT_STAT_INC(dict_materialized_on_request); dict = new_dict_with_shared_keys(interp, CACHED_KEYS(tp)); dorv_ptr->dict = dict; } @@ -5684,6 +5762,7 @@ _PyObjectDict_SetItem(PyTypeObject *tp, PyObject **dictptr, assert(dictptr != NULL); dict = *dictptr; if (dict == NULL) { + assert(!_PyType_HasFeature(tp, Py_TPFLAGS_MANAGED_DICT)); dictkeys_incref(cached); dict = new_dict_with_shared_keys(interp, cached); if (dict == NULL) diff --git a/Objects/exception_handling_notes.txt b/Objects/exception_handling_notes.txt index 7de01fdbf5ff48..387ef935ce739e 100644 --- a/Objects/exception_handling_notes.txt +++ b/Objects/exception_handling_notes.txt @@ -47,7 +47,7 @@ a table to determine where to jump to when an exception is raised. 
2 2 NOP - 3 4 LOAD_GLOBAL 1 (NULL + g) + 3 4 LOAD_GLOBAL 1 (g + NULL) 16 LOAD_CONST 1 (0) 18 PRECALL 1 22 CALL 1 diff --git a/Objects/exceptions.c b/Objects/exceptions.c index 42c5317d83d0c9..62a44234b34047 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -12,7 +12,7 @@ #include "pycore_exceptions.h" // struct _Py_exc_state #include "pycore_initconfig.h" #include "pycore_object.h" -#include "structmember.h" // PyMemberDef + #include "osdefs.h" // SEP @@ -439,7 +439,7 @@ PyExceptionClass_Name(PyObject *ob) } static struct PyMemberDef BaseException_members[] = { - {"__suppress_context__", T_BOOL, + {"__suppress_context__", Py_T_BOOL, offsetof(PyBaseExceptionObject, suppress_context)}, {NULL} }; @@ -569,7 +569,7 @@ SimpleExtendsException(PyExc_Exception, StopAsyncIteration, */ static PyMemberDef StopIteration_members[] = { - {"value", T_OBJECT, offsetof(PyStopIterationObject, value), 0, + {"value", _Py_T_OBJECT, offsetof(PyStopIterationObject, value), 0, PyDoc_STR("generator return value")}, {NULL} /* Sentinel */ }; @@ -671,7 +671,7 @@ SystemExit_traverse(PySystemExitObject *self, visitproc visit, void *arg) } static PyMemberDef SystemExit_members[] = { - {"code", T_OBJECT, offsetof(PySystemExitObject, code), 0, + {"code", _Py_T_OBJECT, offsetof(PySystemExitObject, code), 0, PyDoc_STR("exception code")}, {NULL} /* Sentinel */ }; @@ -1477,9 +1477,9 @@ PyUnstable_Exc_PrepReraiseStar(PyObject *orig, PyObject *excs) } static PyMemberDef BaseExceptionGroup_members[] = { - {"message", T_OBJECT, offsetof(PyBaseExceptionGroupObject, msg), READONLY, + {"message", _Py_T_OBJECT, offsetof(PyBaseExceptionGroupObject, msg), Py_READONLY, PyDoc_STR("exception message")}, - {"exceptions", T_OBJECT, offsetof(PyBaseExceptionGroupObject, excs), READONLY, + {"exceptions", _Py_T_OBJECT, offsetof(PyBaseExceptionGroupObject, excs), Py_READONLY, PyDoc_STR("nested exceptions")}, {NULL} /* Sentinel */ }; @@ -1654,13 +1654,13 @@ ImportError_reduce(PyImportErrorObject *self, PyObject *Py_UNUSED(ignored)) } static PyMemberDef ImportError_members[] = { - {"msg", T_OBJECT, offsetof(PyImportErrorObject, msg), 0, + {"msg", _Py_T_OBJECT, offsetof(PyImportErrorObject, msg), 0, PyDoc_STR("exception message")}, - {"name", T_OBJECT, offsetof(PyImportErrorObject, name), 0, + {"name", _Py_T_OBJECT, offsetof(PyImportErrorObject, name), 0, PyDoc_STR("module name")}, - {"path", T_OBJECT, offsetof(PyImportErrorObject, path), 0, + {"path", _Py_T_OBJECT, offsetof(PyImportErrorObject, path), 0, PyDoc_STR("module path")}, - {"name_from", T_OBJECT, offsetof(PyImportErrorObject, name_from), 0, + {"name_from", _Py_T_OBJECT, offsetof(PyImportErrorObject, name_from), 0, PyDoc_STR("name imported from module")}, {NULL} /* Sentinel */ }; @@ -2103,16 +2103,16 @@ OSError_written_set(PyOSErrorObject *self, PyObject *arg, void *context) } static PyMemberDef OSError_members[] = { - {"errno", T_OBJECT, offsetof(PyOSErrorObject, myerrno), 0, + {"errno", _Py_T_OBJECT, offsetof(PyOSErrorObject, myerrno), 0, PyDoc_STR("POSIX exception code")}, - {"strerror", T_OBJECT, offsetof(PyOSErrorObject, strerror), 0, + {"strerror", _Py_T_OBJECT, offsetof(PyOSErrorObject, strerror), 0, PyDoc_STR("exception strerror")}, - {"filename", T_OBJECT, offsetof(PyOSErrorObject, filename), 0, + {"filename", _Py_T_OBJECT, offsetof(PyOSErrorObject, filename), 0, PyDoc_STR("exception filename")}, - {"filename2", T_OBJECT, offsetof(PyOSErrorObject, filename2), 0, + {"filename2", _Py_T_OBJECT, offsetof(PyOSErrorObject, filename2), 0, PyDoc_STR("second exception 
filename")}, #ifdef MS_WINDOWS - {"winerror", T_OBJECT, offsetof(PyOSErrorObject, winerror), 0, + {"winerror", _Py_T_OBJECT, offsetof(PyOSErrorObject, winerror), 0, PyDoc_STR("Win32 exception code")}, #endif {NULL} /* Sentinel */ @@ -2249,7 +2249,7 @@ NameError_traverse(PyNameErrorObject *self, visitproc visit, void *arg) } static PyMemberDef NameError_members[] = { - {"name", T_OBJECT, offsetof(PyNameErrorObject, name), 0, PyDoc_STR("name")}, + {"name", _Py_T_OBJECT, offsetof(PyNameErrorObject, name), 0, PyDoc_STR("name")}, {NULL} /* Sentinel */ }; @@ -2368,8 +2368,8 @@ AttributeError_reduce(PyAttributeErrorObject *self, PyObject *Py_UNUSED(ignored) } static PyMemberDef AttributeError_members[] = { - {"name", T_OBJECT, offsetof(PyAttributeErrorObject, name), 0, PyDoc_STR("attribute name")}, - {"obj", T_OBJECT, offsetof(PyAttributeErrorObject, obj), 0, PyDoc_STR("object")}, + {"name", _Py_T_OBJECT, offsetof(PyAttributeErrorObject, name), 0, PyDoc_STR("attribute name")}, + {"obj", _Py_T_OBJECT, offsetof(PyAttributeErrorObject, obj), 0, PyDoc_STR("object")}, {NULL} /* Sentinel */ }; @@ -2541,21 +2541,21 @@ SyntaxError_str(PySyntaxErrorObject *self) } static PyMemberDef SyntaxError_members[] = { - {"msg", T_OBJECT, offsetof(PySyntaxErrorObject, msg), 0, + {"msg", _Py_T_OBJECT, offsetof(PySyntaxErrorObject, msg), 0, PyDoc_STR("exception msg")}, - {"filename", T_OBJECT, offsetof(PySyntaxErrorObject, filename), 0, + {"filename", _Py_T_OBJECT, offsetof(PySyntaxErrorObject, filename), 0, PyDoc_STR("exception filename")}, - {"lineno", T_OBJECT, offsetof(PySyntaxErrorObject, lineno), 0, + {"lineno", _Py_T_OBJECT, offsetof(PySyntaxErrorObject, lineno), 0, PyDoc_STR("exception lineno")}, - {"offset", T_OBJECT, offsetof(PySyntaxErrorObject, offset), 0, + {"offset", _Py_T_OBJECT, offsetof(PySyntaxErrorObject, offset), 0, PyDoc_STR("exception offset")}, - {"text", T_OBJECT, offsetof(PySyntaxErrorObject, text), 0, + {"text", _Py_T_OBJECT, offsetof(PySyntaxErrorObject, text), 0, PyDoc_STR("exception text")}, - {"end_lineno", T_OBJECT, offsetof(PySyntaxErrorObject, end_lineno), 0, + {"end_lineno", _Py_T_OBJECT, offsetof(PySyntaxErrorObject, end_lineno), 0, PyDoc_STR("exception end lineno")}, - {"end_offset", T_OBJECT, offsetof(PySyntaxErrorObject, end_offset), 0, + {"end_offset", _Py_T_OBJECT, offsetof(PySyntaxErrorObject, end_offset), 0, PyDoc_STR("exception end offset")}, - {"print_file_and_line", T_OBJECT, + {"print_file_and_line", _Py_T_OBJECT, offsetof(PySyntaxErrorObject, print_file_and_line), 0, PyDoc_STR("exception print_file_and_line")}, {NULL} /* Sentinel */ @@ -2910,15 +2910,15 @@ UnicodeError_traverse(PyUnicodeErrorObject *self, visitproc visit, void *arg) } static PyMemberDef UnicodeError_members[] = { - {"encoding", T_OBJECT, offsetof(PyUnicodeErrorObject, encoding), 0, + {"encoding", _Py_T_OBJECT, offsetof(PyUnicodeErrorObject, encoding), 0, PyDoc_STR("exception encoding")}, - {"object", T_OBJECT, offsetof(PyUnicodeErrorObject, object), 0, + {"object", _Py_T_OBJECT, offsetof(PyUnicodeErrorObject, object), 0, PyDoc_STR("exception object")}, - {"start", T_PYSSIZET, offsetof(PyUnicodeErrorObject, start), 0, + {"start", Py_T_PYSSIZET, offsetof(PyUnicodeErrorObject, start), 0, PyDoc_STR("exception start")}, - {"end", T_PYSSIZET, offsetof(PyUnicodeErrorObject, end), 0, + {"end", Py_T_PYSSIZET, offsetof(PyUnicodeErrorObject, end), 0, PyDoc_STR("exception end")}, - {"reason", T_OBJECT, offsetof(PyUnicodeErrorObject, reason), 0, + {"reason", _Py_T_OBJECT, offsetof(PyUnicodeErrorObject, reason), 0, 
PyDoc_STR("exception reason")}, {NULL} /* Sentinel */ }; diff --git a/Objects/floatobject.c b/Objects/floatobject.c index fa55481f09dec0..6a0c2e033e3e9a 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -10,7 +10,7 @@ #include "pycore_interp.h" // _PyInterpreterState.float_state #include "pycore_long.h" // _PyLong_GetOne() #include "pycore_modsupport.h" // _PyArg_NoKwnames() -#include "pycore_object.h" // _PyObject_Init() +#include "pycore_object.h" // _PyObject_Init(), _PyDebugAllocatorStats() #include "pycore_pymath.h" // _PY_SHORT_FLOAT_REPR #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_structseq.h" // _PyStructSequence_FiniBuiltin() diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 18820551a0547e..17571535048e23 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -11,12 +11,12 @@ #include "frameobject.h" // PyFrameObject #include "pycore_frame.h" #include "opcode.h" // EXTENDED_ARG -#include "structmember.h" // PyMemberDef + #define OFF(x) offsetof(PyFrameObject, x) static PyMemberDef frame_memberlist[] = { - {"f_trace_lines", T_BOOL, OFF(f_trace_lines), 0}, + {"f_trace_lines", Py_T_BOOL, OFF(f_trace_lines), 0}, {NULL} /* Sentinel */ }; @@ -879,9 +879,6 @@ frame_dealloc(PyFrameObject *f) /* It is the responsibility of the owning generator/coroutine * to have cleared the generator pointer */ - assert(f->f_frame->owner != FRAME_OWNED_BY_GENERATOR || - _PyFrame_GetGenerator(f->f_frame)->gi_frame_state == FRAME_CLEARED); - if (_PyObject_GC_IS_TRACKED(f)) { _PyObject_GC_UNTRACK(f); } @@ -889,10 +886,14 @@ frame_dealloc(PyFrameObject *f) Py_TRASHCAN_BEGIN(f, frame_dealloc); PyObject *co = NULL; + /* GH-106092: If f->f_frame was on the stack and we reached the maximum + * nesting depth for deallocations, the trashcan may have delayed this + * deallocation until after f->f_frame is freed. Avoid dereferencing + * f->f_frame unless we know it still points to valid memory. 
*/ + _PyInterpreterFrame *frame = (_PyInterpreterFrame *)f->_f_frame_data; + /* Kill all local variables including specials, if we own them */ - if (f->f_frame->owner == FRAME_OWNED_BY_FRAME_OBJECT) { - assert(f->f_frame == (_PyInterpreterFrame *)f->_f_frame_data); - _PyInterpreterFrame *frame = (_PyInterpreterFrame *)f->_f_frame_data; + if (f->f_frame == frame && frame->owner == FRAME_OWNED_BY_FRAME_OBJECT) { /* Don't clear code object until the end */ co = frame->f_executable; frame->f_executable = NULL; diff --git a/Objects/funcobject.c b/Objects/funcobject.c index 0c69bf4ebcfed5..8c0bface3ac710 100644 --- a/Objects/funcobject.c +++ b/Objects/funcobject.c @@ -6,7 +6,7 @@ #include "pycore_code.h" // _Py_next_func_version #include "pycore_object.h" // _PyObject_GC_UNTRACK() #include "pycore_pyerrors.h" // _PyErr_Occurred() -#include "structmember.h" // PyMemberDef + static PyObject* func_repr(PyFunctionObject *op); @@ -451,11 +451,11 @@ PyFunction_SetAnnotations(PyObject *op, PyObject *annotations) #define OFF(x) offsetof(PyFunctionObject, x) static PyMemberDef func_memberlist[] = { - {"__closure__", T_OBJECT, OFF(func_closure), READONLY}, - {"__doc__", T_OBJECT, OFF(func_doc), 0}, - {"__globals__", T_OBJECT, OFF(func_globals), READONLY}, - {"__module__", T_OBJECT, OFF(func_module), 0}, - {"__builtins__", T_OBJECT, OFF(func_builtins), READONLY}, + {"__closure__", _Py_T_OBJECT, OFF(func_closure), Py_READONLY}, + {"__doc__", _Py_T_OBJECT, OFF(func_doc), 0}, + {"__globals__", _Py_T_OBJECT, OFF(func_globals), Py_READONLY}, + {"__module__", _Py_T_OBJECT, OFF(func_module), 0}, + {"__builtins__", _Py_T_OBJECT, OFF(func_builtins), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -831,8 +831,8 @@ func_clear(PyFunctionObject *op) // However, name and qualname could be str subclasses, so they // could have reference cycles. The solution is to replace them // with a genuinely immutable string. 
- Py_SETREF(op->func_name, Py_NewRef(&_Py_STR(empty))); - Py_SETREF(op->func_qualname, Py_NewRef(&_Py_STR(empty))); + Py_SETREF(op->func_name, &_Py_STR(empty)); + Py_SETREF(op->func_qualname, &_Py_STR(empty)); return 0; } @@ -1063,8 +1063,8 @@ cm_init(PyObject *self, PyObject *args, PyObject *kwds) } static PyMemberDef cm_memberlist[] = { - {"__func__", T_OBJECT, offsetof(classmethod, cm_callable), READONLY}, - {"__wrapped__", T_OBJECT, offsetof(classmethod, cm_callable), READONLY}, + {"__func__", _Py_T_OBJECT, offsetof(classmethod, cm_callable), Py_READONLY}, + {"__wrapped__", _Py_T_OBJECT, offsetof(classmethod, cm_callable), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -1258,8 +1258,8 @@ sm_call(PyObject *callable, PyObject *args, PyObject *kwargs) } static PyMemberDef sm_memberlist[] = { - {"__func__", T_OBJECT, offsetof(staticmethod, sm_callable), READONLY}, - {"__wrapped__", T_OBJECT, offsetof(staticmethod, sm_callable), READONLY}, + {"__func__", _Py_T_OBJECT, offsetof(staticmethod, sm_callable), Py_READONLY}, + {"__wrapped__", _Py_T_OBJECT, offsetof(staticmethod, sm_callable), Py_READONLY}, {NULL} /* Sentinel */ }; diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c index 0c478f3717e036..faf517b66b9350 100644 --- a/Objects/genericaliasobject.c +++ b/Objects/genericaliasobject.c @@ -3,7 +3,7 @@ #include "Python.h" #include "pycore_object.h" #include "pycore_unionobject.h" // _Py_union_type_or, _PyGenericAlias_Check -#include "structmember.h" // PyMemberDef + #include @@ -626,6 +626,7 @@ ga_vectorcall(PyObject *self, PyObject *const *args, static const char* const attr_exceptions[] = { "__class__", + "__bases__", "__origin__", "__args__", "__unpacked__", @@ -782,9 +783,9 @@ static PyMethodDef ga_methods[] = { }; static PyMemberDef ga_members[] = { - {"__origin__", T_OBJECT, offsetof(gaobject, origin), READONLY}, - {"__args__", T_OBJECT, offsetof(gaobject, args), READONLY}, - {"__unpacked__", T_BOOL, offsetof(gaobject, starred), READONLY}, + {"__origin__", _Py_T_OBJECT, offsetof(gaobject, origin), Py_READONLY}, + {"__args__", _Py_T_OBJECT, offsetof(gaobject, args), Py_READONLY}, + {"__unpacked__", Py_T_BOOL, offsetof(gaobject, starred), Py_READONLY}, {0} }; diff --git a/Objects/genobject.c b/Objects/genobject.c index 103e8b8bb882f6..65782be182cd71 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -10,7 +10,7 @@ #include "pycore_object.h" // _PyObject_GC_UNTRACK() #include "pycore_pyerrors.h" // _PyErr_ClearExcState() #include "pycore_pystate.h" // _PyThreadState_GET() -#include "structmember.h" // PyMemberDef + #include "opcode.h" // SEND #include "frameobject.h" // _PyInterpreterFrame_GetLine #include "pystats.h" @@ -149,14 +149,16 @@ gen_dealloc(PyGenObject *gen) gen->gi_frame_state = FRAME_CLEARED; frame->previous = NULL; _PyFrame_ClearExceptCode(frame); + _PyErr_ClearExcState(&gen->gi_exc_state); } + assert(gen->gi_exc_state.exc_value == NULL); if (_PyGen_GetCode(gen)->co_flags & CO_COROUTINE) { Py_CLEAR(((PyCoroObject *)gen)->cr_origin_or_finalizer); } Py_DECREF(_PyGen_GetCode(gen)); Py_CLEAR(gen->gi_name); Py_CLEAR(gen->gi_qualname); - _PyErr_ClearExcState(&gen->gi_exc_state); + PyObject_GC_Del(gen); } @@ -252,10 +254,7 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult, !PyErr_ExceptionMatches(PyExc_StopAsyncIteration)); } - /* generator can't be rerun, so release the frame */ - /* first clean reference cycle through stored exception traceback */ - _PyErr_ClearExcState(&gen->gi_exc_state); - + assert(gen->gi_exc_state.exc_value == 
NULL); assert(gen->gi_frame_state == FRAME_CLEARED); *presult = result; return result ? PYGEN_RETURN : PYGEN_ERROR; @@ -1144,7 +1143,7 @@ static PyGetSetDef coro_getsetlist[] = { }; static PyMemberDef coro_memberlist[] = { - {"cr_origin", T_OBJECT, offsetof(PyCoroObject, cr_origin_or_finalizer), READONLY}, + {"cr_origin", _Py_T_OBJECT, offsetof(PyCoroObject, cr_origin_or_finalizer), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -1558,8 +1557,8 @@ static PyGetSetDef async_gen_getsetlist[] = { }; static PyMemberDef async_gen_memberlist[] = { - {"ag_running", T_BOOL, offsetof(PyAsyncGenObject, ag_running_async), - READONLY}, + {"ag_running", Py_T_BOOL, offsetof(PyAsyncGenObject, ag_running_async), + Py_READONLY}, {NULL} /* Sentinel */ }; diff --git a/Objects/interpreteridobject.c b/Objects/interpreteridobject.c index 46239100dcb7b7..16e27b64c0c9c2 100644 --- a/Objects/interpreteridobject.c +++ b/Objects/interpreteridobject.c @@ -46,7 +46,7 @@ static int interp_id_converter(PyObject *arg, void *ptr) { int64_t id; - if (PyObject_TypeCheck(arg, &_PyInterpreterID_Type)) { + if (PyObject_TypeCheck(arg, &PyInterpreterID_Type)) { id = ((interpid *)arg)->id; } else if (_PyIndex_Check(arg)) { @@ -183,13 +183,13 @@ interpid_richcompare(PyObject *self, PyObject *other, int op) Py_RETURN_NOTIMPLEMENTED; } - if (!PyObject_TypeCheck(self, &_PyInterpreterID_Type)) { + if (!PyObject_TypeCheck(self, &PyInterpreterID_Type)) { Py_RETURN_NOTIMPLEMENTED; } interpid *id = (interpid *)self; int equal; - if (PyObject_TypeCheck(other, &_PyInterpreterID_Type)) { + if (PyObject_TypeCheck(other, &PyInterpreterID_Type)) { interpid *otherid = (interpid *)other; equal = (id->id == otherid->id); } @@ -224,7 +224,7 @@ interpid_richcompare(PyObject *self, PyObject *other, int op) PyDoc_STRVAR(interpid_doc, "A interpreter ID identifies a interpreter and may be used as an int."); -PyTypeObject _PyInterpreterID_Type = { +PyTypeObject PyInterpreterID_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) "InterpreterID", /* tp_name */ sizeof(interpid), /* tp_basicsize */ @@ -265,13 +265,13 @@ PyTypeObject _PyInterpreterID_Type = { interpid_new, /* tp_new */ }; -PyObject *_PyInterpreterID_New(int64_t id) +PyObject *PyInterpreterID_New(int64_t id) { - return (PyObject *)newinterpid(&_PyInterpreterID_Type, id, 0); + return (PyObject *)newinterpid(&PyInterpreterID_Type, id, 0); } PyObject * -_PyInterpreterState_GetIDObject(PyInterpreterState *interp) +PyInterpreterState_GetIDObject(PyInterpreterState *interp) { if (_PyInterpreterState_IDInitref(interp) != 0) { return NULL; @@ -280,11 +280,11 @@ _PyInterpreterState_GetIDObject(PyInterpreterState *interp) if (id < 0) { return NULL; } - return (PyObject *)newinterpid(&_PyInterpreterID_Type, id, 0); + return (PyObject *)newinterpid(&PyInterpreterID_Type, id, 0); } PyInterpreterState * -_PyInterpreterID_LookUp(PyObject *requested_id) +PyInterpreterID_LookUp(PyObject *requested_id) { int64_t id; if (!interp_id_converter(requested_id, &id)) { diff --git a/Objects/listobject.c b/Objects/listobject.c index 144ede6351e03c..c0da9dd916851a 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -6,7 +6,7 @@ #include "pycore_list.h" // struct _Py_list_state, _PyListIterObject #include "pycore_long.h" // _PyLong_DigitCount #include "pycore_modsupport.h" // _PyArg_NoKwnames() -#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_object.h" // _PyObject_GC_TRACK(), _PyDebugAllocatorStats() #include "pycore_tuple.h" // _PyTuple_FromArray() #include diff --git a/Objects/longobject.c 
b/Objects/longobject.c index 5fca55e5c3a2be..354cba9d6d800f 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -163,6 +163,9 @@ _PyLong_New(Py_ssize_t size) } _PyLong_SetSignAndDigitCount(result, size != 0, size); _PyObject_Init((PyObject*)result, &PyLong_Type); + /* The digit has to be initialized explicitly to avoid + * use-of-uninitialized-value. */ + result->long_value.ob_digit[0] = 0; return result; } @@ -171,7 +174,7 @@ _PyLong_FromDigits(int negative, Py_ssize_t digit_count, digit *digits) { assert(digit_count >= 0); if (digit_count == 0) { - return (PyLongObject *)Py_NewRef(_PyLong_GetZero()); + return (PyLongObject *)_PyLong_GetZero(); } PyLongObject *result = _PyLong_New(digit_count); if (result == NULL) { @@ -2854,8 +2857,7 @@ long_divrem(PyLongObject *a, PyLongObject *b, if (*prem == NULL) { return -1; } - PyObject *zero = _PyLong_GetZero(); - *pdiv = (PyLongObject*)Py_NewRef(zero); + *pdiv = (PyLongObject*)_PyLong_GetZero(); return 0; } if (size_b == 1) { diff --git a/Objects/methodobject.c b/Objects/methodobject.c index fe081992d51fda..521c9059770acb 100644 --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -7,7 +7,7 @@ #include "pycore_object.h" #include "pycore_pyerrors.h" #include "pycore_pystate.h" // _PyThreadState_GET() -#include "structmember.h" // PyMemberDef + /* undefine macro trampoline to PyCFunction_NewEx */ #undef PyCFunction_New @@ -192,7 +192,9 @@ static PyMethodDef meth_methods[] = { static PyObject * meth_get__text_signature__(PyCFunctionObject *m, void *closure) { - return _PyType_GetTextSignatureFromInternalDoc(m->m_ml->ml_name, m->m_ml->ml_doc); + return _PyType_GetTextSignatureFromInternalDoc(m->m_ml->ml_name, + m->m_ml->ml_doc, + m->m_ml->ml_flags); } static PyObject * @@ -273,7 +275,7 @@ static PyGetSetDef meth_getsets [] = { #define OFF(x) offsetof(PyCFunctionObject, x) static PyMemberDef meth_members[] = { - {"__module__", T_OBJECT, OFF(m_module), 0}, + {"__module__", _Py_T_OBJECT, OFF(m_module), 0}, {NULL} }; diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c index ba20534c3bdd8d..7e890d021cb946 100644 --- a/Objects/moduleobject.c +++ b/Objects/moduleobject.c @@ -9,11 +9,11 @@ #include "pycore_object.h" // _PyType_AllocNoTrack #include "pycore_pyerrors.h" // _PyErr_FormatFromCause() #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "structmember.h" // PyMemberDef + static PyMemberDef module_members[] = { - {"__dict__", T_OBJECT, offsetof(PyModuleObject, md_dict), READONLY}, + {"__dict__", _Py_T_OBJECT, offsetof(PyModuleObject, md_dict), Py_READONLY}, {0} }; diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c index 2cc4ddd3c91daa..11cf859add3ab8 100644 --- a/Objects/namespaceobject.c +++ b/Objects/namespaceobject.c @@ -2,7 +2,9 @@ #include "Python.h" #include "pycore_namespace.h" // _PyNamespace_Type -#include "structmember.h" // PyMemberDef + +#include // offsetof() + typedef struct { @@ -12,7 +14,7 @@ typedef struct { static PyMemberDef namespace_members[] = { - {"__dict__", T_OBJECT, offsetof(_PyNamespaceObject, ns_dict), READONLY}, + {"__dict__", _Py_T_OBJECT, offsetof(_PyNamespaceObject, ns_dict), Py_READONLY}, {NULL} }; diff --git a/Objects/object.c b/Objects/object.c index d30e048335ab63..868623a9f7bffc 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -5,11 +5,12 @@ #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _Py_EnterRecursiveCallTstate() #include "pycore_context.h" // _PyContextTokenMissing_Type +#include "pycore_descrobject.h" // 
_PyMethodWrapper_Type #include "pycore_dict.h" // _PyObject_MakeDictFromInstanceAttributes() #include "pycore_floatobject.h" // _PyFloat_DebugMallocStats() #include "pycore_initconfig.h" // _PyStatus_EXCEPTION() #include "pycore_namespace.h" // _PyNamespace_Type -#include "pycore_object.h" // _PyType_CheckConsistency(), _Py_FatalRefcountError() +#include "pycore_object.h" // PyAPI_DATA() _Py_SwappedOp definition #include "pycore_pyerrors.h" // _PyErr_Occurred() #include "pycore_pymem.h" // _PyMem_IsPtrFreed() #include "pycore_pystate.h" // _PyThreadState_GET() @@ -158,11 +159,16 @@ _PyDebug_PrintTotalRefs(void) { Do not call them otherwise, they do not initialize the object! */ #ifdef Py_TRACE_REFS -/* Head of circular doubly-linked list of all objects. These are linked - * together via the _ob_prev and _ob_next members of a PyObject, which - * exist only in a Py_TRACE_REFS build. - */ -static PyObject refchain = {&refchain, &refchain}; + +#define REFCHAIN(interp) &interp->object_state.refchain + +static inline void +init_refchain(PyInterpreterState *interp) +{ + PyObject *refchain = REFCHAIN(interp); + refchain->_ob_prev = refchain; + refchain->_ob_next = refchain; +} /* Insert op at the front of the list of all objects. If force is true, * op is added even if _ob_prev and _ob_next are non-NULL already. If @@ -187,10 +193,11 @@ _Py_AddToAllObjects(PyObject *op, int force) } #endif if (force || op->_ob_prev == NULL) { - op->_ob_next = refchain._ob_next; - op->_ob_prev = &refchain; - refchain._ob_next->_ob_prev = op; - refchain._ob_next = op; + PyObject *refchain = REFCHAIN(_PyInterpreterState_GET()); + op->_ob_next = refchain->_ob_next; + op->_ob_prev = refchain; + refchain->_ob_next->_ob_prev = op; + refchain->_ob_next = op; } } #endif /* Py_TRACE_REFS */ @@ -206,14 +213,14 @@ _Py_NegativeRefcount(const char *filename, int lineno, PyObject *op) /* This is used strictly by Py_INCREF(). */ void -_Py_IncRefTotal_DO_NOT_USE_THIS(void) +_Py_INCREF_IncRefTotal(void) { reftotal_increment(_PyInterpreterState_GET()); } /* This is used strictly by Py_DECREF(). 
*/ void -_Py_DecRefTotal_DO_NOT_USE_THIS(void) +_Py_DECREF_DecRefTotal(void) { reftotal_decrement(_PyInterpreterState_GET()); } @@ -1570,15 +1577,32 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name, goto error_check; } dictptr = &dorv_ptr->dict; + if (*dictptr == NULL) { + if (_PyObject_InitInlineValues(obj, tp) < 0) { + goto done; + } + res = _PyObject_StoreInstanceAttribute( + obj, _PyDictOrValues_GetValues(*dorv_ptr), name, value); + goto error_check; + } } else { dictptr = _PyObject_ComputedDictPointer(obj); } if (dictptr == NULL) { if (descr == NULL) { - PyErr_Format(PyExc_AttributeError, - "'%.100s' object has no attribute '%U'", - tp->tp_name, name); + if (tp->tp_setattro == PyObject_GenericSetAttr) { + PyErr_Format(PyExc_AttributeError, + "'%.100s' object has no attribute '%U' and no " + "__dict__ for setting new attributes", + tp->tp_name, name); + } + else { + PyErr_Format(PyExc_AttributeError, + "'%.100s' object has no attribute '%U'", + tp->tp_name, name); + } + set_attribute_error_context(obj, name); } else { PyErr_Format(PyExc_AttributeError, @@ -1611,6 +1635,7 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name, "'%.100s' object has no attribute '%U'", tp->tp_name, name); } + set_attribute_error_context(obj, name); } done: Py_XDECREF(descr); @@ -2010,6 +2035,18 @@ PyObject _Py_NotImplementedStruct = { &_PyNotImplemented_Type }; + +void +_PyObject_InitState(PyInterpreterState *interp) +{ +#ifdef Py_TRACE_REFS + if (!_Py_IsMainInterpreter(interp)) { + init_refchain(interp); + } +#endif +} + + extern PyTypeObject _Py_GenericAliasIterType; extern PyTypeObject _PyMemoryIter_Type; extern PyTypeObject _PyLineIterator; @@ -2061,6 +2098,7 @@ static PyTypeObject* static_types[] = { &PyGen_Type, &PyGetSetDescr_Type, &PyInstanceMethod_Type, + &PyInterpreterID_Type, &PyListIter_Type, &PyListRevIter_Type, &PyList_Type, @@ -2111,7 +2149,6 @@ static PyTypeObject* static_types[] = { &_PyHamt_CollisionNode_Type, &_PyHamt_Type, &_PyLegacyEventHandler_Type, - &_PyInterpreterID_Type, &_PyLineIterator, &_PyManagedBuffer_Type, &_PyMemoryIter_Type, @@ -2218,7 +2255,8 @@ _Py_ForgetReference(PyObject *op) _PyObject_ASSERT_FAILED_MSG(op, "negative refcnt"); } - if (op == &refchain || + PyObject *refchain = REFCHAIN(_PyInterpreterState_GET()); + if (op == refchain || op->_ob_prev->_ob_next != op || op->_ob_next->_ob_prev != op) { _PyObject_ASSERT_FAILED_MSG(op, "invalid object chain"); @@ -2226,12 +2264,12 @@ _Py_ForgetReference(PyObject *op) #ifdef SLOW_UNREF_CHECK PyObject *p; - for (p = refchain._ob_next; p != &refchain; p = p->_ob_next) { + for (p = refchain->_ob_next; p != refchain; p = p->_ob_next) { if (p == op) { break; } } - if (p == &refchain) { + if (p == refchain) { /* Not found */ _PyObject_ASSERT_FAILED_MSG(op, "object not found in the objects list"); @@ -2247,11 +2285,15 @@ _Py_ForgetReference(PyObject *op) * interpreter must be in a healthy state. */ void -_Py_PrintReferences(FILE *fp) +_Py_PrintReferences(PyInterpreterState *interp, FILE *fp) { PyObject *op; + if (interp == NULL) { + interp = _PyInterpreterState_Main(); + } fprintf(fp, "Remaining objects:\n"); - for (op = refchain._ob_next; op != &refchain; op = op->_ob_next) { + PyObject *refchain = REFCHAIN(interp); + for (op = refchain->_ob_next; op != refchain; op = op->_ob_next) { fprintf(fp, "%p [%zd] ", (void *)op, Py_REFCNT(op)); if (PyObject_Print(op, fp, 0) != 0) { PyErr_Clear(); @@ -2263,34 +2305,42 @@ _Py_PrintReferences(FILE *fp) /* Print the addresses of all live objects. 
Unlike _Py_PrintReferences, this * doesn't make any calls to the Python C API, so is always safe to call. */ +// XXX This function is not safe to use if the interpreter has been +// freed or is in an unhealthy state (e.g. late in finalization). +// The call in Py_FinalizeEx() is okay since the main interpreter +// is statically allocated. void -_Py_PrintReferenceAddresses(FILE *fp) +_Py_PrintReferenceAddresses(PyInterpreterState *interp, FILE *fp) { PyObject *op; + PyObject *refchain = REFCHAIN(interp); fprintf(fp, "Remaining object addresses:\n"); - for (op = refchain._ob_next; op != &refchain; op = op->_ob_next) + for (op = refchain->_ob_next; op != refchain; op = op->_ob_next) fprintf(fp, "%p [%zd] %s\n", (void *)op, Py_REFCNT(op), Py_TYPE(op)->tp_name); } +/* The implementation of sys.getobjects(). */ PyObject * _Py_GetObjects(PyObject *self, PyObject *args) { int i, n; PyObject *t = NULL; PyObject *res, *op; + PyInterpreterState *interp = _PyInterpreterState_GET(); if (!PyArg_ParseTuple(args, "i|O", &n, &t)) return NULL; - op = refchain._ob_next; + PyObject *refchain = REFCHAIN(interp); + op = refchain->_ob_next; res = PyList_New(0); if (res == NULL) return NULL; - for (i = 0; (n == 0 || i < n) && op != &refchain; i++) { + for (i = 0; (n == 0 || i < n) && op != refchain; i++) { while (op == self || op == args || op == res || op == t || (t != NULL && !Py_IS_TYPE(op, (PyTypeObject *) t))) { op = op->_ob_next; - if (op == &refchain) + if (op == refchain) return res; } if (PyList_Append(res, op) < 0) { @@ -2302,7 +2352,9 @@ _Py_GetObjects(PyObject *self, PyObject *args) return res; } -#endif +#undef REFCHAIN + +#endif /* Py_TRACE_REFS */ /* Hack to force loading of abstract.o */ diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index eb68d7c030d293..7d552ff57c8f1e 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -2,6 +2,7 @@ #include "Python.h" #include "pycore_code.h" // stats +#include "pycore_object.h" // _PyDebugAllocatorStats() definition #include "pycore_obmalloc.h" #include "pycore_pyerrors.h" // _Py_FatalErrorFormat() #include "pycore_pymem.h" diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index 6dc41d71287cab..6e06bef95032cf 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -6,7 +6,7 @@ #include "pycore_modsupport.h" // _PyArg_NoKwnames() #include "pycore_range.h" #include "pycore_tuple.h" // _PyTuple_ITEMS() -#include "structmember.h" // PyMemberDef + /* Support objects whose length is > PY_SSIZE_T_MAX. 
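
A recurring change in this patch is the retirement of the private "structmember.h" header: member tables now use the Py_T_* and Py_READONLY names that Python.h exposes, together with offsetof() from <stddef.h>. As a sketch of what a converted table could look like in a third-party extension (PointObject and its fields are invented for illustration, not part of the patch):

#include "Python.h"
#include <stddef.h>   /* offsetof() */

typedef struct {
    PyObject_HEAD
    double x;
    double y;
    PyObject *label;
} PointObject;

static PyMemberDef point_members[] = {
    {"x", Py_T_DOUBLE, offsetof(PointObject, x), Py_READONLY,
     "the x coordinate"},
    {"y", Py_T_DOUBLE, offsetof(PointObject, y), Py_READONLY,
     "the y coordinate"},
    {"label", Py_T_OBJECT_EX, offsetof(PointObject, label), 0,
     "an optional label (AttributeError while unset)"},
    {NULL}  /* sentinel */
};

The new constants are the public spellings of the old T_* / READONLY macros, so converting an existing table is normally a mechanical rename plus swapping the "structmember.h" include for <stddef.h>, exactly as the hunks in this patch do.
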
@@ -106,8 +106,8 @@ range_from_array(PyTypeObject *type, PyObject *const *args, Py_ssize_t num_args) if (!stop) { return NULL; } - start = Py_NewRef(_PyLong_GetZero()); - step = Py_NewRef(_PyLong_GetOne()); + start = _PyLong_GetZero(); + step = _PyLong_GetOne(); break; case 0: PyErr_SetString(PyExc_TypeError, @@ -757,9 +757,9 @@ static PyMethodDef range_methods[] = { }; static PyMemberDef range_members[] = { - {"start", T_OBJECT_EX, offsetof(rangeobject, start), READONLY}, - {"stop", T_OBJECT_EX, offsetof(rangeobject, stop), READONLY}, - {"step", T_OBJECT_EX, offsetof(rangeobject, step), READONLY}, + {"start", Py_T_OBJECT_EX, offsetof(rangeobject, start), Py_READONLY}, + {"stop", Py_T_OBJECT_EX, offsetof(rangeobject, stop), Py_READONLY}, + {"step", Py_T_OBJECT_EX, offsetof(rangeobject, step), Py_READONLY}, {0} }; diff --git a/Objects/setobject.c b/Objects/setobject.c index 4ac541b9509752..c96b62e38ec27e 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -32,8 +32,10 @@ */ #include "Python.h" +#include "pycore_dict.h" // _PyDict_Contains_KnownHash() #include "pycore_modsupport.h" // _PyArg_NoKwnames() #include "pycore_object.h" // _PyObject_GC_UNTRACK() +#include "pycore_setobject.h" // _PySet_NextEntry() definition #include // offsetof() /* Object used as dummy key to fill deleted entries */ diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c index 5a33977f064421..8cf654fb6f812d 100644 --- a/Objects/sliceobject.c +++ b/Objects/sliceobject.c @@ -17,7 +17,7 @@ this type and there is exactly one in existence. #include "pycore_abstract.h" // _PyIndex_Check() #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_object.h" // _PyObject_GC_TRACK() -#include "structmember.h" // PyMemberDef + static PyObject * ellipsis_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) @@ -377,9 +377,9 @@ slice_repr(PySliceObject *r) } static PyMemberDef slice_members[] = { - {"start", T_OBJECT, offsetof(PySliceObject, start), READONLY}, - {"stop", T_OBJECT, offsetof(PySliceObject, stop), READONLY}, - {"step", T_OBJECT, offsetof(PySliceObject, step), READONLY}, + {"start", _Py_T_OBJECT, offsetof(PySliceObject, start), Py_READONLY}, + {"stop", _Py_T_OBJECT, offsetof(PySliceObject, stop), Py_READONLY}, + {"step", _Py_T_OBJECT, offsetof(PySliceObject, step), Py_READONLY}, {0} }; @@ -415,7 +415,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, /* Convert step to an integer; raise for zero step. 
*/ if (self->step == Py_None) { - step = Py_NewRef(_PyLong_GetOne()); + step = _PyLong_GetOne(); step_is_negative = 0; } else { @@ -443,7 +443,7 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, goto error; } else { - lower = Py_NewRef(_PyLong_GetZero()); + lower = _PyLong_GetZero(); upper = Py_NewRef(length); } diff --git a/Objects/structseq.c b/Objects/structseq.c index 49011139b66534..700f67c09c9e57 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -10,7 +10,7 @@ #include "Python.h" #include "pycore_tuple.h" // _PyTuple_FromArray() #include "pycore_object.h" // _PyObject_GC_TRACK() -#include "structmember.h" // PyMemberDef + #include "pycore_structseq.h" // PyStructSequence_InitType() #include "pycore_initconfig.h" // _PyStatus_OK() @@ -465,10 +465,10 @@ initialize_members(PyStructSequence_Desc *desc, /* The names and docstrings in these MemberDefs are statically */ /* allocated so it is expected that they'll outlive the MemberDef */ members[k].name = desc->fields[i].name; - members[k].type = T_OBJECT; + members[k].type = _Py_T_OBJECT; members[k].offset = offsetof(PyStructSequence, ob_item) + i * sizeof(PyObject*); - members[k].flags = READONLY; + members[k].flags = Py_READONLY; members[k].doc = desc->fields[i].doc; k++; } diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index e85af2b75e4738..b669a3dd8525eb 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -6,7 +6,7 @@ #include "pycore_gc.h" // _PyObject_GC_IS_TRACKED() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_modsupport.h" // _PyArg_NoKwnames() -#include "pycore_object.h" // _PyObject_GC_TRACK(), _Py_FatalRefcountError() +#include "pycore_object.h" // _PyObject_GC_TRACK(), _Py_FatalRefcountError(), _PyDebugAllocatorStats() /*[clinic input] class tuple "PyTupleObject *" "&PyTuple_Type" @@ -62,7 +62,7 @@ tuple_alloc(Py_ssize_t size) static inline PyObject * tuple_get_empty(void) { - return Py_NewRef(&_Py_SINGLETON(tuple_empty)); + return (PyObject *)&_Py_SINGLETON(tuple_empty); } PyObject * diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 7e5282cabd1bfb..030e8bfc99b6d7 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -18,7 +18,7 @@ #include "pycore_unionobject.h" // _Py_union_type_or #include "pycore_weakref.h" // _PyWeakref_GET_REF() #include "opcode.h" // MAKE_CELL -#include "structmember.h" // PyMemberDef + #include #include // ptrdiff_t @@ -586,8 +586,29 @@ _PyType_GetDocFromInternalDoc(const char *name, const char *internal_doc) return PyUnicode_FromString(doc); } +static const char * +signature_from_flags(int flags) +{ + switch (flags & ~METH_COEXIST) { + case METH_NOARGS: + return "($self, /)"; + case METH_NOARGS|METH_CLASS: + return "($type, /)"; + case METH_NOARGS|METH_STATIC: + return "()"; + case METH_O: + return "($self, object, /)"; + case METH_O|METH_CLASS: + return "($type, object, /)"; + case METH_O|METH_STATIC: + return "(object, /)"; + default: + return NULL; + } +} + PyObject * -_PyType_GetTextSignatureFromInternalDoc(const char *name, const char *internal_doc) +_PyType_GetTextSignatureFromInternalDoc(const char *name, const char *internal_doc, int flags) { const char *start = find_signature(name, internal_doc); const char *end; @@ -597,6 +618,10 @@ _PyType_GetTextSignatureFromInternalDoc(const char *name, const char *internal_d else end = NULL; if (!end) { + start = signature_from_flags(flags); + if (start) { + return PyUnicode_FromString(start); + } Py_RETURN_NONE; } @@ -935,16 +960,16 @@ int 
PyUnstable_Type_AssignVersionTag(PyTypeObject *type) static PyMemberDef type_members[] = { - {"__basicsize__", T_PYSSIZET, offsetof(PyTypeObject,tp_basicsize),READONLY}, - {"__itemsize__", T_PYSSIZET, offsetof(PyTypeObject, tp_itemsize), READONLY}, - {"__flags__", T_ULONG, offsetof(PyTypeObject, tp_flags), READONLY}, + {"__basicsize__", Py_T_PYSSIZET, offsetof(PyTypeObject,tp_basicsize),Py_READONLY}, + {"__itemsize__", Py_T_PYSSIZET, offsetof(PyTypeObject, tp_itemsize), Py_READONLY}, + {"__flags__", Py_T_ULONG, offsetof(PyTypeObject, tp_flags), Py_READONLY}, /* Note that this value is misleading for static builtin types, since the memory at this offset will always be NULL. */ - {"__weakrefoffset__", T_PYSSIZET, - offsetof(PyTypeObject, tp_weaklistoffset), READONLY}, - {"__base__", T_OBJECT, offsetof(PyTypeObject, tp_base), READONLY}, - {"__dictoffset__", T_PYSSIZET, - offsetof(PyTypeObject, tp_dictoffset), READONLY}, + {"__weakrefoffset__", Py_T_PYSSIZET, + offsetof(PyTypeObject, tp_weaklistoffset), Py_READONLY}, + {"__base__", _Py_T_OBJECT, offsetof(PyTypeObject, tp_base), Py_READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, + offsetof(PyTypeObject, tp_dictoffset), Py_READONLY}, {0} }; @@ -1085,7 +1110,7 @@ type_module(PyTypeObject *type, void *context) PyUnicode_InternInPlace(&mod); } else { - mod = Py_NewRef(&_Py_ID(builtins)); + mod = &_Py_ID(builtins); } } return mod; @@ -1429,7 +1454,7 @@ type_get_doc(PyTypeObject *type, void *context) static PyObject * type_get_text_signature(PyTypeObject *type, void *context) { - return _PyType_GetTextSignatureFromInternalDoc(type->tp_name, type->tp_doc); + return _PyType_GetTextSignatureFromInternalDoc(type->tp_name, type->tp_doc, 0); } static int @@ -1775,7 +1800,7 @@ traverse_slots(PyTypeObject *type, PyObject *self, visitproc visit, void *arg) n = Py_SIZE(type); mp = _PyHeapType_GET_MEMBERS((PyHeapTypeObject *)type); for (i = 0; i < n; i++, mp++) { - if (mp->type == T_OBJECT_EX) { + if (mp->type == Py_T_OBJECT_EX) { char *addr = (char *)self + mp->offset; PyObject *obj = *(PyObject **)addr; if (obj != NULL) { @@ -1850,7 +1875,7 @@ clear_slots(PyTypeObject *type, PyObject *self) n = Py_SIZE(type); mp = _PyHeapType_GET_MEMBERS((PyHeapTypeObject *)type); for (i = 0; i < n; i++, mp++) { - if (mp->type == T_OBJECT_EX && !(mp->flags & READONLY)) { + if (mp->type == Py_T_OBJECT_EX && !(mp->flags & Py_READONLY)) { char *addr = (char *)self + mp->offset; PyObject *obj = *(PyObject **)addr; if (obj != NULL) { @@ -3567,7 +3592,7 @@ type_new_descriptors(const type_new_ctx *ctx, PyTypeObject *type) if (mp->name == NULL) { return -1; } - mp->type = T_OBJECT_EX; + mp->type = Py_T_OBJECT_EX; mp->offset = slotoffset; /* __dict__ and __weakref__ are already filtered out */ @@ -4116,20 +4141,20 @@ _PyType_FromMetaclass_impl( nmembers++; if (strcmp(memb->name, "__weaklistoffset__") == 0) { // The PyMemberDef must be a Py_ssize_t and readonly - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); + assert(memb->type == Py_T_PYSSIZET); + assert(memb->flags == Py_READONLY); weaklistoffset = memb->offset; } if (strcmp(memb->name, "__dictoffset__") == 0) { // The PyMemberDef must be a Py_ssize_t and readonly - assert(memb->type == T_PYSSIZET); - assert(memb->flags == READONLY); + assert(memb->type == Py_T_PYSSIZET); + assert(memb->flags == Py_READONLY); dictoffset = memb->offset; } if (strcmp(memb->name, "__vectorcalloffset__") == 0) { // The PyMemberDef must be a Py_ssize_t and readonly - assert(memb->type == T_PYSSIZET); - assert(memb->flags == 
READONLY); + assert(memb->type == Py_T_PYSSIZET); + assert(memb->flags == Py_READONLY); vectorcalloffset = memb->offset; } if (memb->flags & Py_RELATIVE_OFFSET) { @@ -4264,9 +4289,9 @@ _PyType_FromMetaclass_impl( if (_allow_tp_new) { if (PyErr_WarnFormat( PyExc_DeprecationWarning, 1, - "Using PyType_Spec with metaclasses that have custom " - "tp_new is deprecated and will no longer be allowed in " - "Python 3.14.") < 0) { + "Type %s uses PyType_Spec with a metaclass that has custom " + "tp_new. This is deprecated and will no longer be allowed in " + "Python 3.14.", spec->name) < 0) { goto finally; } } @@ -4965,9 +4990,6 @@ type_setattro(PyTypeObject *type, PyObject *name, PyObject *value) return res; } -extern void -_PyDictKeys_DecRef(PyDictKeysObject *keys); - static void type_dealloc_common(PyTypeObject *type) @@ -10178,11 +10200,11 @@ typedef struct { } superobject; static PyMemberDef super_members[] = { - {"__thisclass__", T_OBJECT, offsetof(superobject, type), READONLY, + {"__thisclass__", _Py_T_OBJECT, offsetof(superobject, type), Py_READONLY, "the class invoking super()"}, - {"__self__", T_OBJECT, offsetof(superobject, obj), READONLY, + {"__self__", _Py_T_OBJECT, offsetof(superobject, obj), Py_READONLY, "the instance invoking super(); may be None"}, - {"__self_class__", T_OBJECT, offsetof(superobject, obj_type), READONLY, + {"__self_class__", _Py_T_OBJECT, offsetof(superobject, obj_type), Py_READONLY, "the type of the instance invoking super(); may be None"}, {0} }; diff --git a/Objects/typevarobject.c b/Objects/typevarobject.c index 5605662f0e6d5e..e09e6a62553cff 100644 --- a/Objects/typevarobject.c +++ b/Objects/typevarobject.c @@ -3,7 +3,7 @@ #include "pycore_object.h" // _PyObject_GC_TRACK/UNTRACK #include "pycore_typevarobject.h" #include "pycore_unionobject.h" // _Py_union_type_or -#include "structmember.h" + /*[clinic input] class typevar "typevarobject *" "&_PyTypeVar_Type" @@ -244,10 +244,10 @@ typevar_repr(PyObject *self) } static PyMemberDef typevar_members[] = { - {"__name__", T_OBJECT, offsetof(typevarobject, name), READONLY}, - {"__covariant__", T_BOOL, offsetof(typevarobject, covariant), READONLY}, - {"__contravariant__", T_BOOL, offsetof(typevarobject, contravariant), READONLY}, - {"__infer_variance__", T_BOOL, offsetof(typevarobject, infer_variance), READONLY}, + {"__name__", _Py_T_OBJECT, offsetof(typevarobject, name), Py_READONLY}, + {"__covariant__", Py_T_BOOL, offsetof(typevarobject, covariant), Py_READONLY}, + {"__contravariant__", Py_T_BOOL, offsetof(typevarobject, contravariant), Py_READONLY}, + {"__infer_variance__", Py_T_BOOL, offsetof(typevarobject, infer_variance), Py_READONLY}, {0} }; @@ -555,7 +555,7 @@ paramspecattr_richcompare(PyObject *a, PyObject *b, int op) } static PyMemberDef paramspecattr_members[] = { - {"__origin__", T_OBJECT, offsetof(paramspecattrobject, __origin__), READONLY}, + {"__origin__", _Py_T_OBJECT, offsetof(paramspecattrobject, __origin__), Py_READONLY}, {0} }; @@ -780,11 +780,11 @@ paramspec_repr(PyObject *self) } static PyMemberDef paramspec_members[] = { - {"__name__", T_OBJECT, offsetof(paramspecobject, name), READONLY}, - {"__bound__", T_OBJECT, offsetof(paramspecobject, bound), READONLY}, - {"__covariant__", T_BOOL, offsetof(paramspecobject, covariant), READONLY}, - {"__contravariant__", T_BOOL, offsetof(paramspecobject, contravariant), READONLY}, - {"__infer_variance__", T_BOOL, offsetof(paramspecobject, infer_variance), READONLY}, + {"__name__", _Py_T_OBJECT, offsetof(paramspecobject, name), Py_READONLY}, + {"__bound__", 
_Py_T_OBJECT, offsetof(paramspecobject, bound), Py_READONLY}, + {"__covariant__", Py_T_BOOL, offsetof(paramspecobject, covariant), Py_READONLY}, + {"__contravariant__", Py_T_BOOL, offsetof(paramspecobject, contravariant), Py_READONLY}, + {"__infer_variance__", Py_T_BOOL, offsetof(paramspecobject, infer_variance), Py_READONLY}, {0} }; @@ -1054,7 +1054,7 @@ typevartuple_repr(PyObject *self) } static PyMemberDef typevartuple_members[] = { - {"__name__", T_OBJECT, offsetof(typevartupleobject, name), READONLY}, + {"__name__", _Py_T_OBJECT, offsetof(typevartupleobject, name), Py_READONLY}, {0} }; @@ -1292,7 +1292,7 @@ typealias_repr(PyObject *self) } static PyMemberDef typealias_members[] = { - {"__name__", T_OBJECT, offsetof(typealiasobject, name), READONLY}, + {"__name__", _Py_T_OBJECT, offsetof(typealiasobject, name), Py_READONLY}, {0} }; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index f543c0a65b49f6..c6876d4ca0ef0f 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -211,21 +211,13 @@ static int unicode_is_singleton(PyObject *unicode); #endif -// Return a borrowed reference to the empty string singleton. +// Return a reference to the immortal empty string singleton. static inline PyObject* unicode_get_empty(void) { _Py_DECLARE_STR(empty, ""); return &_Py_STR(empty); } - -// Return a strong reference to the empty string singleton. -static inline PyObject* unicode_new_empty(void) -{ - PyObject *empty = unicode_get_empty(); - return Py_NewRef(empty); -} - /* This dictionary holds all interned unicode strings. Note that references to strings in this dictionary are *not* counted in the string's ob_refcnt. When the interned string reaches a refcnt of 0 the string deallocation @@ -236,15 +228,54 @@ static inline PyObject *get_interned_dict(PyInterpreterState *interp) return _Py_INTERP_CACHED_OBJECT(interp, interned_strings); } +#define INTERNED_STRINGS _PyRuntime.cached_objects.interned_strings + Py_ssize_t _PyUnicode_InternedSize(void) { - return PyObject_Length(get_interned_dict(_PyInterpreterState_GET())); + PyObject *dict = get_interned_dict(_PyInterpreterState_GET()); + return _Py_hashtable_len(INTERNED_STRINGS) + PyDict_GET_SIZE(dict); +} + +static Py_hash_t unicode_hash(PyObject *); +static int unicode_compare_eq(PyObject *, PyObject *); + +static Py_uhash_t +hashtable_unicode_hash(const void *key) +{ + return unicode_hash((PyObject *)key); +} + +static int +hashtable_unicode_compare(const void *key1, const void *key2) +{ + PyObject *obj1 = (PyObject *)key1; + PyObject *obj2 = (PyObject *)key2; + if (obj1 != NULL && obj2 != NULL) { + return unicode_compare_eq(obj1, obj2); + } + else { + return obj1 == obj2; + } } static int init_interned_dict(PyInterpreterState *interp) { + if (_Py_IsMainInterpreter(interp)) { + assert(INTERNED_STRINGS == NULL); + _Py_hashtable_allocator_t hashtable_alloc = {PyMem_RawMalloc, PyMem_RawFree}; + INTERNED_STRINGS = _Py_hashtable_new_full( + hashtable_unicode_hash, + hashtable_unicode_compare, + NULL, + NULL, + &hashtable_alloc + ); + if (INTERNED_STRINGS == NULL) { + return -1; + } + } assert(get_interned_dict(interp) == NULL); PyObject *interned = interned = PyDict_New(); if (interned == NULL) { @@ -263,11 +294,15 @@ clear_interned_dict(PyInterpreterState *interp) Py_DECREF(interned); _Py_INTERP_CACHED_OBJECT(interp, interned_strings) = NULL; } + if (_Py_IsMainInterpreter(interp) && INTERNED_STRINGS != NULL) { + _Py_hashtable_destroy(INTERNED_STRINGS); + INTERNED_STRINGS = NULL; + } } #define _Py_RETURN_UNICODE_EMPTY() \ 
do { \ - return unicode_new_empty(); \ + return unicode_get_empty(); \ } while (0) static inline void @@ -607,7 +642,6 @@ unicode_result(PyObject *unicode) PyObject *empty = unicode_get_empty(); if (unicode != empty) { Py_DECREF(unicode); - Py_INCREF(empty); } return empty; } @@ -619,7 +653,6 @@ unicode_result(PyObject *unicode) Py_UCS1 ch = data[0]; PyObject *latin1_char = LATIN1(ch); if (unicode != latin1_char) { - Py_INCREF(latin1_char); Py_DECREF(unicode); } return latin1_char; @@ -1156,7 +1189,7 @@ PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar) { /* Optimization for empty strings */ if (size == 0) { - return unicode_new_empty(); + return unicode_get_empty(); } PyObject *obj; @@ -1223,6 +1256,7 @@ PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar) _PyUnicode_STATE(unicode).kind = kind; _PyUnicode_STATE(unicode).compact = 1; _PyUnicode_STATE(unicode).ascii = is_ascii; + _PyUnicode_STATE(unicode).statically_allocated = 0; if (is_ascii) { ((char*)data)[size] = 0; } @@ -1553,7 +1587,9 @@ unicode_dealloc(PyObject *unicode) * we accidentally decref an immortal string out of existence. Since * the string is an immortal object, just re-set the reference count. */ - if (PyUnicode_CHECK_INTERNED(unicode)) { + if (PyUnicode_CHECK_INTERNED(unicode) + || _PyUnicode_STATE(unicode).statically_allocated) + { _Py_SetImmortal(unicode); return; } @@ -1623,7 +1659,7 @@ unicode_resize(PyObject **p_unicode, Py_ssize_t length) return 0; if (length == 0) { - PyObject *empty = unicode_new_empty(); + PyObject *empty = unicode_get_empty(); Py_SETREF(*p_unicode, empty); return 0; } @@ -1718,7 +1754,9 @@ unicode_write_cstr(PyObject *unicode, Py_ssize_t index, static PyObject* get_latin1_char(Py_UCS1 ch) { - return Py_NewRef(LATIN1(ch)); + PyObject *o = LATIN1(ch); + assert(_Py_IsImmortal(o)); + return o; } static PyObject* @@ -1845,7 +1883,7 @@ PyUnicode_FromStringAndSize(const char *u, Py_ssize_t size) "NULL string with positive size with NULL passed to PyUnicode_FromStringAndSize"); return NULL; } - return unicode_new_empty(); + return unicode_get_empty(); } PyObject * @@ -10215,7 +10253,7 @@ replace(PyObject *self, PyObject *str1, } new_size = slen + n * (len2 - len1); if (new_size == 0) { - u = unicode_new_empty(); + u = unicode_get_empty(); goto done; } if (new_size > (PY_SSIZE_T_MAX / rkind)) { @@ -14459,7 +14497,7 @@ unicode_new_impl(PyTypeObject *type, PyObject *x, const char *encoding, { PyObject *unicode; if (x == NULL) { - unicode = unicode_new_empty(); + unicode = unicode_get_empty(); } else if (encoding == NULL && errors == NULL) { unicode = PyObject_Str(x); @@ -14503,6 +14541,7 @@ unicode_subtype_new(PyTypeObject *type, PyObject *unicode) _PyUnicode_STATE(self).kind = kind; _PyUnicode_STATE(self).compact = 0; _PyUnicode_STATE(self).ascii = _PyUnicode_STATE(unicode).ascii; + _PyUnicode_STATE(self).statically_allocated = 0; _PyUnicode_UTF8_LENGTH(self) = 0; _PyUnicode_UTF8(self) = NULL; _PyUnicode_DATA_ANY(self) = NULL; @@ -14726,6 +14765,23 @@ _PyUnicode_InternInPlace(PyInterpreterState *interp, PyObject **p) return; } + /* Look in the global cache first. */ + PyObject *r = (PyObject *)_Py_hashtable_get(INTERNED_STRINGS, s); + if (r != NULL && r != s) { + Py_SETREF(*p, Py_NewRef(r)); + return; + } + + /* Handle statically allocated strings. */ + if (_PyUnicode_STATE(s).statically_allocated) { + assert(_Py_IsImmortal(s)); + if (_Py_hashtable_set(INTERNED_STRINGS, s, s) == 0) { + _PyUnicode_STATE(*p).interned = SSTATE_INTERNED_IMMORTAL_STATIC; + } + return; + } + + /* Look in the per-interpreter cache. 
*/ PyObject *interned = get_interned_dict(interp); assert(interned != NULL); @@ -14741,9 +14797,11 @@ _PyUnicode_InternInPlace(PyInterpreterState *interp, PyObject **p) } if (_Py_IsImmortal(s)) { + // XXX Restrict this to the main interpreter? _PyUnicode_STATE(*p).interned = SSTATE_INTERNED_IMMORTAL_STATIC; - return; + return; } + #ifdef Py_REF_DEBUG /* The reference count value excluding the 2 references from the interned dictionary should be excluded from the RefTotal. The @@ -14818,6 +14876,7 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp) PyObject *s, *ignored_value; while (PyDict_Next(interned, &pos, &s, &ignored_value)) { assert(PyUnicode_IS_READY(s)); + int shared = 0; switch (PyUnicode_CHECK_INTERNED(s)) { case SSTATE_INTERNED_IMMORTAL: // Skip the Immortal Instance check and restore @@ -14829,6 +14888,14 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp) #endif break; case SSTATE_INTERNED_IMMORTAL_STATIC: + /* It is shared between interpreters, so we should unmark it + only when this is the last interpreter in which it's + interned. We immortalize all the statically initialized + strings during startup, so we can rely on the + main interpreter to be the last one. */ + if (!_Py_IsMainInterpreter(interp)) { + shared = 1; + } break; case SSTATE_INTERNED_MORTAL: /* fall through */ @@ -14837,7 +14904,9 @@ _PyUnicode_ClearInterned(PyInterpreterState *interp) default: Py_UNREACHABLE(); } - _PyUnicode_STATE(s).interned = SSTATE_NOT_INTERNED; + if (!shared) { + _PyUnicode_STATE(s).interned = SSTATE_NOT_INTERNED; + } } #ifdef INTERNED_STATS fprintf(stderr, @@ -14917,8 +14986,7 @@ unicode_ascii_iter_next(unicodeiterobject *it) Py_UCS1 chr = (Py_UCS1)PyUnicode_READ(PyUnicode_1BYTE_KIND, data, it->it_index); it->it_index++; - PyObject *item = (PyObject*)&_Py_SINGLETON(strings).ascii[chr]; - return Py_NewRef(item); + return (PyObject*)&_Py_SINGLETON(strings).ascii[chr]; } it->it_seq = NULL; Py_DECREF(seq); @@ -14948,7 +15016,7 @@ unicodeiter_reduce(unicodeiterobject *it, PyObject *Py_UNUSED(ignored)) if (it->it_seq != NULL) { return Py_BuildValue("N(O)n", iter, it->it_seq, it->it_index); } else { - PyObject *u = unicode_new_empty(); + PyObject *u = unicode_get_empty(); if (u == NULL) { Py_XDECREF(iter); return NULL; @@ -15178,10 +15246,13 @@ init_fs_codec(PyInterpreterState *interp) /* Set Py_FileSystemDefaultEncoding and Py_FileSystemDefaultEncodeErrors global configuration variables. 
*/ - if (_Py_SetFileSystemEncoding(fs_codec->encoding, - fs_codec->errors) < 0) { - PyErr_NoMemory(); - return -1; + if (_Py_IsMainInterpreter(interp)) { + + if (_Py_SetFileSystemEncoding(fs_codec->encoding, + fs_codec->errors) < 0) { + PyErr_NoMemory(); + return -1; + } } return 0; } diff --git a/Objects/unionobject.c b/Objects/unionobject.c index 269f46914f263d..347945a4c45972 100644 --- a/Objects/unionobject.c +++ b/Objects/unionobject.c @@ -3,7 +3,7 @@ #include "pycore_object.h" // _PyObject_GC_TRACK/UNTRACK #include "pycore_typevarobject.h" // _PyTypeAlias_Type #include "pycore_unionobject.h" -#include "structmember.h" + static PyObject *make_union(PyObject *); @@ -273,7 +273,7 @@ union_repr(PyObject *self) } static PyMemberDef union_members[] = { - {"__args__", T_OBJECT, offsetof(unionobject, args), READONLY}, + {"__args__", _Py_T_OBJECT, offsetof(unionobject, args), Py_READONLY}, {0} }; diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index e9563729bf82ba..1814c6eb69c29b 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -2,7 +2,7 @@ #include "pycore_modsupport.h" // _PyArg_NoKwnames() #include "pycore_object.h" // _PyObject_GET_WEAKREFS_LISTPTR() #include "pycore_weakref.h" // _PyWeakref_GET_REF() -#include "structmember.h" // PyMemberDef + #define GET_WEAKREFS_LISTPTR(o) \ @@ -351,7 +351,7 @@ weakref___init__(PyObject *self, PyObject *args, PyObject *kwargs) static PyMemberDef weakref_members[] = { - {"__callback__", T_OBJECT, offsetof(PyWeakReference, wr_callback), READONLY}, + {"__callback__", _Py_T_OBJECT, offsetof(PyWeakReference, wr_callback), Py_READONLY}, {NULL} /* Sentinel */ }; diff --git a/PC/python3dll.c b/PC/python3dll.c index 0b54c5a707231c..64dfbba3e424a1 100755 --- a/PC/python3dll.c +++ b/PC/python3dll.c @@ -172,7 +172,9 @@ EXPORT_FUNC(PyDict_Copy) EXPORT_FUNC(PyDict_DelItem) EXPORT_FUNC(PyDict_DelItemString) EXPORT_FUNC(PyDict_GetItem) +EXPORT_FUNC(PyDict_GetItemRef) EXPORT_FUNC(PyDict_GetItemString) +EXPORT_FUNC(PyDict_GetItemStringRef) EXPORT_FUNC(PyDict_GetItemWithError) EXPORT_FUNC(PyDict_Items) EXPORT_FUNC(PyDict_Keys) diff --git a/PC/winreg.c b/PC/winreg.c index 5252f78a9bdf72..ed6258d2b33e56 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -15,7 +15,7 @@ #include "Python.h" #include "pycore_object.h" // _PyObject_Init() #include "pycore_moduleobject.h" -#include "structmember.h" // PyMemberDef + #include #if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) || defined(MS_WINDOWS_GAMES) @@ -352,7 +352,7 @@ static struct PyMethodDef PyHKEY_methods[] = { #define OFF(e) offsetof(PyHKEYObject, e) static PyMemberDef PyHKEY_memberlist[] = { - {"handle", T_INT, OFF(hkey), READONLY}, + {"handle", Py_T_INT, OFF(hkey), Py_READONLY}, {NULL} /* Sentinel */ }; diff --git a/PCbuild/_freeze_module.vcxproj b/PCbuild/_freeze_module.vcxproj index 87f08e857d0e29..e247637a0dfe5c 100644 --- a/PCbuild/_freeze_module.vcxproj +++ b/PCbuild/_freeze_module.vcxproj @@ -192,6 +192,7 @@ + diff --git a/PCbuild/_freeze_module.vcxproj.filters b/PCbuild/_freeze_module.vcxproj.filters index 3dae8cca7a2d56..2a0e009308022b 100644 --- a/PCbuild/_freeze_module.vcxproj.filters +++ b/PCbuild/_freeze_module.vcxproj.filters @@ -124,6 +124,9 @@ Source Files + + Source Files + Source Files diff --git a/PCbuild/_testcapi.vcxproj b/PCbuild/_testcapi.vcxproj index de17d74c52e56f..8c0fd0cf052b0e 100644 --- a/PCbuild/_testcapi.vcxproj +++ b/PCbuild/_testcapi.vcxproj @@ -99,7 +99,9 @@ + + diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters 
index 637f7178d39d0e..87d33ebe28e475 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -27,9 +27,15 @@ Source Files + + Source Files + Source Files + + Source Files + Source Files diff --git a/PCbuild/_testclinic.vcxproj b/PCbuild/_testclinic.vcxproj new file mode 100644 index 00000000000000..e319b3c0f42e0f --- /dev/null +++ b/PCbuild/_testclinic.vcxproj @@ -0,0 +1,110 @@ + + + + + Debug + ARM + + + Debug + ARM64 + + + Debug + Win32 + + + Debug + x64 + + + PGInstrument + ARM + + + PGInstrument + ARM64 + + + PGInstrument + Win32 + + + PGInstrument + x64 + + + PGUpdate + ARM + + + PGUpdate + ARM64 + + + PGUpdate + Win32 + + + PGUpdate + x64 + + + Release + ARM + + + Release + ARM64 + + + Release + Win32 + + + Release + x64 + + + + {A840DDFB-ED50-484B-B527-B32E7CF90FD5} + _testclinic + Win32Proj + false + + + + + DynamicLibrary + NotSet + + + + .pyd + + + + + + + + + + <_ProjectFileVersion>10.0.30319.1 + + + + + + + + + + {cf7ac3d1-e2df-41d2-bea6-1e2556cdea26} + false + + + + + + \ No newline at end of file diff --git a/PCbuild/_testclinic.vcxproj.filters b/PCbuild/_testclinic.vcxproj.filters new file mode 100644 index 00000000000000..4a2987eb27b223 --- /dev/null +++ b/PCbuild/_testclinic.vcxproj.filters @@ -0,0 +1,21 @@ + + + + + {5b0a9282-a01c-4b83-9fd4-6deb6c558f9c} + + + {6a89c8a9-5b51-4525-ac5c-7d0a22f9657e} + + + + + Source Files + + + + + Resource Files + + + \ No newline at end of file diff --git a/PCbuild/find_python.bat b/PCbuild/find_python.bat index 7af5503d80a0fc..d3f62c93869003 100644 --- a/PCbuild/find_python.bat +++ b/PCbuild/find_python.bat @@ -52,7 +52,7 @@ @if "%_Py_NUGET%"=="" (set _Py_NUGET=%_Py_EXTERNALS_DIR%\nuget.exe) @if "%_Py_NUGET_URL%"=="" (set _Py_NUGET_URL=https://aka.ms/nugetclidl) @if NOT exist "%_Py_NUGET%" ( - @echo Downloading nuget... + @if not "%_Py_Quiet%"=="1" @echo Downloading nuget... @rem NB: Must use single quotes around NUGET here, NOT double! @rem Otherwise, a space in the path would break things @rem If it fails, retry with any available copy of Python @@ -63,7 +63,11 @@ ) @if not "%_Py_Quiet%"=="1" @echo Installing Python via nuget... 
-@"%_Py_NUGET%" install pythonx86 -ExcludeVersion -OutputDirectory "%_Py_EXTERNALS_DIR%" +@if not "%_Py_Quiet%"=="1" ( + @"%_Py_NUGET%" install pythonx86 -ExcludeVersion -OutputDirectory "%_Py_EXTERNALS_DIR%" +) else ( + @"%_Py_NUGET%" install pythonx86 -Verbosity quiet -ExcludeVersion -OutputDirectory "%_Py_EXTERNALS_DIR%" +) @rem Quote it here; it's not quoted later because "py -x.y" wouldn't work @if not errorlevel 1 (set PYTHON="%_Py_EXTERNALS_DIR%\pythonx86\tools\python.exe") & (set _Py_Python_Source=found on nuget.org) & goto :found diff --git a/PCbuild/pcbuild.proj b/PCbuild/pcbuild.proj index 46d6961eea0ac5..7735c22f8ae1b3 100644 --- a/PCbuild/pcbuild.proj +++ b/PCbuild/pcbuild.proj @@ -77,7 +77,7 @@ - + diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index f8f1b83db97e8e..ca70213755310c 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -46,6 +46,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "python", "python.vcxproj", {EB6E69DD-04BF-4543-9B92-49FAABCEAC2E} = {EB6E69DD-04BF-4543-9B92-49FAABCEAC2E} {16BFE6F0-22EF-40B5-B831-7E937119EF10} = {16BFE6F0-22EF-40B5-B831-7E937119EF10} {FCBE1EF2-E0F0-40B1-88B5-00A35D378742} = {FCBE1EF2-E0F0-40B1-88B5-00A35D378742} + {A840DDFB-ED50-484B-B527-B32E7CF90FD5} = {A840DDFB-ED50-484B-B527-B32E7CF90FD5} EndProjectSection EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythoncore", "pythoncore.vcxproj", "{CF7AC3D1-E2DF-41D2-BEA6-1E2556CDEA26}" @@ -76,6 +77,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_ssl", "_ssl.vcxproj", "{C6 EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testcapi", "_testcapi.vcxproj", "{6901D91C-6E48-4BB7-9FEC-700C8131DF1D}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testclinic", "_testclinic.vcxproj", "{A840DDFB-ED50-484B-B527-B32E7CF90FD5}" +EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testinternalcapi", "_testinternalcapi.vcxproj", "{900342D7-516A-4469-B1AD-59A66E49A25F}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_testimportmultiple", "_testimportmultiple.vcxproj", "{36D0C52C-DF4E-45D0-8BC7-E294C3ABC781}" @@ -590,6 +593,38 @@ Global {6901D91C-6E48-4BB7-9FEC-700C8131DF1D}.Release|Win32.Build.0 = Release|Win32 {6901D91C-6E48-4BB7-9FEC-700C8131DF1D}.Release|x64.ActiveCfg = Release|x64 {6901D91C-6E48-4BB7-9FEC-700C8131DF1D}.Release|x64.Build.0 = Release|x64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Debug|ARM.ActiveCfg = Debug|ARM + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Debug|ARM.Build.0 = Debug|ARM + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Debug|ARM64.ActiveCfg = Debug|ARM64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Debug|ARM64.Build.0 = Debug|ARM64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Debug|Win32.ActiveCfg = Debug|Win32 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Debug|Win32.Build.0 = Debug|Win32 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Debug|x64.ActiveCfg = Debug|x64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Debug|x64.Build.0 = Debug|x64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGInstrument|ARM.ActiveCfg = PGInstrument|ARM + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGInstrument|ARM.Build.0 = PGInstrument|ARM + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGInstrument|ARM64.ActiveCfg = PGInstrument|ARM64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGInstrument|ARM64.Build.0 = PGInstrument|ARM64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGInstrument|Win32.ActiveCfg = PGInstrument|Win32 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGInstrument|Win32.Build.0 = PGInstrument|Win32 + 
{A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGInstrument|x64.ActiveCfg = PGInstrument|x64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGInstrument|x64.Build.0 = PGInstrument|x64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGUpdate|ARM.ActiveCfg = PGUpdate|ARM + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGUpdate|ARM.Build.0 = PGUpdate|ARM + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGUpdate|ARM64.ActiveCfg = PGUpdate|ARM64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGUpdate|ARM64.Build.0 = PGUpdate|ARM64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGUpdate|Win32.ActiveCfg = PGUpdate|Win32 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGUpdate|Win32.Build.0 = PGUpdate|Win32 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGUpdate|x64.ActiveCfg = PGUpdate|x64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.PGUpdate|x64.Build.0 = PGUpdate|x64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Release|ARM.ActiveCfg = Release|ARM + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Release|ARM.Build.0 = Release|ARM + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Release|ARM64.ActiveCfg = Release|ARM64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Release|ARM64.Build.0 = Release|ARM64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Release|Win32.ActiveCfg = Release|Win32 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Release|Win32.Build.0 = Release|Win32 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Release|x64.ActiveCfg = Release|x64 + {A840DDFB-ED50-484B-B527-B32E7CF90FD5}.Release|x64.Build.0 = Release|x64 {900342D7-516A-4469-B1AD-59A66E49A25F}.Debug|ARM.ActiveCfg = Debug|ARM {900342D7-516A-4469-B1AD-59A66E49A25F}.Debug|ARM.Build.0 = Debug|ARM {900342D7-516A-4469-B1AD-59A66E49A25F}.Debug|ARM64.ActiveCfg = Debug|ARM64 diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 760962e4c4b6a9..bfe59acf12a69d 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -261,6 +261,7 @@ + @@ -520,6 +521,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index aaebe1908e30da..0a8b0c3faf51e1 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -687,6 +687,9 @@ Include\internal + + Include\internal + Include\internal @@ -1145,6 +1148,9 @@ Python + + Python + Python diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt index f0de142f0573b9..1d3b45b7912a34 100644 --- a/PCbuild/readme.txt +++ b/PCbuild/readme.txt @@ -143,6 +143,7 @@ _overlapped _socket _testbuffer _testcapi +_testclinic _testconsole _testimportmultiple _testmultiphase @@ -250,9 +251,11 @@ against a profiling library and contain extra debug information. The PGUpdate configuration takes the profiling data and generates optimized binaries. -The build_pgo.bat script automates the creation of optimized binaries. -It creates the PGI files, runs the unit test suite or PyBench with the -PGI python, and finally creates the optimized files. +The build.bat script has an argument `--pgo` that automates the creation +of optimized binaries. +It creates the PGI files, runs the unit test suite with the PGI python, +and finally creates the optimized files. +You can customize the job used for profiling with the `--pgo-job` option.
See https://docs.microsoft.com/en-us/cpp/build/profile-guided-optimizations diff --git a/PCbuild/regen.targets b/PCbuild/regen.targets index 2dd786e5e82e36..c1189c883b667c 100644 --- a/PCbuild/regen.targets +++ b/PCbuild/regen.targets @@ -59,9 +59,7 @@ Inputs="@(_OpcodeSources)" Outputs="@(_OpcodeOutputs)" DependsOnTargets="FindPythonForBuild"> - - diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index e9665dd808af39..2a36610527f898 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -897,7 +897,7 @@ def visitModule(self, mod): } static PyMemberDef ast_type_members[] = { - {"__dictoffset__", T_PYSSIZET, offsetof(AST_object, dict), READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(AST_object, dict), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -1393,7 +1393,7 @@ class PartingShots(StaticVisitor): int starting_recursion_depth; /* Be careful here to prevent overflow. */ - int COMPILER_STACK_FRAME_SCALE = 3; + int COMPILER_STACK_FRAME_SCALE = 2; PyThreadState *tstate = _PyThreadState_GET(); if (!tstate) { return 0; @@ -1542,7 +1542,6 @@ def generate_module_def(mod, metadata, f, internal_h): #include "pycore_ceval.h" // _Py_EnterRecursiveCall #include "pycore_interp.h" // _PyInterpreterState.ast #include "pycore_pystate.h" // _PyInterpreterState_GET() - #include "structmember.h" #include // Forward declaration diff --git a/Parser/myreadline.c b/Parser/myreadline.c index 7074aba74b728c..815387388218c6 100644 --- a/Parser/myreadline.c +++ b/Parser/myreadline.c @@ -20,7 +20,9 @@ #endif /* MS_WINDOWS */ -PyThreadState* _PyOS_ReadlineTState = NULL; +// Export the symbol since it's used by the readline shared extension +PyAPI_DATA(PyThreadState*) _PyOS_ReadlineTState; +PyThreadState *_PyOS_ReadlineTState = NULL; static PyThread_type_lock _PyOS_ReadlineLock = NULL; diff --git a/Parser/parser.c b/Parser/parser.c index f2ea8f59b00567..44312cff125ae7 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -17,57 +17,59 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) { - {"if", 642}, - {"as", 640}, - {"in", 651}, - {"or", 574}, - {"is", 582}, + {"if", 656}, + {"as", 654}, + {"in", 667}, + {"or", 581}, + {"is", 589}, {NULL, -1}, }, (KeywordToken[]) { - {"del", 604}, - {"def", 652}, - {"for", 650}, - {"try", 624}, - {"and", 575}, - {"not", 581}, + {"del", 613}, + {"def", 669}, + {"for", 666}, + {"try", 638}, + {"and", 582}, + {"not", 588}, {NULL, -1}, }, (KeywordToken[]) { - {"from", 608}, + {"from", 618}, {"pass", 504}, - {"with", 615}, - {"elif", 644}, - {"else", 645}, - {"None", 602}, - {"True", 601}, + {"with", 629}, + {"elif", 658}, + {"else", 659}, + {"None", 611}, + {"True", 610}, {NULL, -1}, }, (KeywordToken[]) { - {"raise", 522}, - {"yield", 573}, + {"raise", 525}, + {"yield", 580}, {"break", 508}, - {"class", 654}, - {"while", 647}, - {"False", 603}, + {"async", 668}, + {"class", 671}, + {"while", 661}, + {"False", 612}, + {"await", 590}, {NULL, -1}, }, (KeywordToken[]) { - {"return", 519}, - {"import", 607}, - {"assert", 526}, - {"global", 523}, - {"except", 637}, - {"lambda", 600}, + {"return", 522}, + {"import", 617}, + {"assert", 529}, + {"global", 526}, + {"except", 651}, + {"lambda", 609}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 633}, + {"finally", 647}, {NULL, -1}, }, (KeywordToken[]) { {"continue", 509}, - {"nonlocal", 524}, + {"nonlocal", 527}, {NULL, -1}, }, }; @@ -1820,7 +1822,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, 
"&'return' return_stmt")); stmt_ty return_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 519) // token='return' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 522) // token='return' && (return_stmt_var = return_stmt_rule(p)) // return_stmt ) @@ -1862,7 +1864,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt")); stmt_ty raise_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 522) // token='raise' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 525) // token='raise' && (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt ) @@ -1916,7 +1918,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); stmt_ty del_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 604) // token='del' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 613) // token='del' && (del_stmt_var = del_stmt_rule(p)) // del_stmt ) @@ -1937,7 +1939,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt")); stmt_ty yield_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 573) // token='yield' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 580) // token='yield' && (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt ) @@ -1958,7 +1960,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt")); stmt_ty assert_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 526) // token='assert' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 529) // token='assert' && (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt ) @@ -2045,7 +2047,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt")); stmt_ty global_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 523) // token='global' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 526) // token='global' && (global_stmt_var = global_stmt_rule(p)) // global_stmt ) @@ -2066,7 +2068,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt")); stmt_ty nonlocal_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 524) // token='nonlocal' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 527) // token='nonlocal' && (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt ) @@ -2087,11 +2089,11 @@ simple_stmt_rule(Parser *p) } // compound_stmt: -// | &('def' | '@' | ASYNC) function_def +// | &('def' | '@' | 'async') function_def // | &'if' if_stmt // | &('class' | '@') class_def -// | &('with' | ASYNC) with_stmt -// | &('for' | ASYNC) for_stmt +// | &('with' | 'async') with_stmt +// | &('for' | 'async') for_stmt // | &'try' try_stmt // | &'while' while_stmt // | match_stmt @@ -2108,12 +2110,12 @@ compound_stmt_rule(Parser *p) } stmt_ty _res = NULL; int _mark = p->mark; - { // &('def' | '@' | ASYNC) function_def + { // &('def' | '@' | 'async') function_def if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); + D(fprintf(stderr, "%*c> 
compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | 'async') function_def")); stmt_ty function_def_var; if ( _PyPegen_lookahead(1, _tmp_8_rule, p) @@ -2121,13 +2123,13 @@ compound_stmt_rule(Parser *p) (function_def_var = function_def_rule(p)) // function_def ) { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | 'async') function_def")); _res = function_def_var; goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('def' | '@' | 'async') function_def")); } { // &'if' if_stmt if (p->error_indicator) { @@ -2137,7 +2139,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 642) // token='if' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 656) // token='if' && (if_stmt_var = if_stmt_rule(p)) // if_stmt ) @@ -2171,12 +2173,12 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('class' | '@') class_def")); } - { // &('with' | ASYNC) with_stmt + { // &('with' | 'async') with_stmt if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt")); + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | 'async') with_stmt")); stmt_ty with_stmt_var; if ( _PyPegen_lookahead(1, _tmp_10_rule, p) @@ -2184,20 +2186,20 @@ compound_stmt_rule(Parser *p) (with_stmt_var = with_stmt_rule(p)) // with_stmt ) { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt")); + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('with' | 'async') with_stmt")); _res = with_stmt_var; goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('with' | ASYNC) with_stmt")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&('with' | 'async') with_stmt")); } - { // &('for' | ASYNC) for_stmt + { // &('for' | 'async') for_stmt if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt")); + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | 'async') for_stmt")); stmt_ty for_stmt_var; if ( _PyPegen_lookahead(1, _tmp_11_rule, p) @@ -2205,13 +2207,13 @@ compound_stmt_rule(Parser *p) (for_stmt_var = for_stmt_rule(p)) // for_stmt ) { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt")); + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('for' | 'async') for_stmt")); _res = for_stmt_var; goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('for' | ASYNC) for_stmt")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('for' | 'async') for_stmt")); } { // &'try' try_stmt if (p->error_indicator) { @@ -2221,7 +2223,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 624) // token='try' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 638) // token='try' && (try_stmt_var = try_stmt_rule(p)) // try_stmt ) @@ -2242,7 +2244,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 647) // token='while' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 661) // token='while' && (while_stmt_var = while_stmt_rule(p)) // while_stmt ) @@ -2939,7 +2941,7 @@ return_stmt_rule(Parser *p) Token * _keyword; void *a; if ( - (_keyword = _PyPegen_expect_token(p, 519)) // token='return' + (_keyword = _PyPegen_expect_token(p, 522)) // token='return' && (a = star_expressions_rule(p), !p->error_indicator) // star_expressions? 
) @@ -3005,7 +3007,7 @@ raise_stmt_rule(Parser *p) expr_ty a; void *b; if ( - (_keyword = _PyPegen_expect_token(p, 522)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 525)) // token='raise' && (a = expression_rule(p)) // expression && @@ -3042,7 +3044,7 @@ raise_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 522)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 525)) // token='raise' ) { D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise'")); @@ -3105,7 +3107,7 @@ global_stmt_rule(Parser *p) Token * _keyword; asdl_expr_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 523)) // token='global' + (_keyword = _PyPegen_expect_token(p, 526)) // token='global' && (a = (asdl_expr_seq*)_gather_19_rule(p)) // ','.NAME+ ) @@ -3170,7 +3172,7 @@ nonlocal_stmt_rule(Parser *p) Token * _keyword; asdl_expr_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 524)) // token='nonlocal' + (_keyword = _PyPegen_expect_token(p, 527)) // token='nonlocal' && (a = (asdl_expr_seq*)_gather_21_rule(p)) // ','.NAME+ ) @@ -3235,7 +3237,7 @@ del_stmt_rule(Parser *p) Token * _keyword; asdl_expr_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 604)) // token='del' + (_keyword = _PyPegen_expect_token(p, 613)) // token='del' && (a = del_targets_rule(p)) // del_targets && @@ -3384,7 +3386,7 @@ assert_stmt_rule(Parser *p) expr_ty a; void *b; if ( - (_keyword = _PyPegen_expect_token(p, 526)) // token='assert' + (_keyword = _PyPegen_expect_token(p, 529)) // token='assert' && (a = expression_rule(p)) // expression && @@ -3528,7 +3530,7 @@ import_name_rule(Parser *p) Token * _keyword; asdl_alias_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 607)) // token='import' + (_keyword = _PyPegen_expect_token(p, 617)) // token='import' && (a = dotted_as_names_rule(p)) // dotted_as_names ) @@ -3598,13 +3600,13 @@ import_from_rule(Parser *p) expr_ty b; asdl_alias_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 608)) // token='from' + (_keyword = _PyPegen_expect_token(p, 618)) // token='from' && (a = _loop0_25_rule(p)) // (('.' | '...'))* && (b = dotted_name_rule(p)) // dotted_name && - (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 617)) // token='import' && (c = import_from_targets_rule(p)) // import_from_targets ) @@ -3642,11 +3644,11 @@ import_from_rule(Parser *p) asdl_seq * a; asdl_alias_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 608)) // token='from' + (_keyword = _PyPegen_expect_token(p, 618)) // token='from' && (a = _loop1_26_rule(p)) // (('.' | '...'))+ && - (_keyword_1 = _PyPegen_expect_token(p, 607)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 617)) // token='import' && (b = import_from_targets_rule(p)) // import_from_targets ) @@ -4406,7 +4408,7 @@ class_def_raw_rule(Parser *p) asdl_stmt_seq* c; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 654)) // token='class' + (_keyword = _PyPegen_expect_token(p, 671)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && @@ -4516,7 +4518,7 @@ function_def_rule(Parser *p) // function_def_raw: // | invalid_def_raw // | 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block -// | ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block +// | 'async' 'def' NAME type_params? &&'(' params? 
')' ['->' expression] &&':' func_type_comment? block static stmt_ty function_def_raw_rule(Parser *p) { @@ -4575,7 +4577,7 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 652)) // token='def' + (_keyword = _PyPegen_expect_token(p, 669)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4618,27 +4620,27 @@ function_def_raw_rule(Parser *p) D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block")); } - { // ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block + { // 'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block")); + D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block")); Token * _keyword; + Token * _keyword_1; Token * _literal; Token * _literal_1; Token * _literal_2; void *a; - Token * async_var; asdl_stmt_seq* b; expr_ty n; void *params; void *t; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + (_keyword = _PyPegen_expect_token(p, 668)) // token='async' && - (_keyword = _PyPegen_expect_token(p, 652)) // token='def' + (_keyword_1 = _PyPegen_expect_token(p, 669)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4659,7 +4661,7 @@ function_def_raw_rule(Parser *p) (b = block_rule(p)) // block ) { - D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block")); + D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { p->level--; @@ -4679,7 +4681,7 @@ function_def_raw_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? 
block")); } _res = NULL; done: @@ -5992,7 +5994,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='if' + (_keyword = _PyPegen_expect_token(p, 656)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6037,7 +6039,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='if' + (_keyword = _PyPegen_expect_token(p, 656)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6133,7 +6135,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 658)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6178,7 +6180,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 658)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6260,7 +6262,7 @@ else_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 645)) // token='else' + (_keyword = _PyPegen_expect_token(p, 659)) // token='else' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6340,7 +6342,7 @@ while_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 647)) // token='while' + (_keyword = _PyPegen_expect_token(p, 661)) // token='while' && (a = named_expression_rule(p)) // named_expression && @@ -6382,7 +6384,7 @@ while_stmt_rule(Parser *p) // for_stmt: // | invalid_for_stmt // | 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? -// | ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? +// | 'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? // | invalid_for_target static stmt_ty for_stmt_rule(Parser *p) @@ -6441,11 +6443,11 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 650)) // token='for' + (_keyword = _PyPegen_expect_token(p, 666)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 667)) // token='in' && (_cut_var = 1) && @@ -6486,30 +6488,30 @@ for_stmt_rule(Parser *p) return NULL; } } - { // ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? + { // 'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); + D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? 
block else_block?")); int _cut_var = 0; Token * _keyword; Token * _keyword_1; + Token * _keyword_2; Token * _literal; - Token * async_var; asdl_stmt_seq* b; void *el; expr_ty ex; expr_ty t; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + (_keyword = _PyPegen_expect_token(p, 668)) // token='async' && - (_keyword = _PyPegen_expect_token(p, 650)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 666)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 667)) // token='in' && (_cut_var = 1) && @@ -6524,7 +6526,7 @@ for_stmt_rule(Parser *p) (el = else_block_rule(p), !p->error_indicator) // else_block? ) { - D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); + D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { p->level--; @@ -6544,7 +6546,7 @@ for_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); if (_cut_var) { p->level--; return NULL; @@ -6579,8 +6581,8 @@ for_stmt_rule(Parser *p) // | invalid_with_stmt_indent // | 'with' '(' ','.with_item+ ','? ')' ':' block // | 'with' ','.with_item+ ':' TYPE_COMMENT? block -// | ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block -// | ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block +// | 'async' 'with' '(' ','.with_item+ ','? ')' ':' block +// | 'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block // | invalid_with_stmt static stmt_ty with_stmt_rule(Parser *p) @@ -6638,7 +6640,7 @@ with_stmt_rule(Parser *p) asdl_withitem_seq* a; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 615)) // token='with' + (_keyword = _PyPegen_expect_token(p, 629)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6687,7 +6689,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 615)) // token='with' + (_keyword = _PyPegen_expect_token(p, 629)) // token='with' && (a = (asdl_withitem_seq*)_gather_54_rule(p)) // ','.with_item+ && @@ -6720,25 +6722,25 @@ with_stmt_rule(Parser *p) D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); } - { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block + { // 'async' 'with' '(' ','.with_item+ ','? ')' ':' block if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'with' '(' ','.with_item+ ','? 
')' ':' block")); Token * _keyword; + Token * _keyword_1; Token * _literal; Token * _literal_1; Token * _literal_2; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings asdl_withitem_seq* a; - Token * async_var; asdl_stmt_seq* b; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + (_keyword = _PyPegen_expect_token(p, 668)) // token='async' && - (_keyword = _PyPegen_expect_token(p, 615)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 629)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6753,7 +6755,7 @@ with_stmt_rule(Parser *p) (b = block_rule(p)) // block ) { - D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'with' '(' ','.with_item+ ','? ')' ':' block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { p->level--; @@ -6773,24 +6775,24 @@ with_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'with' '(' ','.with_item+ ','? ')' ':' block")); } - { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block + { // 'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block")); Token * _keyword; + Token * _keyword_1; Token * _literal; asdl_withitem_seq* a; - Token * async_var; asdl_stmt_seq* b; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + (_keyword = _PyPegen_expect_token(p, 668)) // token='async' && - (_keyword = _PyPegen_expect_token(p, 615)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 629)) // token='with' && (a = (asdl_withitem_seq*)_gather_58_rule(p)) // ','.with_item+ && @@ -6801,7 +6803,7 @@ with_stmt_rule(Parser *p) (b = block_rule(p)) // block ) { - D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { p->level--; @@ -6821,7 +6823,7 @@ with_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'with' ','.with_item+ ':' TYPE_COMMENT? 
block")); } if (p->call_invalid_rules) { // invalid_with_stmt if (p->error_indicator) { @@ -6877,7 +6879,7 @@ with_item_rule(Parser *p) if ( (e = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (t = star_target_rule(p)) // star_target && @@ -7003,7 +7005,7 @@ try_stmt_rule(Parser *p) asdl_stmt_seq* b; asdl_stmt_seq* f; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='try' + (_keyword = _PyPegen_expect_token(p, 638)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7047,7 +7049,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='try' + (_keyword = _PyPegen_expect_token(p, 638)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7095,7 +7097,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='try' + (_keyword = _PyPegen_expect_token(p, 638)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7194,7 +7196,7 @@ except_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 637)) // token='except' + (_keyword = _PyPegen_expect_token(p, 651)) // token='except' && (e = expression_rule(p)) // expression && @@ -7237,7 +7239,7 @@ except_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 637)) // token='except' + (_keyword = _PyPegen_expect_token(p, 651)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7349,7 +7351,7 @@ except_star_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 637)) // token='except' + (_keyword = _PyPegen_expect_token(p, 651)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7452,7 +7454,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 633)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 647)) // token='finally' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7764,7 +7766,7 @@ guard_rule(Parser *p) Token * _keyword; expr_ty guard; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='if' + (_keyword = _PyPegen_expect_token(p, 656)) // token='if' && (guard = named_expression_rule(p)) // named_expression ) @@ -7962,7 +7964,7 @@ as_pattern_rule(Parser *p) if ( (pattern = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (target = pattern_capture_target_rule(p)) // pattern_capture_target ) @@ -8399,7 +8401,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='None' + (_keyword = _PyPegen_expect_token(p, 611)) // token='None' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -8432,7 +8434,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='True' + (_keyword = _PyPegen_expect_token(p, 610)) 
// token='True' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -8465,7 +8467,7 @@ literal_pattern_rule(Parser *p) D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 603)) // token='False' + (_keyword = _PyPegen_expect_token(p, 612)) // token='False' ) { D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -8592,7 +8594,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='None' + (_keyword = _PyPegen_expect_token(p, 611)) // token='None' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -8625,7 +8627,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='True' + (_keyword = _PyPegen_expect_token(p, 610)) // token='True' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -8658,7 +8660,7 @@ literal_expr_rule(Parser *p) D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 603)) // token='False' + (_keyword = _PyPegen_expect_token(p, 612)) // token='False' ) { D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -11222,11 +11224,11 @@ expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 642)) // token='if' + (_keyword = _PyPegen_expect_token(p, 656)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 659)) // token='else' && (c = expression_rule(p)) // expression ) @@ -11331,9 +11333,9 @@ yield_expr_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 573)) // token='yield' + (_keyword = _PyPegen_expect_token(p, 580)) // token='yield' && - (_keyword_1 = _PyPegen_expect_token(p, 608)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 618)) // token='from' && (a = expression_rule(p)) // expression ) @@ -11369,7 +11371,7 @@ yield_expr_rule(Parser *p) Token * _keyword; void *a; if ( - (_keyword = _PyPegen_expect_token(p, 573)) // token='yield' + (_keyword = _PyPegen_expect_token(p, 580)) // token='yield' && (a = star_expressions_rule(p), !p->error_indicator) // star_expressions? 
) @@ -12118,7 +12120,7 @@ inversion_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 581)) // token='not' + (_keyword = _PyPegen_expect_token(p, 588)) // token='not' && (a = inversion_rule(p)) // inversion ) @@ -12781,9 +12783,9 @@ notin_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 581)) // token='not' + (_keyword = _PyPegen_expect_token(p, 588)) // token='not' && - (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 667)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12830,7 +12832,7 @@ in_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 651)) // token='in' + (_keyword = _PyPegen_expect_token(p, 667)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12878,9 +12880,9 @@ isnot_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 582)) // token='is' + (_keyword = _PyPegen_expect_token(p, 589)) // token='is' && - (_keyword_1 = _PyPegen_expect_token(p, 581)) // token='not' + (_keyword_1 = _PyPegen_expect_token(p, 588)) // token='not' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12927,7 +12929,7 @@ is_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 582)) // token='is' + (_keyword = _PyPegen_expect_token(p, 589)) // token='is' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -14183,7 +14185,7 @@ power_rule(Parser *p) return _res; } -// await_primary: AWAIT primary | primary +// await_primary: 'await' primary | primary static expr_ty await_primary_rule(Parser *p) { @@ -14210,21 +14212,21 @@ await_primary_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // AWAIT primary + { // 'await' primary if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "AWAIT primary")); + D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'await' primary")); + Token * _keyword; expr_ty a; - Token * await_var; if ( - (await_var = _PyPegen_expect_token(p, AWAIT)) // token='AWAIT' + (_keyword = _PyPegen_expect_token(p, 590)) // token='await' && (a = primary_rule(p)) // primary ) { - D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "AWAIT primary")); + D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'await' primary")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { p->level--; @@ -14244,7 +14246,7 @@ await_primary_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s await_primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "AWAIT primary")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'await' primary")); } { // primary if (p->error_indicator) { @@ -14768,7 +14770,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='True' + (_keyword = _PyPegen_expect_token(p, 610)) // token='True' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -14801,7 +14803,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 603)) // token='False' + (_keyword = _PyPegen_expect_token(p, 612)) // token='False' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -14834,7 +14836,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='None' + (_keyword = _PyPegen_expect_token(p, 611)) // token='None' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -15104,7 +15106,7 @@ lambdef_rule(Parser *p) void *a; expr_ty b; if ( - (_keyword = _PyPegen_expect_token(p, 600)) // token='lambda' + (_keyword = _PyPegen_expect_token(p, 609)) // token='lambda' && (a = lambda_params_rule(p), !p->error_indicator) // lambda_params? && @@ -16981,7 +16983,7 @@ for_if_clauses_rule(Parser *p) } // for_if_clause: -// | ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))* +// | 'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))* // | 'for' star_targets 'in' ~ disjunction (('if' disjunction))* // | invalid_for_target static comprehension_ty @@ -16997,27 +16999,27 @@ for_if_clause_rule(Parser *p) } comprehension_ty _res = NULL; int _mark = p->mark; - { // ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))* + { // 'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))* if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); + D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); int _cut_var = 0; Token * _keyword; Token * _keyword_1; + Token * _keyword_2; expr_ty a; - Token * async_var; expr_ty b; asdl_expr_seq* c; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + (_keyword = _PyPegen_expect_token(p, 668)) // token='async' && - (_keyword = _PyPegen_expect_token(p, 650)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 666)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 667)) // token='in' && (_cut_var = 1) && @@ -17026,7 +17028,7 @@ for_if_clause_rule(Parser *p) (c = (asdl_expr_seq*)_loop0_120_rule(p)) // (('if' disjunction))* ) { - D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); + D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); _res = CHECK_VERSION ( comprehension_ty , 6 , 
"Async comprehensions are" , _PyAST_comprehension ( a , b , c , 1 , p -> arena ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -17037,7 +17039,7 @@ for_if_clause_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); if (_cut_var) { p->level--; return NULL; @@ -17056,11 +17058,11 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 650)) // token='for' + (_keyword = _PyPegen_expect_token(p, 666)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 667)) // token='in' && (_cut_var = 1) && @@ -20349,11 +20351,11 @@ expression_without_invalid_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 642)) // token='if' + (_keyword = _PyPegen_expect_token(p, 656)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 645)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 659)) // token='else' && (c = expression_rule(p)) // expression ) @@ -20536,7 +20538,7 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 642)) // token='if' + (_keyword = _PyPegen_expect_token(p, 656)) // token='if' && (b = disjunction_rule(p)) // disjunction && @@ -20567,7 +20569,7 @@ invalid_expression_rule(Parser *p) Token * a; Token * b; if ( - (a = _PyPegen_expect_token(p, 600)) // token='lambda' + (a = _PyPegen_expect_token(p, 609)) // token='lambda' && (_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params? && @@ -21042,7 +21044,7 @@ invalid_del_stmt_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 604)) // token='del' + (_keyword = _PyPegen_expect_token(p, 613)) // token='del' && (a = star_expressions_rule(p)) // star_expressions ) @@ -22492,7 +22494,7 @@ invalid_with_item_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (a = expression_rule(p)) // expression && @@ -22518,7 +22520,7 @@ invalid_with_item_rule(Parser *p) return _res; } -// invalid_for_target: ASYNC? 'for' star_expressions +// invalid_for_target: 'async'? 'for' star_expressions static void * invalid_for_target_rule(Parser *p) { @@ -22532,25 +22534,25 @@ invalid_for_target_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // ASYNC? 'for' star_expressions + { // 'async'? 'for' star_expressions if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_for_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_expressions")); + D(fprintf(stderr, "%*c> invalid_for_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_expressions")); Token * _keyword; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings expr_ty a; if ( - (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? + (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'? 
&& - (_keyword = _PyPegen_expect_token(p, 650)) // token='for' + (_keyword = _PyPegen_expect_token(p, 666)) // token='for' && (a = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ invalid_for_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_expressions")); + D(fprintf(stderr, "%*c+ invalid_for_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_expressions")); _res = RAISE_SYNTAX_ERROR_INVALID_TARGET ( FOR_TARGETS , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -22561,7 +22563,7 @@ invalid_for_target_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_for_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'for' star_expressions")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'for' star_expressions")); } _res = NULL; done: @@ -22677,11 +22679,11 @@ invalid_import_rule(Parser *p) Token * a; expr_ty dotted_name_var; if ( - (a = _PyPegen_expect_token(p, 607)) // token='import' + (a = _PyPegen_expect_token(p, 617)) // token='import' && (_gather_203_var = _gather_203_rule(p)) // ','.dotted_name+ && - (_keyword = _PyPegen_expect_token(p, 608)) // token='from' + (_keyword = _PyPegen_expect_token(p, 618)) // token='from' && (dotted_name_var = dotted_name_rule(p)) // dotted_name ) @@ -22756,8 +22758,8 @@ invalid_import_from_targets_rule(Parser *p) } // invalid_with_stmt: -// | ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE -// | ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE +// | 'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE +// | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE static void * invalid_with_stmt_rule(Parser *p) { @@ -22771,28 +22773,28 @@ invalid_with_stmt_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE + { // 'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE")); + D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE")); asdl_seq * _gather_205_var; Token * _keyword; void *_opt_var; UNUSED(_opt_var); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? + (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 615)) // token='with' + (_keyword = _PyPegen_expect_token(p, 629)) // token='with' && (_gather_205_var = _gather_205_rule(p)) // ','.(expression ['as' star_target])+ && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ invalid_with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE")); + D(fprintf(stderr, "%*c+ invalid_with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 
'with' ','.(expression ['as' star_target])+ NEWLINE")); _res = RAISE_SYNTAX_ERROR ( "expected ':'" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -22803,14 +22805,14 @@ invalid_with_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_with_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ NEWLINE")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE")); } - { // ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE + { // 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); + D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); asdl_seq * _gather_207_var; Token * _keyword; Token * _literal; @@ -22821,9 +22823,9 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var_1); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? + (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 615)) // token='with' + (_keyword = _PyPegen_expect_token(p, 629)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -22836,7 +22838,7 @@ invalid_with_stmt_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ invalid_with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); + D(fprintf(stderr, "%*c+ invalid_with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); _res = RAISE_SYNTAX_ERROR ( "expected ':'" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -22847,7 +22849,7 @@ invalid_with_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_with_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE")); } _res = NULL; done: @@ -22856,8 +22858,8 @@ invalid_with_stmt_rule(Parser *p) } // invalid_with_stmt_indent: -// | ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT -// | ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT +// | 'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT +// | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT static void * invalid_with_stmt_indent_rule(Parser *p) { @@ -22871,12 +22873,12 @@ invalid_with_stmt_indent_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT + { // 'async'? 
'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); + D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); asdl_seq * _gather_209_var; Token * _literal; void *_opt_var; @@ -22884,9 +22886,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? + (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 615)) // token='with' + (a = _PyPegen_expect_token(p, 629)) // token='with' && (_gather_209_var = _gather_209_rule(p)) // ','.(expression ['as' star_target])+ && @@ -22897,7 +22899,7 @@ invalid_with_stmt_indent_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { - D(fprintf(stderr, "%*c+ invalid_with_stmt_indent[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); + D(fprintf(stderr, "%*c+ invalid_with_stmt_indent[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); _res = RAISE_INDENTATION_ERROR ( "expected an indented block after 'with' statement on line %d" , a -> lineno ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -22908,14 +22910,14 @@ invalid_with_stmt_indent_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_with_stmt_indent[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT")); } - { // ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT + { // 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT")); + D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT")); asdl_seq * _gather_211_var; Token * _literal; Token * _literal_1; @@ -22927,9 +22929,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? + (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 615)) // token='with' + (a = _PyPegen_expect_token(p, 629)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -22946,7 +22948,7 @@ invalid_with_stmt_indent_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { - D(fprintf(stderr, "%*c+ invalid_with_stmt_indent[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? 
')' ':' NEWLINE !INDENT")); + D(fprintf(stderr, "%*c+ invalid_with_stmt_indent[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT")); _res = RAISE_INDENTATION_ERROR ( "expected an indented block after 'with' statement on line %d" , a -> lineno ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -22957,7 +22959,7 @@ invalid_with_stmt_indent_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_with_stmt_indent[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT")); } _res = NULL; done: @@ -22993,7 +22995,7 @@ invalid_try_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 624)) // token='try' + (a = _PyPegen_expect_token(p, 638)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23025,7 +23027,7 @@ invalid_try_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='try' + (_keyword = _PyPegen_expect_token(p, 638)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23064,7 +23066,7 @@ invalid_try_stmt_rule(Parser *p) Token * b; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='try' + (_keyword = _PyPegen_expect_token(p, 638)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23072,7 +23074,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_215_var = _loop1_215_rule(p)) // except_block+ && - (a = _PyPegen_expect_token(p, 637)) // token='except' + (a = _PyPegen_expect_token(p, 651)) // token='except' && (b = _PyPegen_expect_token(p, 16)) // token='*' && @@ -23111,7 +23113,7 @@ invalid_try_stmt_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings Token * a; if ( - (_keyword = _PyPegen_expect_token(p, 624)) // token='try' + (_keyword = _PyPegen_expect_token(p, 638)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23119,7 +23121,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_218_var = _loop1_218_rule(p)) // except_star_block+ && - (a = _PyPegen_expect_token(p, 637)) // token='except' + (a = _PyPegen_expect_token(p, 651)) // token='except' && (_opt_var = _tmp_219_rule(p), !p->error_indicator) // [expression ['as' NAME]] && @@ -23179,7 +23181,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty a; expr_ty expressions_var; if ( - (_keyword = _PyPegen_expect_token(p, 637)) // token='except' + (_keyword = _PyPegen_expect_token(p, 651)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? && @@ -23221,7 +23223,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 637)) // token='except' + (a = _PyPegen_expect_token(p, 651)) // token='except' && (_opt_var = _PyPegen_expect_token(p, 16), !p->error_indicator) // '*'? 
&& @@ -23254,7 +23256,7 @@ invalid_except_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 637)) // token='except' + (a = _PyPegen_expect_token(p, 651)) // token='except' && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -23282,7 +23284,7 @@ invalid_except_stmt_rule(Parser *p) void *_tmp_222_var; Token * a; if ( - (a = _PyPegen_expect_token(p, 637)) // token='except' + (a = _PyPegen_expect_token(p, 651)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -23332,7 +23334,7 @@ invalid_finally_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 633)) // token='finally' + (a = _PyPegen_expect_token(p, 647)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23389,7 +23391,7 @@ invalid_except_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 637)) // token='except' + (a = _PyPegen_expect_token(p, 651)) // token='except' && (expression_var = expression_rule(p)) // expression && @@ -23425,7 +23427,7 @@ invalid_except_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 637)) // token='except' + (a = _PyPegen_expect_token(p, 651)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -23482,7 +23484,7 @@ invalid_except_star_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 637)) // token='except' + (a = _PyPegen_expect_token(p, 651)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -23724,7 +23726,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"' ) @@ -23754,7 +23756,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && _PyPegen_lookahead_with_name(0, _PyPegen_name_token, p) && @@ -23911,7 +23913,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='if' + (_keyword = _PyPegen_expect_token(p, 656)) // token='if' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -23942,7 +23944,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty a_1; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 642)) // token='if' + (a = _PyPegen_expect_token(p, 656)) // token='if' && (a_1 = named_expression_rule(p)) // named_expression && @@ -23998,7 +24000,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 644)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 658)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24029,7 +24031,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 644)) // token='elif' + (a = _PyPegen_expect_token(p, 658)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24083,7 +24085,7 @@ invalid_else_stmt_rule(Parser *p) Token * a; Token * 
newline_var; if ( - (a = _PyPegen_expect_token(p, 645)) // token='else' + (a = _PyPegen_expect_token(p, 659)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24137,7 +24139,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 647)) // token='while' + (_keyword = _PyPegen_expect_token(p, 661)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24168,7 +24170,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 647)) // token='while' + (a = _PyPegen_expect_token(p, 661)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -24199,8 +24201,8 @@ invalid_while_stmt_rule(Parser *p) } // invalid_for_stmt: -// | ASYNC? 'for' star_targets 'in' star_expressions NEWLINE -// | ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT +// | 'async'? 'for' star_targets 'in' star_expressions NEWLINE +// | 'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT static void * invalid_for_stmt_rule(Parser *p) { @@ -24214,12 +24216,12 @@ invalid_for_stmt_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // ASYNC? 'for' star_targets 'in' star_expressions NEWLINE + { // 'async'? 'for' star_targets 'in' star_expressions NEWLINE if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions NEWLINE")); + D(fprintf(stderr, "%*c> invalid_for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions NEWLINE")); Token * _keyword; Token * _keyword_1; void *_opt_var; @@ -24228,20 +24230,20 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? + (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 650)) // token='for' + (_keyword = _PyPegen_expect_token(p, 666)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 667)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ invalid_for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions NEWLINE")); + D(fprintf(stderr, "%*c+ invalid_for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions NEWLINE")); _res = RAISE_SYNTAX_ERROR ( "expected ':'" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24252,14 +24254,14 @@ invalid_for_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_for_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions NEWLINE")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions NEWLINE")); } - { // ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT + { // 'async'? 
'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT")); + D(fprintf(stderr, "%*c> invalid_for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT")); Token * _keyword; Token * _literal; void *_opt_var; @@ -24269,13 +24271,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? + (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 650)) // token='for' + (a = _PyPegen_expect_token(p, 666)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword = _PyPegen_expect_token(p, 651)) // token='in' + (_keyword = _PyPegen_expect_token(p, 667)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -24286,7 +24288,7 @@ invalid_for_stmt_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { - D(fprintf(stderr, "%*c+ invalid_for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT")); + D(fprintf(stderr, "%*c+ invalid_for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT")); _res = RAISE_INDENTATION_ERROR ( "expected an indented block after 'for' statement on line %d" , a -> lineno ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24297,7 +24299,7 @@ invalid_for_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_for_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT")); } _res = NULL; done: @@ -24306,7 +24308,7 @@ invalid_for_stmt_rule(Parser *p) } // invalid_def_raw: -// | ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT +// | 'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT static void * invalid_def_raw_rule(Parser *p) { @@ -24320,12 +24322,12 @@ invalid_def_raw_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT + { // 'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT")); + D(fprintf(stderr, "%*c> invalid_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT")); Token * _literal; Token * _literal_1; Token * _literal_2; @@ -24339,9 +24341,9 @@ invalid_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, ASYNC), !p->error_indicator) // ASYNC? + (_opt_var = _PyPegen_expect_token(p, 668), !p->error_indicator) // 'async'? 
&& - (a = _PyPegen_expect_token(p, 652)) // token='def' + (a = _PyPegen_expect_token(p, 669)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24360,7 +24362,7 @@ invalid_def_raw_rule(Parser *p) _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { - D(fprintf(stderr, "%*c+ invalid_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT")); + D(fprintf(stderr, "%*c+ invalid_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT")); _res = RAISE_INDENTATION_ERROR ( "expected an indented block after function definition on line %d" , a -> lineno ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24371,7 +24373,7 @@ invalid_def_raw_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_def_raw[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'? 'def' NAME '(' params? ')' ['->' expression] ':' NEWLINE !INDENT")); } _res = NULL; done: @@ -24407,7 +24409,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 654)) // token='class' + (_keyword = _PyPegen_expect_token(p, 671)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24442,7 +24444,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 654)) // token='class' + (a = _PyPegen_expect_token(p, 671)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -25610,7 +25612,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 607)) // token='import' + (_keyword = _PyPegen_expect_token(p, 617)) // token='import' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); @@ -25629,7 +25631,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 608)) // token='from' + (_keyword = _PyPegen_expect_token(p, 618)) // token='from' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); @@ -25646,7 +25648,7 @@ _tmp_7_rule(Parser *p) return _res; } -// _tmp_8: 'def' | '@' | ASYNC +// _tmp_8: 'def' | '@' | 'async' static void * _tmp_8_rule(Parser *p) { @@ -25668,7 +25670,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 652)) // token='def' + (_keyword = _PyPegen_expect_token(p, 669)) // token='def' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); @@ -25698,24 +25700,24 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'@'")); } - { // ASYNC + { // 'async' if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); - Token * async_var; + D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); + Token * _keyword; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + (_keyword = _PyPegen_expect_token(p, 668)) // token='async' ) { - D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); - _res = async_var; + D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); + _res = _keyword; goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s _tmp_8[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'")); } _res = NULL; done: @@ -25745,7 +25747,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 654)) // token='class' + (_keyword = _PyPegen_expect_token(p, 671)) // token='class' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); @@ -25781,7 +25783,7 @@ _tmp_9_rule(Parser *p) return _res; } -// _tmp_10: 'with' | ASYNC +// _tmp_10: 'with' | 'async' static void * _tmp_10_rule(Parser *p) { @@ -25803,7 +25805,7 @@ _tmp_10_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 615)) // token='with' + (_keyword = _PyPegen_expect_token(p, 629)) // token='with' ) { D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); @@ -25814,24 +25816,24 @@ _tmp_10_rule(Parser *p) D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with'")); } - { // ASYNC + { // 'async' if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); - Token * async_var; + D(fprintf(stderr, "%*c> _tmp_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); + Token * _keyword; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + (_keyword = _PyPegen_expect_token(p, 668)) // token='async' ) { - D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); - _res = async_var; + D(fprintf(stderr, "%*c+ _tmp_10[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); + _res = _keyword; goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s _tmp_10[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'async'")); } _res = NULL; done: @@ -25839,7 +25841,7 @@ _tmp_10_rule(Parser *p) return _res; } -// _tmp_11: 'for' | ASYNC +// _tmp_11: 'for' | 'async' static void * _tmp_11_rule(Parser *p) { @@ -25861,7 +25863,7 @@ _tmp_11_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 650)) // token='for' + (_keyword = _PyPegen_expect_token(p, 666)) // token='for' ) { D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); @@ -25872,24 +25874,24 @@ _tmp_11_rule(Parser *p) D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for'")); } - { // ASYNC + { // 'async' if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); - Token * async_var; + D(fprintf(stderr, "%*c> _tmp_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); + Token * _keyword; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + (_keyword = _PyPegen_expect_token(p, 668)) // token='async' ) { - D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); - _res = async_var; + D(fprintf(stderr, "%*c+ _tmp_11[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); + _res = _keyword; goto done; } p->mark = _mark; D(fprintf(stderr, "%*c%s _tmp_11[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'async'")); } _res = NULL; done: @@ -26272,7 +26274,7 @@ _tmp_18_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 608)) // token='from' + (_keyword = _PyPegen_expect_token(p, 618)) // token='from' && (z = expression_rule(p)) // expression ) @@ -26922,7 +26924,7 @@ _tmp_29_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -27088,7 +27090,7 @@ _tmp_32_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -29106,7 +29108,7 @@ _tmp_63_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -29153,7 +29155,7 @@ _tmp_64_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -34721,7 +34723,7 @@ _tmp_153_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='True' + (_keyword = _PyPegen_expect_token(p, 610)) // token='True' ) { D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -34740,7 +34742,7 @@ _tmp_153_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 603)) // 
token='False' + (_keyword = _PyPegen_expect_token(p, 612)) // token='False' ) { D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -34759,7 +34761,7 @@ _tmp_153_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='None' + (_keyword = _PyPegen_expect_token(p, 611)) // token='None' ) { D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -34901,7 +34903,7 @@ _tmp_156_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 645)) // token='else' + (_keyword = _PyPegen_expect_token(p, 659)) // token='else' ) { D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); @@ -35132,7 +35134,7 @@ _tmp_159_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 601)) // token='True' + (_keyword = _PyPegen_expect_token(p, 610)) // token='True' ) { D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -35151,7 +35153,7 @@ _tmp_159_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 602)) // token='None' + (_keyword = _PyPegen_expect_token(p, 611)) // token='None' ) { D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -35170,7 +35172,7 @@ _tmp_159_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 603)) // token='False' + (_keyword = _PyPegen_expect_token(p, 612)) // token='False' ) { D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -38608,7 +38610,7 @@ _tmp_213_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 637)) // token='except' + (_keyword = _PyPegen_expect_token(p, 651)) // token='except' ) { D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); @@ -38627,7 +38629,7 @@ _tmp_213_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_213[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 633)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 647)) // token='finally' ) { D(fprintf(stderr, "%*c+ _tmp_213[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); @@ -38808,7 +38810,7 @@ _tmp_216_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -39034,7 +39036,7 @@ _tmp_220_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -39076,7 +39078,7 @@ _tmp_221_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - 
(_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -39176,7 +39178,7 @@ _tmp_223_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -39218,7 +39220,7 @@ _tmp_224_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -40867,7 +40869,7 @@ _tmp_254_rule(Parser *p) Token * _keyword; expr_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 574)) // token='or' + (_keyword = _PyPegen_expect_token(p, 581)) // token='or' && (c = conjunction_rule(p)) // conjunction ) @@ -40914,7 +40916,7 @@ _tmp_255_rule(Parser *p) Token * _keyword; expr_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 575)) // token='and' + (_keyword = _PyPegen_expect_token(p, 582)) // token='and' && (c = inversion_rule(p)) // inversion ) @@ -41077,7 +41079,7 @@ _tmp_258_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='if' + (_keyword = _PyPegen_expect_token(p, 656)) // token='if' && (z = disjunction_rule(p)) // disjunction ) @@ -41124,7 +41126,7 @@ _tmp_259_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 642)) // token='if' + (_keyword = _PyPegen_expect_token(p, 656)) // token='if' && (z = disjunction_rule(p)) // disjunction ) @@ -41697,7 +41699,7 @@ _tmp_271_rule(Parser *p) Token * _keyword; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME ) @@ -41799,7 +41801,7 @@ _tmp_273_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) @@ -41841,7 +41843,7 @@ _tmp_274_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) @@ -41883,7 +41885,7 @@ _tmp_275_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) @@ -41925,7 +41927,7 @@ _tmp_276_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 640)) // token='as' + (_keyword = _PyPegen_expect_token(p, 654)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) diff --git a/Parser/pegen.c b/Parser/pegen.c index 885d423fca66a9..bfade3446a57f7 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -734,9 +734,6 @@ compute_parser_flags(PyCompilerFlags *flags) if (flags->cf_flags & PyCF_TYPE_COMMENTS) { parser_flags |= PyPARSE_TYPE_COMMENTS; } - if ((flags->cf_flags & PyCF_ONLY_AST) && flags->cf_feature_version < 7) { - parser_flags |= PyPARSE_ASYNC_HACKS; - } if (flags->cf_flags & PyCF_ALLOW_INCOMPLETE_INPUT) { parser_flags |= PyPARSE_ALLOW_INCOMPLETE_INPUT; } @@ -755,7 +752,6 @@ 
_PyPegen_Parser_New(struct tok_state *tok, int start_rule, int flags, } assert(tok != NULL); tok->type_comments = (flags & PyPARSE_TYPE_COMMENTS) > 0; - tok->async_hacks = (flags & PyPARSE_ASYNC_HACKS) > 0; p->tok = tok; p->keywords = NULL; p->n_keyword_lists = -1; diff --git a/Parser/pegen.h b/Parser/pegen.h index 5f29285951e812..0852bb51d4fe72 100644 --- a/Parser/pegen.h +++ b/Parser/pegen.h @@ -20,7 +20,6 @@ #define PyPARSE_IGNORE_COOKIE 0x0010 #define PyPARSE_BARRY_AS_BDFL 0x0020 #define PyPARSE_TYPE_COMMENTS 0x0040 -#define PyPARSE_ASYNC_HACKS 0x0080 #define PyPARSE_ALLOW_INCOMPLETE_INPUT 0x0100 #define CURRENT_POS (-5) diff --git a/Parser/string_parser.c b/Parser/string_parser.c index bc1f99d607ae4d..72898c38b79bde 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -1,6 +1,7 @@ #include #include +#include "pycore_bytesobject.h" // _PyBytes_DecodeEscape() #include "pycore_unicodeobject.h" // _PyUnicode_DecodeUnicodeEscapeInternal() #include "tokenizer.h" diff --git a/Parser/token.c b/Parser/token.c index 2bc963a91c7701..4f163f21609a0a 100644 --- a/Parser/token.c +++ b/Parser/token.c @@ -62,8 +62,6 @@ const char * const _PyParser_TokenNames[] = { "COLONEQUAL", "EXCLAMATION", "OP", - "AWAIT", - "ASYNC", "TYPE_IGNORE", "TYPE_COMMENT", "SOFT_KEYWORD", diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index f19198600fa018..5a42f6f357317f 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -104,10 +104,6 @@ tok_new(void) tok->decoding_buffer = NULL; tok->readline = NULL; tok->type_comments = 0; - tok->async_hacks = 0; - tok->async_def = 0; - tok->async_def_indent = 0; - tok->async_def_nl = 0; tok->interactive_underflow = IUNDERFLOW_NORMAL; tok->str = NULL; tok->report_warnings = 1; @@ -116,7 +112,6 @@ tok_new(void) tok->implicit_newline = 0; tok->tok_mode_stack[0] = (tokenizer_mode){.kind =TOK_REGULAR_MODE, .f_string_quote='\0', .f_string_quote_size = 0, .f_string_debug=0}; tok->tok_mode_stack_index = 0; - tok->tok_report_warnings = 1; #ifdef Py_DEBUG tok->debug = _Py_GetConfig()->parser_debug; #endif @@ -1545,10 +1540,6 @@ static int warn_invalid_escape_sequence(struct tok_state *tok, int first_invalid_escape_char) { - if (!tok->tok_report_warnings) { - return 0; - } - PyObject *msg = PyUnicode_FromFormat( "invalid escape sequence '\\%c'", (char) first_invalid_escape_char @@ -1930,27 +1921,6 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t /* Peek ahead at the next character */ c = tok_nextc(tok); tok_backup(tok, c); - /* Check if we are closing an async function */ - if (tok->async_def - && !blankline - /* Due to some implementation artifacts of type comments, - * a TYPE_COMMENT at the start of a function won't set an - * indentation level and it will produce a NEWLINE after it. - * To avoid spuriously ending an async function due to this, - * wait until we have some non-newline char in front of us. */ - && c != '\n' - && tok->level == 0 - /* There was a NEWLINE after ASYNC DEF, - so we're past the signature. */ - && tok->async_def_nl - /* Current indentation level is less than where - the async function was defined */ - && tok->async_def_indent >= tok->indent) - { - tok->async_def = 0; - tok->async_def_indent = 0; - tok->async_def_nl = 0; - } again: tok->start = NULL; @@ -2099,54 +2069,6 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t p_start = tok->start; p_end = tok->cur; - /* async/await parsing block. 
*/ - if (tok->cur - tok->start == 5 && tok->start[0] == 'a') { - /* May be an 'async' or 'await' token. For Python 3.7 or - later we recognize them unconditionally. For Python - 3.5 or 3.6 we recognize 'async' in front of 'def', and - either one inside of 'async def'. (Technically we - shouldn't recognize these at all for 3.4 or earlier, - but there's no *valid* Python 3.4 code that would be - rejected, and async functions will be rejected in a - later phase.) */ - if (!tok->async_hacks || tok->async_def) { - /* Always recognize the keywords. */ - if (memcmp(tok->start, "async", 5) == 0) { - return MAKE_TOKEN(ASYNC); - } - if (memcmp(tok->start, "await", 5) == 0) { - return MAKE_TOKEN(AWAIT); - } - } - else if (memcmp(tok->start, "async", 5) == 0) { - /* The current token is 'async'. - Look ahead one token to see if that is 'def'. */ - - struct tok_state ahead_tok; - struct token ahead_token; - _PyToken_Init(&ahead_token); - int ahead_tok_kind; - - memcpy(&ahead_tok, tok, sizeof(ahead_tok)); - ahead_tok_kind = tok_get_normal_mode(&ahead_tok, - current_tok, - &ahead_token); - - if (ahead_tok_kind == NAME - && ahead_tok.cur - ahead_tok.start == 3 - && memcmp(ahead_tok.start, "def", 3) == 0) - { - /* The next token is going to be 'def', so instead of - returning a plain NAME token, return ASYNC. */ - tok->async_def_indent = tok->indent; - tok->async_def = 1; - _PyToken_Free(&ahead_token); - return MAKE_TOKEN(ASYNC); - } - _PyToken_Free(&ahead_token); - } - } - return MAKE_TOKEN(NAME); } @@ -2177,11 +2099,6 @@ tok_get_normal_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct t p_start = tok->start; p_end = tok->cur - 1; /* Leave '\n' out of the string */ tok->cont_line = 0; - if (tok->async_def) { - /* We're somewhere inside an 'async def' function, and - we've encountered a NEWLINE after its signature. */ - tok->async_def_nl = 1; - } return MAKE_TOKEN(NEWLINE); } diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h index cb44845c1d306e..11d69fc5b2e15c 100644 --- a/Parser/tokenizer.h +++ b/Parser/tokenizer.h @@ -116,19 +116,12 @@ struct tok_state { int type_comments; /* Whether to look for type comments */ - /* async/await related fields (still needed depending on feature_version) */ - int async_hacks; /* =1 if async/await aren't always keywords */ - int async_def; /* =1 if tokens are inside an 'async def' body. */ - int async_def_indent; /* Indentation level of the outermost 'async def'. */ - int async_def_nl; /* =1 if the outermost 'async def' had at least one - NEWLINE token after it. 
*/ /* How to proceed when asked for a new token in interactive mode */ enum interactive_underflow_t interactive_underflow; int report_warnings; // TODO: Factor this into its own thing tokenizer_mode tok_mode_stack[MAXFSTRINGLEVEL]; int tok_mode_stack_index; - int tok_report_warnings; int tok_extra_tokens; int comment_newline; int implicit_newline; diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h index 9058327e846dc3..0dca507e28bc14 100644 --- a/Programs/test_frozenmain.h +++ b/Programs/test_frozenmain.h @@ -2,14 +2,14 @@ unsigned char M_test_frozenmain[] = { 227,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0, 0,0,0,0,0,243,164,0,0,0,151,0,100,0,100,1, - 108,0,90,0,100,0,100,1,108,1,90,1,2,0,101,2, - 100,2,171,1,0,0,0,0,0,0,1,0,2,0,101,2, + 108,0,90,0,100,0,100,1,108,1,90,1,101,2,2,0, + 100,2,171,1,0,0,0,0,0,0,1,0,101,2,2,0, 100,3,101,0,106,6,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,171,2,0,0,0,0,0,0, - 1,0,2,0,101,1,106,8,0,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,171,0,0,0,0,0, + 1,0,101,1,106,8,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,2,0,171,0,0,0,0,0, 0,0,100,4,25,0,0,0,90,5,100,5,68,0,93,20, - 0,0,90,6,2,0,101,2,100,6,101,6,40,0,100,7, + 0,0,90,6,101,2,2,0,100,6,101,6,40,0,100,7, 101,5,101,6,25,0,0,0,40,0,157,4,171,1,0,0, 0,0,0,0,1,0,140,22,0,0,4,0,121,1,41,8, 233,0,0,0,0,78,122,18,70,114,111,122,101,110,32,72, @@ -27,12 +27,12 @@ unsigned char M_test_frozenmain[] = { 0,0,218,3,107,101,121,169,0,243,0,0,0,0,250,18, 116,101,115,116,95,102,114,111,122,101,110,109,97,105,110,46, 112,121,250,8,60,109,111,100,117,108,101,62,114,18,0,0, - 0,1,0,0,0,115,102,0,0,0,240,3,1,1,1,243, + 0,1,0,0,0,115,99,0,0,0,240,3,1,1,1,243, 8,0,1,11,219,0,24,225,0,5,208,6,26,212,0,27, 217,0,5,128,106,144,35,151,40,145,40,212,0,27,216,9, - 38,208,9,26,215,9,38,209,9,38,211,9,40,168,24,209, - 9,50,128,6,240,2,6,12,2,242,0,7,1,42,128,67, - 241,14,0,5,10,136,71,144,67,144,53,152,2,152,54,160, - 35,153,59,152,45,208,10,40,214,4,41,241,15,7,1,42, - 114,16,0,0,0, + 26,215,9,38,210,9,38,211,9,40,168,24,209,9,50,128, + 6,240,2,6,12,2,242,0,7,1,42,128,67,241,14,0, + 5,10,136,71,144,67,144,53,152,2,152,54,160,35,153,59, + 152,45,208,10,40,214,4,41,241,15,7,1,42,114,16,0, + 0,0, }; diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 55a1370fbd038b..8047b1259c5d86 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -6,7 +6,6 @@ #include "pycore_ceval.h" // _Py_EnterRecursiveCall #include "pycore_interp.h" // _PyInterpreterState.ast #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "structmember.h" #include // Forward declaration @@ -923,7 +922,7 @@ ast_type_reduce(PyObject *self, PyObject *unused) } static PyMemberDef ast_type_members[] = { - {"__dictoffset__", T_PYSSIZET, offsetof(AST_object, dict), READONLY}, + {"__dictoffset__", Py_T_PYSSIZET, offsetof(AST_object, dict), Py_READONLY}, {NULL} /* Sentinel */ }; @@ -13074,7 +13073,7 @@ PyObject* PyAST_mod2obj(mod_ty t) int starting_recursion_depth; /* Be careful here to prevent overflow. 
*/ - int COMPILER_STACK_FRAME_SCALE = 3; + int COMPILER_STACK_FRAME_SCALE = 2; PyThreadState *tstate = _PyThreadState_GET(); if (!tstate) { return 0; diff --git a/Python/Python-tokenize.c b/Python/Python-tokenize.c index 1938562706914c..1b021069c5e10b 100644 --- a/Python/Python-tokenize.c +++ b/Python/Python-tokenize.c @@ -237,9 +237,6 @@ tokenizeriter_next(tokenizeriterobject *it) if (type > DEDENT && type < OP) { type = OP; } - else if (type == ASYNC || type == AWAIT) { - type = NAME; - } else if (type == NEWLINE) { Py_DECREF(str); if (!it->tok->implicit_newline) { diff --git a/Python/_warnings.c b/Python/_warnings.c index 82e621243a0c15..40ec5f613d5bf4 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -1,4 +1,5 @@ #include "Python.h" +#include "pycore_dict.h" // _PyDict_GetItemWithError() #include "pycore_frame.h" #include "pycore_initconfig.h" #include "pycore_interp.h" // PyInterpreterState.warnings diff --git a/Python/ast.c b/Python/ast.c index 68600ce683b974..74c97f948d15e6 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -1029,7 +1029,7 @@ validate_type_params(struct validator *state, asdl_type_param_seq *tps) /* See comments in symtable.c. */ -#define COMPILER_STACK_FRAME_SCALE 3 +#define COMPILER_STACK_FRAME_SCALE 2 int _PyAST_Validate(mod_ty mod) diff --git a/Python/ast_opt.c b/Python/ast_opt.c index 276e910089a277..82e7559e5b629a 100644 --- a/Python/ast_opt.c +++ b/Python/ast_opt.c @@ -1,9 +1,10 @@ /* AST Optimizer */ #include "Python.h" #include "pycore_ast.h" // _PyAST_GetDocString() -#include "pycore_long.h" // _PyLong -#include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_format.h" // F_LJUST +#include "pycore_long.h" // _PyLong +#include "pycore_pystate.h" // _PyThreadState_GET() +#include "pycore_setobject.h" // _PySet_NextEntry() typedef struct { @@ -1111,7 +1112,7 @@ astfold_type_param(type_param_ty node_, PyArena *ctx_, _PyASTOptimizeState *stat #undef CALL_SEQ /* See comments in symtable.c. 
*/ -#define COMPILER_STACK_FRAME_SCALE 3 +#define COMPILER_STACK_FRAME_SCALE 2 int _PyAST_Optimize(mod_ty mod, PyArena *arena, int optimize, int ff_features) diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 7d77fd5c0c328e..9baf233614879e 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -6,6 +6,7 @@ #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _PyEval_Vector() #include "pycore_compile.h" // _PyAST_Compile() +#include "pycore_dict.h" // _PyDict_GetItemWithError() #include "pycore_long.h" // _PyLong_CompactValue #include "pycore_modsupport.h" // _PyArg_NoKwnames() #include "pycore_object.h" // _Py_AddToAllObjects() diff --git a/Python/bytecodes.c b/Python/bytecodes.c index ea136a3fca2e02..5efa36fcf5c629 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -10,23 +10,24 @@ #include "pycore_abstract.h" // _PyIndex_Check() #include "pycore_ceval.h" // _PyEval_SignalAsyncExc() #include "pycore_code.h" +#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS #include "pycore_function.h" +#include "pycore_instruments.h" #include "pycore_intrinsics.h" #include "pycore_long.h" // _PyLong_GetZero() -#include "pycore_instruments.h" -#include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_moduleobject.h" // PyModuleObject +#include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_opcode.h" // EXTRA_CASES #include "pycore_opcode_metadata.h" // uop names #include "pycore_opcode_utils.h" // MAKE_FUNCTION_* #include "pycore_pyerrors.h" // _PyErr_GetRaisedException() #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_range.h" // _PyRangeIterObject +#include "pycore_setobject.h" // _PySet_NextEntry() #include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs #include "pycore_sysmodule.h" // _PySys_Audit() #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "pycore_typeobject.h" // _PySuper_Lookup() -#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS #include "pycore_dict.h" #include "dictobject.h" @@ -35,7 +36,7 @@ #include "optimizer.h" #include "pydtrace.h" #include "setobject.h" -#include "structmember.h" // struct PyMemberDef, T_OFFSET_EX + #define USE_COMPUTED_GOTOS 0 #include "ceval_macros.h" @@ -77,7 +78,6 @@ dummy_func( PyObject **stack_pointer, PyObject *kwnames, int throwflag, - binaryfunc binary_ops[], PyObject *args[] ) { @@ -509,6 +509,7 @@ dummy_func( BINARY_SUBSCR_DICT, BINARY_SUBSCR_GETITEM, BINARY_SUBSCR_LIST_INT, + BINARY_SUBSCR_STR_INT, BINARY_SUBSCR_TUPLE_INT, }; @@ -574,6 +575,21 @@ dummy_func( Py_DECREF(list); } + inst(BINARY_SUBSCR_STR_INT, (unused/1, str, sub -- res)) { + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyUnicode_CheckExact(str), BINARY_SUBSCR); + DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR); + Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; + DEOPT_IF(PyUnicode_GET_LENGTH(str) <= index, BINARY_SUBSCR); + // Specialize for reading an ASCII character from any string: + Py_UCS4 c = PyUnicode_READ_CHAR(str, index); + DEOPT_IF(Py_ARRAY_LENGTH(_Py_SINGLETON(strings).ascii) <= c, BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = (PyObject*)&_Py_SINGLETON(strings).ascii[c]; + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(str); + } + inst(BINARY_SUBSCR_TUPLE_INT, (unused/1, tuple, sub -- res)) { DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); @@ -698,14 +714,14 @@ dummy_func( 
inst(CALL_INTRINSIC_1, (value -- res)) { assert(oparg <= MAX_INTRINSIC_1); - res = _PyIntrinsics_UnaryFunctions[oparg](tstate, value); + res = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, value); DECREF_INPUTS(); ERROR_IF(res == NULL, error); } inst(CALL_INTRINSIC_2, (value2, value1 -- res)) { assert(oparg <= MAX_INTRINSIC_2); - res = _PyIntrinsics_BinaryFunctions[oparg](tstate, value2, value1); + res = _PyIntrinsics_BinaryFunctions[oparg].func(tstate, value2, value1); DECREF_INPUTS(); ERROR_IF(res == NULL, error); } @@ -720,7 +736,11 @@ dummy_func( exc = args[0]; /* fall through */ case 0: - ERROR_IF(do_raise(tstate, exc, cause), exception_unwind); + if (do_raise(tstate, exc, cause)) { + assert(oparg == 0); + monitor_reraise(tstate, frame, next_instr-1); + goto exception_unwind; + } break; default: _PyErr_SetString(tstate, PyExc_SystemError, @@ -737,7 +757,7 @@ dummy_func( tstate->cframe = cframe.previous; assert(tstate->cframe->current_frame == frame->previous); assert(!_PyErr_Occurred(tstate)); - _Py_LeaveRecursiveCallTstate(tstate); + tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; return retval; } @@ -893,7 +913,7 @@ dummy_func( iter = _PyCoro_GetAwaitableIter(iterable); if (iter == NULL) { - format_awaitable_error(tstate, Py_TYPE(iterable), oparg); + _PyEval_FormatAwaitableError(tstate, Py_TYPE(iterable), oparg); } DECREF_INPUTS(); @@ -1047,6 +1067,7 @@ dummy_func( assert(exc && PyExceptionInstance_Check(exc)); Py_INCREF(exc); _PyErr_SetRaisedException(tstate, exc); + monitor_reraise(tstate, frame, next_instr-1); goto exception_unwind; } @@ -1058,6 +1079,7 @@ dummy_func( else { Py_INCREF(exc); _PyErr_SetRaisedException(tstate, exc); + monitor_reraise(tstate, frame, next_instr-1); goto exception_unwind; } } @@ -1072,6 +1094,7 @@ dummy_func( } else { _PyErr_SetRaisedException(tstate, Py_NewRef(exc_value)); + monitor_reraise(tstate, frame, next_instr-1); goto exception_unwind; } } @@ -1120,9 +1143,9 @@ dummy_func( err = PyObject_DelItem(ns, name); // Can't use ERROR_IF here. if (err != 0) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, - name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, + name); goto error; } } @@ -1145,7 +1168,7 @@ dummy_func( DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ PyObject **top = stack_pointer + oparg - 1; - int res = unpack_iterable(tstate, seq, oparg, -1, top); + int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top); DECREF_INPUTS(); ERROR_IF(res == 0, error); } @@ -1185,7 +1208,7 @@ dummy_func( inst(UNPACK_EX, (seq -- unused[oparg & 0xFF], unused, unused[oparg >> 8])) { int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); PyObject **top = stack_pointer + totalargs - 1; - int res = unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); + int res = _PyEval_UnpackIterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); DECREF_INPUTS(); ERROR_IF(res == 0, error); } @@ -1235,8 +1258,8 @@ dummy_func( // Can't use ERROR_IF here. 
if (err != 0) { if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); } goto error; } @@ -1274,7 +1297,7 @@ dummy_func( goto error; } if (v == NULL) { - format_exc_check_arg( + _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); goto error; @@ -1292,7 +1315,7 @@ dummy_func( LOAD_GLOBAL_BUILTIN, }; - inst(LOAD_GLOBAL, (unused/1, unused/1, unused/1, unused/1 -- null if (oparg & 1), v)) { + inst(LOAD_GLOBAL, (unused/1, unused/1, unused/1, unused/1 -- res, null if (oparg & 1))) { #if ENABLE_SPECIALIZATION _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1308,30 +1331,30 @@ dummy_func( if (PyDict_CheckExact(GLOBALS()) && PyDict_CheckExact(BUILTINS())) { - v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), - (PyDictObject *)BUILTINS(), - name); - if (v == NULL) { + res = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), + (PyDictObject *)BUILTINS(), + name); + if (res == NULL) { if (!_PyErr_Occurred(tstate)) { /* _PyDict_LoadGlobal() returns NULL without raising * an exception if the key doesn't exist */ - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); } ERROR_IF(true, error); } - Py_INCREF(v); + Py_INCREF(res); } else { /* Slow-path if globals or builtins is not a dict */ /* namespace 1: globals */ - ERROR_IF(PyMapping_GetOptionalItem(GLOBALS(), name, &v) < 0, error); - if (v == NULL) { + ERROR_IF(PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0, error); + if (res == NULL) { /* namespace 2: builtins */ - ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0, error); - if (v == NULL) { - format_exc_check_arg( + ERROR_IF(PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0, error); + if (res == NULL) { + _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); ERROR_IF(true, error); @@ -1341,9 +1364,6 @@ dummy_func( null = NULL; } - op(_SKIP_CACHE, (unused/1 -- )) { - } - op(_GUARD_GLOBALS_VERSION, (version/1 --)) { PyDictObject *dict = (PyDictObject *)GLOBALS(); DEOPT_IF(!PyDict_CheckExact(dict), LOAD_GLOBAL); @@ -1358,7 +1378,7 @@ dummy_func( assert(DK_IS_UNICODE(dict->ma_keys)); } - op(_LOAD_GLOBAL_MODULE, (index/1 -- null if (oparg & 1), res)) { + op(_LOAD_GLOBAL_MODULE, (index/1 -- res, null if (oparg & 1))) { PyDictObject *dict = (PyDictObject *)GLOBALS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); res = entries[index].me_value; @@ -1368,7 +1388,7 @@ dummy_func( null = NULL; } - op(_LOAD_GLOBAL_BUILTINS, (index/1 -- null if (oparg & 1), res)) { + op(_LOAD_GLOBAL_BUILTINS, (index/1 -- res, null if (oparg & 1))) { PyDictObject *bdict = (PyDictObject *)BUILTINS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); res = entries[index].me_value; @@ -1379,13 +1399,13 @@ dummy_func( } macro(LOAD_GLOBAL_MODULE) = - _SKIP_CACHE + // Skip over the counter + unused/1 + // Skip over the counter _GUARD_GLOBALS_VERSION + - _SKIP_CACHE + // Skip over the builtins version + unused/1 + // Skip over the builtins version _LOAD_GLOBAL_MODULE; macro(LOAD_GLOBAL_BUILTIN) = - _SKIP_CACHE + // Skip over the counter + unused/1 + // Skip over the counter _GUARD_GLOBALS_VERSION + _GUARD_BUILTINS_VERSION + _LOAD_GLOBAL_BUILTINS; @@ -1413,7 +1433,7 @@ dummy_func( // Can't use ERROR_IF here. 
// Fortunately we don't need its superpower. if (oldobj == NULL) { - format_exc_unbound(tstate, _PyFrame_GetCode(frame), oparg); + _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); goto error; } PyCell_SET(cell, NULL); @@ -1434,7 +1454,7 @@ dummy_func( PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { - format_exc_unbound(tstate, _PyFrame_GetCode(frame), oparg); + _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); goto error; } Py_INCREF(value); @@ -1445,7 +1465,7 @@ dummy_func( PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { - format_exc_unbound(tstate, _PyFrame_GetCode(frame), oparg); + _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); ERROR_IF(true, error); } Py_INCREF(value); @@ -1594,8 +1614,7 @@ dummy_func( ERROR_IF(map == NULL, error); } - inst(DICT_UPDATE, (update --)) { - PyObject *dict = PEEK(oparg + 1); // update is still on the stack + inst(DICT_UPDATE, (dict, unused[oparg - 1], update -- dict, unused[oparg - 1])) { if (PyDict_Update(dict, update) < 0) { if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { _PyErr_Format(tstate, PyExc_TypeError, @@ -1608,26 +1627,23 @@ dummy_func( DECREF_INPUTS(); } - inst(DICT_MERGE, (update --)) { - PyObject *dict = PEEK(oparg + 1); // update is still on the stack - + inst(DICT_MERGE, (callable, unused, unused, dict, unused[oparg - 1], update -- callable, unused, unused, dict, unused[oparg - 1])) { if (_PyDict_MergeEx(dict, update, 2) < 0) { - format_kwargs_error(tstate, PEEK(3 + oparg), update); + _PyEval_FormatKwargsError(tstate, callable, update); DECREF_INPUTS(); ERROR_IF(true, error); } DECREF_INPUTS(); } - inst(MAP_ADD, (key, value --)) { - PyObject *dict = PEEK(oparg + 2); // key, value are still on the stack + inst(MAP_ADD, (dict, unused[oparg - 1], key, value -- dict, unused[oparg - 1])) { assert(PyDict_CheckExact(dict)); /* dict[key] = value */ // Do not DECREF INPUTS because the function steals the references ERROR_IF(_PyDict_SetItem_Take2((PyDictObject *)dict, key, value) != 0, error); } - inst(INSTRUMENTED_LOAD_SUPER_ATTR, (unused/9, unused, unused, unused -- unused if (oparg & 1), unused)) { + inst(INSTRUMENTED_LOAD_SUPER_ATTR, (unused/9, unused, unused, unused -- unused, unused if (oparg & 1))) { _PySuperAttrCache *cache = (_PySuperAttrCache *)next_instr; // cancel out the decrement that will happen in LOAD_SUPER_ATTR; we // don't want to specialize instrumented instructions @@ -1640,7 +1656,7 @@ dummy_func( LOAD_SUPER_ATTR_METHOD, }; - inst(LOAD_SUPER_ATTR, (unused/1, global_super, class, self -- res2 if (oparg & 1), res)) { + inst(LOAD_SUPER_ATTR, (unused/1, global_super, class, self -- attr, null if (oparg & 1))) { PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); int load_method = oparg & 1; #if ENABLE_SPECIALIZATION @@ -1684,9 +1700,10 @@ dummy_func( } DECREF_INPUTS(); ERROR_IF(super == NULL, error); - res = PyObject_GetAttr(super, name); + attr = PyObject_GetAttr(super, name); Py_DECREF(super); - ERROR_IF(res == NULL, error); + ERROR_IF(attr == NULL, error); + null = NULL; } pseudo(LOAD_SUPER_METHOD) = { @@ -1701,18 +1718,18 @@ dummy_func( LOAD_SUPER_ATTR, }; - inst(LOAD_SUPER_ATTR_ATTR, (unused/1, global_super, class, self -- res2 if (oparg & 1), res)) { + inst(LOAD_SUPER_ATTR_ATTR, (unused/1, global_super, class, self -- attr, unused if (0))) { assert(!(oparg & 1)); DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); 
STAT_INC(LOAD_SUPER_ATTR, hit); PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); - res = _PySuper_Lookup((PyTypeObject *)class, self, name, NULL); + attr = _PySuper_Lookup((PyTypeObject *)class, self, name, NULL); DECREF_INPUTS(); - ERROR_IF(res == NULL, error); + ERROR_IF(attr == NULL, error); } - inst(LOAD_SUPER_ATTR_METHOD, (unused/1, global_super, class, self -- res2, res)) { + inst(LOAD_SUPER_ATTR_METHOD, (unused/1, global_super, class, self -- attr, self_or_null)) { assert(oparg & 1); DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); @@ -1720,20 +1737,19 @@ dummy_func( PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); PyTypeObject *cls = (PyTypeObject *)class; int method_found = 0; - res2 = _PySuper_Lookup(cls, self, name, - cls->tp_getattro == PyObject_GenericGetAttr ? &method_found : NULL); + attr = _PySuper_Lookup(cls, self, name, + Py_TYPE(self)->tp_getattro == PyObject_GenericGetAttr ? &method_found : NULL); Py_DECREF(global_super); Py_DECREF(class); - if (res2 == NULL) { + if (attr == NULL) { Py_DECREF(self); ERROR_IF(true, error); } if (method_found) { - res = self; // transfer ownership + self_or_null = self; // transfer ownership } else { Py_DECREF(self); - res = res2; - res2 = NULL; + self_or_null = NULL; } } @@ -1752,7 +1768,7 @@ dummy_func( LOAD_ATTR_NONDESCRIPTOR_NO_DICT, }; - inst(LOAD_ATTR, (unused/9, owner -- res2 if (oparg & 1), res)) { + inst(LOAD_ATTR, (unused/9, owner -- attr, self_or_null if (oparg & 1))) { #if ENABLE_SPECIALIZATION _PyAttrCache *cache = (_PyAttrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1767,16 +1783,15 @@ dummy_func( PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); if (oparg & 1) { /* Designed to work in tandem with CALL, pushes two values. */ - PyObject* meth = NULL; - if (_PyObject_GetMethod(owner, name, &meth)) { + attr = NULL; + if (_PyObject_GetMethod(owner, name, &attr)) { /* We can bypass temporary bound method object. meth is unbound method and obj is self. meth | self | arg1 | ... | argN */ - assert(meth != NULL); // No errors on this branch - res2 = meth; - res = owner; // Transfer ownership + assert(attr != NULL); // No errors on this branch + self_or_null = owner; // Transfer ownership } else { /* meth is not an unbound method (but a regular attr, or @@ -1787,16 +1802,15 @@ dummy_func( NULL | meth | arg1 | ... | argN */ DECREF_INPUTS(); - ERROR_IF(meth == NULL, error); - res2 = NULL; - res = meth; + ERROR_IF(attr == NULL, error); + self_or_null = NULL; } } else { /* Classic, pushes one value. 
*/ - res = PyObject_GetAttr(owner, name); + attr = PyObject_GetAttr(owner, name); DECREF_INPUTS(); - ERROR_IF(res == NULL, error); + ERROR_IF(attr == NULL, error); } } @@ -1813,27 +1827,30 @@ dummy_func( op(_CHECK_MANAGED_OBJECT_HAS_VALUES, (owner -- owner)) { assert(Py_TYPE(owner)->tp_dictoffset < 0); assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), + LOAD_ATTR); } - op(_LOAD_ATTR_INSTANCE_VALUE, (index/1, unused/5, owner -- res2 if (oparg & 1), res)) { + op(_LOAD_ATTR_INSTANCE_VALUE, (index/1, owner -- attr, null if (oparg & 1))) { PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - res = _PyDictOrValues_GetValues(dorv)->values[index]; - DEOPT_IF(res == NULL, LOAD_ATTR); + attr = _PyDictOrValues_GetValues(dorv)->values[index]; + DEOPT_IF(attr == NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - res2 = NULL; + Py_INCREF(attr); + null = NULL; DECREF_INPUTS(); } macro(LOAD_ATTR_INSTANCE_VALUE) = - _SKIP_CACHE + // Skip over the counter + unused/1 + // Skip over the counter _GUARD_TYPE_VERSION + _CHECK_MANAGED_OBJECT_HAS_VALUES + - _LOAD_ATTR_INSTANCE_VALUE; + _LOAD_ATTR_INSTANCE_VALUE + + unused/5; // Skip over rest of cache - inst(LOAD_ATTR_MODULE, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) { + inst(LOAD_ATTR_MODULE, (unused/1, type_version/2, index/1, unused/5, owner -- attr, null if (oparg & 1))) { DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict; assert(dict != NULL); @@ -1841,15 +1858,15 @@ dummy_func( assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); assert(index < dict->ma_keys->dk_nentries); PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + index; - res = ep->me_value; - DEOPT_IF(res == NULL, LOAD_ATTR); + attr = ep->me_value; + DEOPT_IF(attr == NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - res2 = NULL; + Py_INCREF(attr); + null = NULL; DECREF_INPUTS(); } - inst(LOAD_ATTR_WITH_HINT, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) { + inst(LOAD_ATTR_WITH_HINT, (unused/1, type_version/2, index/1, unused/5, owner -- attr, null if (oparg & 1))) { PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); @@ -1865,49 +1882,50 @@ dummy_func( if (DK_IS_UNICODE(dict->ma_keys)) { PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; DEOPT_IF(ep->me_key != name, LOAD_ATTR); - res = ep->me_value; + attr = ep->me_value; } else { PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; DEOPT_IF(ep->me_key != name, LOAD_ATTR); - res = ep->me_value; + attr = ep->me_value; } - DEOPT_IF(res == NULL, LOAD_ATTR); + DEOPT_IF(attr == NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - res2 = NULL; + Py_INCREF(attr); + null = NULL; DECREF_INPUTS(); } - inst(LOAD_ATTR_SLOT, (unused/1, type_version/2, index/1, unused/5, owner -- res2 if (oparg & 1), res)) { + inst(LOAD_ATTR_SLOT, (unused/1, type_version/2, index/1, unused/5, owner -- attr, null if (oparg & 1))) { PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); char *addr = (char *)owner + index; - res = *(PyObject 
**)addr; - DEOPT_IF(res == NULL, LOAD_ATTR); + attr = *(PyObject **)addr; + DEOPT_IF(attr == NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - res2 = NULL; + Py_INCREF(attr); + null = NULL; DECREF_INPUTS(); } - inst(LOAD_ATTR_CLASS, (unused/1, type_version/2, unused/2, descr/4, cls -- res2 if (oparg & 1), res)) { + inst(LOAD_ATTR_CLASS, (unused/1, type_version/2, unused/2, descr/4, owner -- attr, null if (oparg & 1))) { - DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); - DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, + DEOPT_IF(!PyType_Check(owner), LOAD_ATTR); + DEOPT_IF(((PyTypeObject *)owner)->tp_version_tag != type_version, LOAD_ATTR); assert(type_version != 0); STAT_INC(LOAD_ATTR, hit); - res2 = NULL; - res = descr; - assert(res != NULL); - Py_INCREF(res); + null = NULL; + attr = descr; + assert(attr != NULL); + Py_INCREF(attr); DECREF_INPUTS(); } - inst(LOAD_ATTR_PROPERTY, (unused/1, type_version/2, func_version/2, fget/4, owner -- unused if (oparg & 1), unused)) { + inst(LOAD_ATTR_PROPERTY, (unused/1, type_version/2, func_version/2, fget/4, owner -- unused, unused if (0))) { + assert((oparg & 1) == 0); DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); @@ -1924,16 +1942,15 @@ dummy_func( Py_INCREF(fget); _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 1); // Manipulate stack directly because we exit with DISPATCH_INLINED(). - SET_TOP(NULL); - int shrink_stack = !(oparg & 1); - STACK_SHRINK(shrink_stack); + STACK_SHRINK(1); new_frame->localsplus[0] = owner; SKIP_OVER(INLINE_CACHE_ENTRIES_LOAD_ATTR); frame->return_offset = 0; DISPATCH_INLINED(new_frame); } - inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, (unused/1, type_version/2, func_version/2, getattribute/4, owner -- unused if (oparg & 1), unused)) { + inst(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, (unused/1, type_version/2, func_version/2, getattribute/4, owner -- unused, unused if (0))) { + assert((oparg & 1) == 0); DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); @@ -1951,9 +1968,7 @@ dummy_func( Py_INCREF(f); _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 2); // Manipulate stack directly because we exit with DISPATCH_INLINED(). 
- SET_TOP(NULL); - int shrink_stack = !(oparg & 1); - STACK_SHRINK(shrink_stack); + STACK_SHRINK(1); new_frame->localsplus[0] = owner; new_frame->localsplus[1] = Py_NewRef(name); SKIP_OVER(INLINE_CACHE_ENTRIES_LOAD_ATTR); @@ -2126,15 +2141,15 @@ dummy_func( } inst(CHECK_EG_MATCH, (exc_value, match_type -- rest, match)) { - if (check_except_star_type_valid(tstate, match_type) < 0) { + if (_PyEval_CheckExceptStarTypeValid(tstate, match_type) < 0) { DECREF_INPUTS(); ERROR_IF(true, error); } match = NULL; rest = NULL; - int res = exception_group_match(exc_value, match_type, - &match, &rest); + int res = _PyEval_ExceptionGroupMatch(exc_value, match_type, + &match, &rest); DECREF_INPUTS(); ERROR_IF(res < 0, error); @@ -2148,7 +2163,7 @@ dummy_func( inst(CHECK_EXC_MATCH, (left, right -- left, b)) { assert(PyExceptionInstance_Check(left)); - if (check_except_type_valid(tstate, right) < 0) { + if (_PyEval_CheckExceptTypeValid(tstate, right) < 0) { DECREF_INPUTS(); ERROR_IF(true, error); } @@ -2182,7 +2197,14 @@ dummy_func( JUMPBY(1-oparg); #if ENABLE_SPECIALIZATION here[1].cache += (1 << OPTIMIZER_BITS_IN_COUNTER); - if (here[1].cache > tstate->interp->optimizer_backedge_threshold) { + if (here[1].cache > tstate->interp->optimizer_backedge_threshold && + // Double-check that the opcode isn't instrumented or something: + here->op.code == JUMP_BACKWARD && + // _PyOptimizer_BackEdge is going to change frame->prev_instr, + // which breaks line event calculations: + next_instr->op.code != INSTRUMENTED_LINE + ) + { OBJECT_STAT_INC(optimization_attempts); frame = _PyOptimizer_BackEdge(frame, here, next_instr, stack_pointer); if (frame == NULL) { @@ -2268,7 +2290,7 @@ dummy_func( // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or // None on failure. assert(PyTuple_CheckExact(names)); - attrs = match_class(tstate, subject, type, oparg, names); + attrs = _PyEval_MatchClass(tstate, subject, type, oparg, names); DECREF_INPUTS(); if (attrs) { assert(PyTuple_CheckExact(attrs)); // Success! @@ -2291,7 +2313,7 @@ dummy_func( inst(MATCH_KEYS, (subject, keys -- subject, keys, values_or_none)) { // On successful match, PUSH(values). Otherwise, PUSH(None). 
- values_or_none = match_keys(tstate, subject, keys); + values_or_none = _PyEval_MatchKeys(tstate, subject, keys); ERROR_IF(values_or_none == NULL, error); } @@ -2658,7 +2680,12 @@ dummy_func( assert(val && PyExceptionInstance_Check(val)); exc = PyExceptionInstance_Class(val); tb = PyException_GetTraceback(val); - Py_XDECREF(tb); + if (tb == NULL) { + tb = Py_None; + } + else { + Py_DECREF(tb); + } assert(PyLong_Check(lasti)); (void)lasti; // Shut up compiler warning if asserts are off PyObject *stack[4] = {NULL, exc, val, tb}; @@ -2695,80 +2722,84 @@ dummy_func( exc_info->exc_value = Py_NewRef(new_exc); } - inst(LOAD_ATTR_METHOD_WITH_VALUES, (unused/1, type_version/2, keys_version/2, descr/4, self -- res2 if (1), res)) { + inst(LOAD_ATTR_METHOD_WITH_VALUES, (unused/1, type_version/2, keys_version/2, descr/4, owner -- attr, self if (1))) { assert(oparg & 1); /* Cached method object */ - PyTypeObject *self_cls = Py_TYPE(self); + PyTypeObject *owner_cls = Py_TYPE(owner); assert(type_version != 0); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyHeapTypeObject *self_heap_type = (PyHeapTypeObject *)self_cls; - DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(owner_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), + LOAD_ATTR); + PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; + DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); - res2 = Py_NewRef(descr); - assert(_PyType_HasFeature(Py_TYPE(res2), Py_TPFLAGS_METHOD_DESCRIPTOR)); - res = self; + attr = Py_NewRef(descr); + assert(_PyType_HasFeature(Py_TYPE(attr), Py_TPFLAGS_METHOD_DESCRIPTOR)); + self = owner; } - inst(LOAD_ATTR_METHOD_NO_DICT, (unused/1, type_version/2, unused/2, descr/4, self -- res2 if (1), res)) { + inst(LOAD_ATTR_METHOD_NO_DICT, (unused/1, type_version/2, unused/2, descr/4, owner -- attr, self if (1))) { assert(oparg & 1); - PyTypeObject *self_cls = Py_TYPE(self); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_dictoffset == 0); + PyTypeObject *owner_cls = Py_TYPE(owner); + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(owner_cls->tp_dictoffset == 0); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)); - res2 = Py_NewRef(descr); - res = self; + attr = Py_NewRef(descr); + self = owner; } - inst(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, (unused/1, type_version/2, keys_version/2, descr/4, self -- res2 if (0), res)) { + inst(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, (unused/1, type_version/2, keys_version/2, descr/4, owner -- attr, unused if (0))) { assert((oparg & 1) == 0); - PyTypeObject *self_cls = Py_TYPE(self); + PyTypeObject *owner_cls = Py_TYPE(owner); assert(type_version != 0); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyHeapTypeObject *self_heap_type = (PyHeapTypeObject 
*)self_cls; - DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(owner_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), + LOAD_ATTR); + PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; + DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); DECREF_INPUTS(); - res = Py_NewRef(descr); + attr = Py_NewRef(descr); } - inst(LOAD_ATTR_NONDESCRIPTOR_NO_DICT, (unused/1, type_version/2, unused/2, descr/4, self -- res2 if (0), res)) { + inst(LOAD_ATTR_NONDESCRIPTOR_NO_DICT, (unused/1, type_version/2, unused/2, descr/4, owner -- attr, unused if (0))) { assert((oparg & 1) == 0); - PyTypeObject *self_cls = Py_TYPE(self); + PyTypeObject *owner_cls = Py_TYPE(owner); assert(type_version != 0); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_dictoffset == 0); + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(owner_cls->tp_dictoffset == 0); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); DECREF_INPUTS(); - res = Py_NewRef(descr); + attr = Py_NewRef(descr); } - inst(LOAD_ATTR_METHOD_LAZY_DICT, (unused/1, type_version/2, unused/2, descr/4, self -- res2 if (1), res)) { + inst(LOAD_ATTR_METHOD_LAZY_DICT, (unused/1, type_version/2, unused/2, descr/4, owner -- attr, self if (1))) { assert(oparg & 1); - PyTypeObject *self_cls = Py_TYPE(self); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - Py_ssize_t dictoffset = self_cls->tp_dictoffset; + PyTypeObject *owner_cls = Py_TYPE(owner); + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + Py_ssize_t dictoffset = owner_cls->tp_dictoffset; assert(dictoffset > 0); - PyObject *dict = *(PyObject **)((char *)self + dictoffset); + PyObject *dict = *(PyObject **)((char *)owner + dictoffset); /* This object has a __dict__, just not yet created */ DEOPT_IF(dict != NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)); - res2 = Py_NewRef(descr); - res = self; + attr = Py_NewRef(descr); + self = owner; } inst(KW_NAMES, (--)) { @@ -2778,9 +2809,9 @@ dummy_func( } inst(INSTRUMENTED_CALL, ( -- )) { - int is_meth = PEEK(oparg+2) != NULL; + int is_meth = PEEK(oparg + 1) != NULL; int total_args = oparg + is_meth; - PyObject *function = PEEK(total_args + 1); + PyObject *function = PEEK(oparg + 2); PyObject *arg = total_args == 0 ? &_PyInstrumentation_MISSING : PEEK(total_args); int err = _Py_call_instrumentation_2args( @@ -2822,11 +2853,9 @@ dummy_func( // (Some args may be keywords, see KW_NAMES, which sets 'kwnames'.) // On exit, the stack is [result]. // When calling Python, inline the call using DISPATCH_INLINED(). 
- inst(CALL, (unused/1, unused/2, method, callable, args[oparg] -- res)) { - int is_meth = method != NULL; + inst(CALL, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -2840,13 +2869,12 @@ dummy_func( STAT_INC(CALL, deferred); DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ - if (!is_meth && Py_TYPE(callable) == &PyMethod_Type) { - is_meth = 1; // For consistenct; it's dead, though + if (self_or_null == NULL && Py_TYPE(callable) == &PyMethod_Type) { args--; total_args++; PyObject *self = ((PyMethodObject *)callable)->im_self; args[0] = Py_NewRef(self); - method = ((PyMethodObject *)callable)->im_func; + PyObject *method = ((PyMethodObject *)callable)->im_func; args[-1] = Py_NewRef(method); Py_DECREF(callable); callable = method; @@ -2882,7 +2910,7 @@ dummy_func( kwnames); if (opcode == INSTRUMENTED_CALL) { PyObject *arg = total_args == 0 ? - &_PyInstrumentation_MISSING : PEEK(total_args); + &_PyInstrumentation_MISSING : args[0]; if (res == NULL) { _Py_call_instrumentation_exc2( tstate, PY_MONITORING_EVENT_C_RAISE, @@ -2910,25 +2938,23 @@ dummy_func( // Start out with [NULL, bound_method, arg1, arg2, ...] // Transform to [callable, self, arg1, arg2, ...] // Then fall through to CALL_PY_EXACT_ARGS - inst(CALL_BOUND_METHOD_EXACT_ARGS, (unused/1, unused/2, method, callable, unused[oparg] -- unused)) { - DEOPT_IF(method != NULL, CALL); + inst(CALL_BOUND_METHOD_EXACT_ARGS, (unused/1, unused/2, callable, null, unused[oparg] -- unused)) { + DEOPT_IF(null != NULL, CALL); DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL); STAT_INC(CALL, hit); PyObject *self = ((PyMethodObject *)callable)->im_self; - PEEK(oparg + 1) = Py_NewRef(self); // callable + PEEK(oparg + 1) = Py_NewRef(self); // self_or_null PyObject *meth = ((PyMethodObject *)callable)->im_func; - PEEK(oparg + 2) = Py_NewRef(meth); // method + PEEK(oparg + 2) = Py_NewRef(meth); // callable Py_DECREF(callable); GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); } - inst(CALL_PY_EXACT_ARGS, (unused/1, func_version/2, method, callable, args[oparg] -- unused)) { + inst(CALL_PY_EXACT_ARGS, (unused/1, func_version/2, callable, self_or_null, args[oparg] -- unused)) { ASSERT_KWNAMES_IS_NULL(); DEOPT_IF(tstate->interp->eval_frame, CALL); - int is_meth = method != NULL; int argcount = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; argcount++; } @@ -2950,13 +2976,11 @@ dummy_func( DISPATCH_INLINED(new_frame); } - inst(CALL_PY_WITH_DEFAULTS, (unused/1, func_version/2, method, callable, args[oparg] -- unused)) { + inst(CALL_PY_WITH_DEFAULTS, (unused/1, func_version/2, callable, self_or_null, args[oparg] -- unused)) { ASSERT_KWNAMES_IS_NULL(); DEOPT_IF(tstate->interp->eval_frame, CALL); - int is_meth = method != NULL; int argcount = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; argcount++; } @@ -2988,7 +3012,7 @@ dummy_func( DISPATCH_INLINED(new_frame); } - inst(CALL_NO_KW_TYPE_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_TYPE_1, (unused/1, unused/2, callable, null, args[oparg] -- res)) { ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3000,7 +3024,7 @@ dummy_func( Py_DECREF(&PyType_Type); // I.e., callable } - inst(CALL_NO_KW_STR_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_STR_1, (unused/1, unused/2, callable, null, 
args[oparg] -- res)) { ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3014,7 +3038,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_TUPLE_1, (unused/1, unused/2, null, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_TUPLE_1, (unused/1, unused/2, callable, null, args[oparg] -- res)) { ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3028,7 +3052,7 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_ALLOC_AND_ENTER_INIT, (unused/1, unused/2, null, callable, args[oparg] -- unused)) { + inst(CALL_NO_KW_ALLOC_AND_ENTER_INIT, (unused/1, unused/2, callable, null, args[oparg] -- unused)) { /* This instruction does the following: * 1. Creates the object (by calling ``object.__new__``) * 2. Pushes a shim frame to the frame stack (to cleanup after ``__init__``) @@ -3091,11 +3115,9 @@ dummy_func( } } - inst(CALL_BUILTIN_CLASS, (unused/1, unused/2, method, callable, args[oparg] -- res)) { - int is_meth = method != NULL; + inst(CALL_BUILTIN_CLASS, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -3116,13 +3138,11 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_BUILTIN_O, (unused/1, unused/2, method, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_BUILTIN_O, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { /* Builtin METH_O functions */ ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -3147,13 +3167,11 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_BUILTIN_FAST, (unused/1, unused/2, method, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_BUILTIN_FAST, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { /* Builtin METH_FASTCALL functions, without keywords */ ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -3182,12 +3200,10 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_BUILTIN_FAST_WITH_KEYWORDS, (unused/1, unused/2, method, callable, args[oparg] -- res)) { + inst(CALL_BUILTIN_FAST_WITH_KEYWORDS, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -3217,18 +3233,16 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_LEN, (unused/1, unused/2, method, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_LEN, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { ASSERT_KWNAMES_IS_NULL(); /* len(o) */ - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } DEOPT_IF(total_args != 1, CALL); - PyInterpreterState *interp = _PyInterpreterState_GET(); + PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable != interp->callable_cache.len, CALL); STAT_INC(CALL, hit); PyObject *arg = args[0]; @@ -3244,18 +3258,16 @@ dummy_func( ERROR_IF(res == NULL, error); } - inst(CALL_NO_KW_ISINSTANCE, (unused/1, unused/2, method, callable, args[oparg] -- res)) { + inst(CALL_NO_KW_ISINSTANCE, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { 
ASSERT_KWNAMES_IS_NULL(); /* isinstance(o, o2) */ - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } DEOPT_IF(total_args != 2, CALL); - PyInterpreterState *interp = _PyInterpreterState_GET(); + PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable != interp->callable_cache.isinstance, CALL); STAT_INC(CALL, hit); PyObject *cls = args[1]; @@ -3274,19 +3286,19 @@ dummy_func( } // This is secretly a super-instruction - inst(CALL_NO_KW_LIST_APPEND, (unused/1, unused/2, method, self, args[oparg] -- unused)) { + inst(CALL_NO_KW_LIST_APPEND, (unused/1, unused/2, callable, self, args[oparg] -- unused)) { ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); - assert(method != NULL); - PyInterpreterState *interp = _PyInterpreterState_GET(); - DEOPT_IF(method != interp->callable_cache.list_append, CALL); + assert(self != NULL); + PyInterpreterState *interp = tstate->interp; + DEOPT_IF(callable != interp->callable_cache.list_append, CALL); DEOPT_IF(!PyList_Check(self), CALL); STAT_INC(CALL, hit); if (_PyList_AppendTakeRef((PyListObject *)self, args[0]) < 0) { goto pop_1_error; // Since arg is DECREF'ed already } Py_DECREF(self); - Py_DECREF(method); + Py_DECREF(callable); STACK_SHRINK(3); // CALL + POP_TOP SKIP_OVER(INLINE_CACHE_ENTRIES_CALL + 1); @@ -3294,23 +3306,21 @@ dummy_func( DISPATCH(); } - inst(CALL_NO_KW_METHOD_DESCRIPTOR_O, (unused/1, unused/2, method, unused, args[oparg] -- res)) { + inst(CALL_NO_KW_METHOD_DESCRIPTOR_O, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; DEOPT_IF(total_args != 2, CALL); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != METH_O, CALL); PyObject *arg = args[1]; PyObject *self = args[0]; - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); STAT_INC(CALL, hit); PyCFunction cfunc = meth->ml_meth; // This is slower but CPython promises to check all non-vectorcall @@ -3328,19 +3338,17 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, (unused/1, unused/2, method, unused, args[oparg] -- res)) { - int is_meth = method != NULL; + inst(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL); - PyTypeObject *d_type = callable->d_common.d_type; + PyTypeObject *d_type = method->d_common.d_type; PyObject *self = args[0]; DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL); STAT_INC(CALL, hit); @@ -3360,21 +3368,20 @@ dummy_func( 
CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS, (unused/1, unused/2, method, unused, args[oparg] -- res)) { + inst(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { ASSERT_KWNAMES_IS_NULL(); assert(oparg == 0 || oparg == 1); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } DEOPT_IF(total_args != 1, CALL); - PyMethodDescrObject *callable = (PyMethodDescrObject *)SECOND(); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; PyObject *self = args[0]; - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL); STAT_INC(CALL, hit); PyCFunction cfunc = meth->ml_meth; @@ -3392,22 +3399,20 @@ dummy_func( CHECK_EVAL_BREAKER(); } - inst(CALL_NO_KW_METHOD_DESCRIPTOR_FAST, (unused/1, unused/2, method, unused, args[oparg] -- res)) { + inst(CALL_NO_KW_METHOD_DESCRIPTOR_FAST, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; /* Builtin METH_FASTCALL methods, without keywords */ - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); PyObject *self = args[0]; - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); STAT_INC(CALL, hit); _PyCFunctionFast cfunc = (_PyCFunctionFast)(void(*)(void))meth->ml_meth; @@ -3427,7 +3432,7 @@ dummy_func( GO_TO_INSTRUCTION(CALL_FUNCTION_EX); } - inst(CALL_FUNCTION_EX, (unused, func, callargs, kwargs if (oparg & 1) -- result)) { + inst(CALL_FUNCTION_EX, (func, unused, callargs, kwargs if (oparg & 1) -- result)) { // DICT_MERGE is called before this opcode if there are kwargs. // It converts all dict subtypes in kwargs into regular dicts. 
assert(kwargs == NULL || PyDict_CheckExact(kwargs)); @@ -3490,7 +3495,7 @@ dummy_func( result = PyObject_Call(func, callargs, kwargs); } DECREF_INPUTS(); - assert(PEEK(3 + (oparg & 1)) == NULL); + assert(PEEK(2 + (oparg & 1)) == NULL); ERROR_IF(result == NULL, error); CHECK_EVAL_BREAKER(); } @@ -3610,10 +3615,10 @@ dummy_func( STAT_INC(BINARY_OP, deferred); DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ - assert(0 <= oparg); - assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); - assert(binary_ops[oparg]); - res = binary_ops[oparg](lhs, rhs); + assert(NB_ADD <= oparg); + assert(oparg <= NB_INPLACE_XOR); + assert(_PyEval_BinaryOps[oparg]); + res = _PyEval_BinaryOps[oparg](lhs, rhs); DECREF_INPUTS(); ERROR_IF(res == NULL, error); } diff --git a/Python/ceval.c b/Python/ceval.c index b56ddfb4bd286d..b966399a342d08 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -7,24 +7,25 @@ #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _PyEval_SignalAsyncExc() #include "pycore_code.h" +#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS #include "pycore_function.h" +#include "pycore_instruments.h" #include "pycore_intrinsics.h" #include "pycore_long.h" // _PyLong_GetZero() -#include "pycore_instruments.h" -#include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_moduleobject.h" // PyModuleObject +#include "pycore_object.h" // _PyObject_GC_TRACK() #include "pycore_opcode.h" // EXTRA_CASES #include "pycore_opcode_metadata.h" #include "pycore_opcode_utils.h" // MAKE_FUNCTION_* #include "pycore_pyerrors.h" // _PyErr_GetRaisedException() #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_range.h" // _PyRangeIterObject +#include "pycore_setobject.h" // _PySet_Update() #include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs #include "pycore_sysmodule.h" // _PySys_Audit() #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "pycore_typeobject.h" // _PySuper_Lookup() #include "pycore_uops.h" // _PyUOpExecutorObject -#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS #include "pycore_dict.h" #include "dictobject.h" @@ -33,7 +34,7 @@ #include "opcode.h" #include "pydtrace.h" #include "setobject.h" -#include "structmember.h" // struct PyMemberDef, T_OFFSET_EX + #include #include @@ -96,13 +97,6 @@ } while (0) #endif -// GH-89279: Similar to above, force inlining by using a macro. -#if defined(_MSC_VER) && SIZEOF_INT == 4 -#define _Py_atomic_load_relaxed_int32(ATOMIC_VAL) (assert(sizeof((ATOMIC_VAL)->_value) == 4), *((volatile int*)&((ATOMIC_VAL)->_value))) -#else -#define _Py_atomic_load_relaxed_int32(ATOMIC_VAL) _Py_atomic_load_relaxed(ATOMIC_VAL) -#endif - #ifdef LLTRACE static void @@ -115,11 +109,24 @@ dump_stack(_PyInterpreterFrame *frame, PyObject **stack_pointer) if (ptr != stack_base) { printf(", "); } - if (PyObject_Print(*ptr, stdout, 0) != 0) { + if (*ptr == NULL) { + printf(""); + continue; + } + if ( + *ptr == Py_None + || PyBool_Check(*ptr) + || PyLong_CheckExact(*ptr) + || PyFloat_CheckExact(*ptr) + || PyUnicode_CheckExact(*ptr) + ) { + if (PyObject_Print(*ptr, stdout, 0) == 0) { + continue; + } PyErr_Clear(); - printf("<%s object at %p>", - Py_TYPE(*ptr)->tp_name, (void *)(*ptr)); } + // Don't call __repr__(), it might recurse into the interpreter. 
+ printf("<%s at %p>", Py_TYPE(*ptr)->tp_name, (void *)(*ptr)); } printf("]\n"); fflush(stdout); @@ -134,9 +141,6 @@ lltrace_instruction(_PyInterpreterFrame *frame, if (frame->owner == FRAME_OWNED_BY_CSTACK) { return; } - /* This dump_stack() operation is risky, since the repr() of some - objects enters the interpreter recursively. It is also slow. - So you might want to comment it out. */ dump_stack(frame, stack_pointer); int oparg = next_instr->op.arg; int opcode = next_instr->op.code; @@ -190,13 +194,16 @@ lltrace_resume_frame(_PyInterpreterFrame *frame) static void monitor_raise(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); +static void monitor_reraise(PyThreadState *tstate, + _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr); static int monitor_stop_iteration(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); static void monitor_unwind(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr); -static void monitor_handled(PyThreadState *tstate, +static int monitor_handled(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *exc); static void monitor_throw(PyThreadState *tstate, @@ -206,13 +213,7 @@ static void monitor_throw(PyThreadState *tstate, static PyObject * import_name(PyThreadState *, _PyInterpreterFrame *, PyObject *, PyObject *, PyObject *); static PyObject * import_from(PyThreadState *, PyObject *, PyObject *); -static void format_exc_check_arg(PyThreadState *, PyObject *, const char *, PyObject *); -static void format_exc_unbound(PyThreadState *tstate, PyCodeObject *co, int oparg); static int check_args_iterable(PyThreadState *, PyObject *func, PyObject *vararg); -static int check_except_type_valid(PyThreadState *tstate, PyObject* right); -static int check_except_star_type_valid(PyThreadState *tstate, PyObject* right); -static void format_kwargs_error(PyThreadState *, PyObject *func, PyObject *kwargs); -static void format_awaitable_error(PyThreadState *, PyTypeObject *, int); static int get_exception_handler(PyCodeObject *, int, int*, int*, int*); static _PyInterpreterFrame * _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func, @@ -224,12 +225,6 @@ _PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func, static void _PyEvalFrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame); -#define UNBOUNDLOCAL_ERROR_MSG \ - "cannot access local variable '%s' where it is not associated with a value" -#define UNBOUNDFREE_ERROR_MSG \ - "cannot access free variable '%s' where it is not associated with a" \ - " value in enclosing scope" - #ifdef HAVE_ERRNO_H #include #endif @@ -286,7 +281,7 @@ _Py_CheckRecursiveCall(PyThreadState *tstate, const char *where) } -static const binaryfunc binary_ops[] = { +const binaryfunc _PyEval_BinaryOps[] = { [NB_ADD] = PyNumber_Add, [NB_AND] = PyNumber_And, [NB_FLOOR_DIVIDE] = PyNumber_FloorDivide, @@ -321,8 +316,8 @@ static const binaryfunc binary_ops[] = { // Return a tuple of values corresponding to keys, with error checks for // duplicate/missing keys. -static PyObject* -match_keys(PyThreadState *tstate, PyObject *map, PyObject *keys) +PyObject * +_PyEval_MatchKeys(PyThreadState *tstate, PyObject *map, PyObject *keys) { assert(PyTuple_CheckExact(keys)); Py_ssize_t nkeys = PyTuple_GET_SIZE(keys); @@ -403,7 +398,7 @@ match_keys(PyThreadState *tstate, PyObject *map, PyObject *keys) // Extract a named attribute from the subject, with additional bookkeeping to // raise TypeErrors for repeated lookups. 
On failure, return NULL (with no // error set). Use _PyErr_Occurred(tstate) to disambiguate. -static PyObject* +static PyObject * match_class_attr(PyThreadState *tstate, PyObject *subject, PyObject *type, PyObject *name, PyObject *seen) { @@ -425,9 +420,9 @@ match_class_attr(PyThreadState *tstate, PyObject *subject, PyObject *type, // On success (match), return a tuple of extracted attributes. On failure (no // match), return NULL. Use _PyErr_Occurred(tstate) to disambiguate. -static PyObject* -match_class(PyThreadState *tstate, PyObject *subject, PyObject *type, - Py_ssize_t nargs, PyObject *kwargs) +PyObject* +_PyEval_MatchClass(PyThreadState *tstate, PyObject *subject, PyObject *type, + Py_ssize_t nargs, PyObject *kwargs) { if (!PyType_Check(type)) { const char *e = "called match pattern must be a class"; @@ -533,11 +528,6 @@ match_class(PyThreadState *tstate, PyObject *subject, PyObject *type, static int do_raise(PyThreadState *tstate, PyObject *exc, PyObject *cause); -static int exception_group_match( - PyObject* exc_value, PyObject *match_type, - PyObject **match, PyObject **rest); - -static int unpack_iterable(PyThreadState *, PyObject *, int, int, PyObject **); PyObject * PyEval_EvalCode(PyObject *co, PyObject *globals, PyObject *locals) @@ -644,6 +634,11 @@ extern const struct _PyCode_DEF(8) _Py_InitCleanup; # pragma warning(disable:4102) #endif + +/* _PyEval_EvalFrameDefault() is a *big* function, + * so consume 3 units of C stack */ +#define PY_EVAL_C_STACK_UNITS 2 + PyObject* _Py_HOT_FUNCTION _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) { @@ -696,6 +691,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int frame->previous = &entry_frame; cframe.current_frame = frame; + tstate->c_recursion_remaining -= (PY_EVAL_C_STACK_UNITS - 1); if (_Py_EnterRecursiveCallTstate(tstate, "")) { tstate->c_recursion_remaining--; tstate->py_recursion_remaining--; @@ -743,6 +739,13 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int goto exit_unwind; } lltrace = r; + if (!lltrace) { + // When tracing executed uops, also trace bytecode + char *uop_debug = Py_GETENV("PYTHONUOPSDEBUG"); + if (uop_debug != NULL && *uop_debug >= '0') { + lltrace = (*uop_debug - '0') >= 4; // TODO: Parse an int and all that + } + } } if (lltrace) { lltrace_resume_frame(frame); @@ -827,7 +830,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int unbound_local_error: { - format_exc_check_arg(tstate, PyExc_UnboundLocalError, + _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, UNBOUNDLOCAL_ERROR_MSG, PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) ); @@ -863,7 +866,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int } } monitor_raise(tstate, frame, next_instr-1); - exception_unwind: { /* We can't use frame->f_lasti here, as RERAISE may have set it */ @@ -907,8 +909,15 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int PyObject *exc = _PyErr_GetRaisedException(tstate); PUSH(exc); JUMPTO(handler); - monitor_handled(tstate, frame, next_instr, exc); + if (monitor_handled(tstate, frame, next_instr, exc) < 0) { + goto exception_unwind; + } /* Resume normal execution */ +#ifdef LLTRACE + if (lltrace) { + lltrace_resume_frame(frame); + } +#endif DISPATCH(); } } @@ -926,7 +935,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int /* Restore previous cframe and exit */ 
tstate->cframe = cframe.previous; assert(tstate->cframe->current_frame == frame->previous); - _Py_LeaveRecursiveCallTstate(tstate); + tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; return NULL; } @@ -1278,7 +1287,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func, if (co->co_flags & CO_VARARGS) { PyObject *u = NULL; if (argcount == n) { - u = Py_NewRef(&_Py_SINGLETON(tuple_empty)); + u = (PyObject *)&_Py_SINGLETON(tuple_empty); } else { assert(args != NULL); @@ -1485,6 +1494,7 @@ clear_gen_frame(PyThreadState *tstate, _PyInterpreterFrame * frame) tstate->c_recursion_remaining--; assert(frame->frame_obj == NULL || frame->frame_obj->f_frame == frame); _PyFrame_ClearExceptCode(frame); + _PyErr_ClearExcState(&gen->gi_exc_state); tstate->c_recursion_remaining++; frame->previous = NULL; } @@ -1777,9 +1787,9 @@ do_raise(PyThreadState *tstate, PyObject *exc, PyObject *cause) complicated for inlining). */ -static int -exception_group_match(PyObject* exc_value, PyObject *match_type, - PyObject **match, PyObject **rest) +int +_PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type, + PyObject **match, PyObject **rest) { if (Py_IsNone(exc_value)) { *match = Py_NewRef(Py_None); @@ -1840,9 +1850,9 @@ exception_group_match(PyObject* exc_value, PyObject *match_type, with a variable target. */ -static int -unpack_iterable(PyThreadState *tstate, PyObject *v, - int argcnt, int argcntafter, PyObject **sp) +int +_PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v, + int argcnt, int argcntafter, PyObject **sp) { int i = 0, j = 0; Py_ssize_t ll = 0; @@ -1937,7 +1947,7 @@ static int do_monitor_exc(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, int event) { - assert(event < PY_MONITORING_UNGROUPED_EVENTS); + assert(event < _PY_MONITORING_UNGROUPED_EVENTS); PyObject *exc = PyErr_GetRaisedException(); assert(exc != NULL); int err = _Py_call_instrumentation_arg(tstate, event, frame, instr, exc); @@ -1945,6 +1955,7 @@ do_monitor_exc(PyThreadState *tstate, _PyInterpreterFrame *frame, PyErr_SetRaisedException(exc); } else { + assert(PyErr_Occurred()); Py_DECREF(exc); } return err; @@ -1977,6 +1988,16 @@ monitor_raise(PyThreadState *tstate, _PyInterpreterFrame *frame, do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_RAISE); } +static void +monitor_reraise(PyThreadState *tstate, _PyInterpreterFrame *frame, + _Py_CODEUNIT *instr) +{ + if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_RERAISE)) { + return; + } + do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_RERAISE); +} + static int monitor_stop_iteration(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr) @@ -1995,19 +2016,19 @@ monitor_unwind(PyThreadState *tstate, if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_PY_UNWIND)) { return; } - _Py_call_instrumentation_exc0(tstate, PY_MONITORING_EVENT_PY_UNWIND, frame, instr); + do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_PY_UNWIND); } -static void +static int monitor_handled(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *instr, PyObject *exc) { if (no_tools_for_event(tstate, frame, PY_MONITORING_EVENT_EXCEPTION_HANDLED)) { - return; + return 0; } - _Py_call_instrumentation_arg(tstate, PY_MONITORING_EVENT_EXCEPTION_HANDLED, frame, instr, exc); + return _Py_call_instrumentation_arg(tstate, PY_MONITORING_EVENT_EXCEPTION_HANDLED, frame, instr, exc); } static void @@ -2018,7 +2039,7 @@ monitor_throw(PyThreadState *tstate, if (no_tools_for_event(tstate, frame, 
PY_MONITORING_EVENT_PY_THROW)) { return; } - _Py_call_instrumentation_exc0(tstate, PY_MONITORING_EVENT_PY_THROW, frame, instr); + do_monitor_exc(tstate, frame, instr, PY_MONITORING_EVENT_PY_THROW); } void @@ -2487,8 +2508,8 @@ import_from(PyThreadState *tstate, PyObject *v, PyObject *name) #define CANNOT_EXCEPT_STAR_EG "catching ExceptionGroup with except* "\ "is not allowed. Use except instead." -static int -check_except_type_valid(PyThreadState *tstate, PyObject* right) +int +_PyEval_CheckExceptTypeValid(PyThreadState *tstate, PyObject* right) { if (PyTuple_Check(right)) { Py_ssize_t i, length; @@ -2512,10 +2533,10 @@ check_except_type_valid(PyThreadState *tstate, PyObject* right) return 0; } -static int -check_except_star_type_valid(PyThreadState *tstate, PyObject* right) +int +_PyEval_CheckExceptStarTypeValid(PyThreadState *tstate, PyObject* right) { - if (check_except_type_valid(tstate, right) < 0) { + if (_PyEval_CheckExceptTypeValid(tstate, right) < 0) { return -1; } @@ -2569,8 +2590,8 @@ check_args_iterable(PyThreadState *tstate, PyObject *func, PyObject *args) return 0; } -static void -format_kwargs_error(PyThreadState *tstate, PyObject *func, PyObject *kwargs) +void +_PyEval_FormatKwargsError(PyThreadState *tstate, PyObject *func, PyObject *kwargs) { /* _PyDict_MergeEx raises attribute * error (percolated from an attempt @@ -2611,9 +2632,9 @@ format_kwargs_error(PyThreadState *tstate, PyObject *func, PyObject *kwargs) } } -static void -format_exc_check_arg(PyThreadState *tstate, PyObject *exc, - const char *format_str, PyObject *obj) +void +_PyEval_FormatExcCheckArg(PyThreadState *tstate, PyObject *exc, + const char *format_str, PyObject *obj) { const char *obj_str; @@ -2640,8 +2661,8 @@ format_exc_check_arg(PyThreadState *tstate, PyObject *exc, } } -static void -format_exc_unbound(PyThreadState *tstate, PyCodeObject *co, int oparg) +void +_PyEval_FormatExcUnbound(PyThreadState *tstate, PyCodeObject *co, int oparg) { PyObject *name; /* Don't stomp existing exception */ @@ -2649,16 +2670,16 @@ format_exc_unbound(PyThreadState *tstate, PyCodeObject *co, int oparg) return; name = PyTuple_GET_ITEM(co->co_localsplusnames, oparg); if (oparg < PyCode_GetFirstFree(co)) { - format_exc_check_arg(tstate, PyExc_UnboundLocalError, - UNBOUNDLOCAL_ERROR_MSG, name); + _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, + UNBOUNDLOCAL_ERROR_MSG, name); } else { - format_exc_check_arg(tstate, PyExc_NameError, - UNBOUNDFREE_ERROR_MSG, name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + UNBOUNDFREE_ERROR_MSG, name); } } -static void -format_awaitable_error(PyThreadState *tstate, PyTypeObject *type, int oparg) +void +_PyEval_FormatAwaitableError(PyThreadState *tstate, PyTypeObject *type, int oparg) { if (type->tp_as_async == NULL || type->tp_as_async->am_await == NULL) { if (oparg == 1) { @@ -2703,110 +2724,3 @@ void Py_LeaveRecursiveCall(void) { _Py_LeaveRecursiveCall(); } - -///////////////////// Experimental UOp Interpreter ///////////////////// - -#undef ASSERT_KWNAMES_IS_NULL -#define ASSERT_KWNAMES_IS_NULL() (void)0 - -#undef DEOPT_IF -#define DEOPT_IF(COND, INSTNAME) \ - if ((COND)) { \ - goto deoptimize; \ - } - -_PyInterpreterFrame * -_PyUopExecute(_PyExecutorObject *executor, _PyInterpreterFrame *frame, PyObject **stack_pointer) -{ -#ifdef Py_DEBUG - char *uop_debug = Py_GETENV("PYTHONUOPSDEBUG"); - int lltrace = 0; - if (uop_debug != NULL && *uop_debug >= '0') { - lltrace = *uop_debug - '0'; // TODO: Parse an int and all that - } -#define DPRINTF(level, ...) 
\ - if (lltrace >= (level)) { fprintf(stderr, __VA_ARGS__); } -#else -#define DPRINTF(level, ...) -#endif - - DPRINTF(3, - "Entering _PyUopExecute for %s (%s:%d) at byte offset %ld\n", - PyUnicode_AsUTF8(_PyFrame_GetCode(frame)->co_qualname), - PyUnicode_AsUTF8(_PyFrame_GetCode(frame)->co_filename), - _PyFrame_GetCode(frame)->co_firstlineno, - 2 * (long)(frame->prev_instr + 1 - - (_Py_CODEUNIT *)_PyFrame_GetCode(frame)->co_code_adaptive)); - - PyThreadState *tstate = _PyThreadState_GET(); - _PyUOpExecutorObject *self = (_PyUOpExecutorObject *)executor; - - CHECK_EVAL_BREAKER(); - - OBJECT_STAT_INC(optimization_traces_executed); - _Py_CODEUNIT *ip_offset = (_Py_CODEUNIT *)_PyFrame_GetCode(frame)->co_code_adaptive; - int pc = 0; - int opcode; - int oparg; - uint64_t operand; - - for (;;) { - opcode = self->trace[pc].opcode; - oparg = self->trace[pc].oparg; - operand = self->trace[pc].operand; - DPRINTF(3, - "%4d: uop %s, oparg %d, operand %" PRIu64 ", stack_level %d\n", - pc, - opcode < 256 ? _PyOpcode_OpName[opcode] : _PyOpcode_uop_name[opcode], - oparg, - operand, - (int)(stack_pointer - _PyFrame_Stackbase(frame))); - pc++; - OBJECT_STAT_INC(optimization_uops_executed); - switch (opcode) { - -#undef ENABLE_SPECIALIZATION -#define ENABLE_SPECIALIZATION 0 -#include "executor_cases.c.h" - - default: - { - fprintf(stderr, "Unknown uop %d, operand %" PRIu64 "\n", opcode, operand); - Py_FatalError("Unknown uop"); - } - - } - } - -unbound_local_error: - format_exc_check_arg(tstate, PyExc_UnboundLocalError, - UNBOUNDLOCAL_ERROR_MSG, - PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) - ); - goto error; - -pop_4_error: - STACK_SHRINK(1); -pop_3_error: - STACK_SHRINK(1); -pop_2_error: - STACK_SHRINK(1); -pop_1_error: - STACK_SHRINK(1); -error: - // On ERROR_IF we return NULL as the frame. - // The caller recovers the frame from cframe.current_frame. - DPRINTF(2, "Error: [Opcode %d, operand %" PRIu64 "]\n", opcode, operand); - _PyFrame_SetStackPointer(frame, stack_pointer); - Py_DECREF(self); - return NULL; - -deoptimize: - // On DEOPT_IF we just repeat the last instruction. - // This presumes nothing was popped from the stack (nor pushed). - DPRINTF(2, "DEOPT: [Opcode %d, operand %" PRIu64 "]\n", opcode, operand); - frame->prev_instr--; // Back up to just before destination - _PyFrame_SetStackPointer(frame, stack_pointer); - Py_DECREF(self); - return frame; -} diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index c2c323317d10f9..8dc8b754485856 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -1,4 +1,4 @@ -// Macros and other things needed by ceval.c and bytecodes.c +// Macros and other things needed by ceval.c, executor.c, and bytecodes.c /* Computed GOTOs, or the-optimization-commonly-but-improperly-known-as-"threaded code" @@ -304,6 +304,11 @@ GETITEM(PyObject *v, Py_ssize_t i) { (COUNTER) += (1 << ADAPTIVE_BACKOFF_BITS); \ } while (0); +#define UNBOUNDLOCAL_ERROR_MSG \ + "cannot access local variable '%s' where it is not associated with a value" +#define UNBOUNDFREE_ERROR_MSG \ + "cannot access free variable '%s' where it is not associated with a value" \ + " in enclosing scope" #define NAME_ERROR_MSG "name '%.200s' is not defined" #define KWNAMES_LEN() \ @@ -352,3 +357,10 @@ static const convertion_func_ptr CONVERSION_FUNCTIONS[4] = { }; #define ASSERT_KWNAMES_IS_NULL() assert(kwnames == NULL) + +// GH-89279: Force inlining by using a macro. 
+#if defined(_MSC_VER) && SIZEOF_INT == 4 +#define _Py_atomic_load_relaxed_int32(ATOMIC_VAL) (assert(sizeof((ATOMIC_VAL)->_value) == 4), *((volatile int*)&((ATOMIC_VAL)->_value))) +#else +#define _Py_atomic_load_relaxed_int32(ATOMIC_VAL) _Py_atomic_load_relaxed(ATOMIC_VAL) +#endif diff --git a/Python/compile.c b/Python/compile.c index d5405b46561820..83cf45550e2588 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -28,12 +28,13 @@ #define NEED_OPCODE_TABLES #include "pycore_opcode_utils.h" #undef NEED_OPCODE_TABLES -#include "pycore_flowgraph.h" #include "pycore_code.h" // _PyCode_New() #include "pycore_compile.h" +#include "pycore_flowgraph.h" #include "pycore_intrinsics.h" #include "pycore_long.h" // _PyLong_GetZero() #include "pycore_pystate.h" // _Py_GetConfig() +#include "pycore_setobject.h" // _PySet_NextEntry() #include "pycore_symtable.h" // PySTEntryObject, _PyFuture_FromAST() #define NEED_OPCODE_METADATA @@ -69,9 +70,7 @@ && ((C)->u->u_ste->ste_type == ModuleBlock)) typedef _PyCompilerSrcLocation location; -typedef _PyCfgInstruction cfg_instr; -typedef _PyCfgBasicblock basicblock; -typedef _PyCfgBuilder cfg_builder; +typedef struct _PyCfgBuilder cfg_builder; #define LOCATION(LNO, END_LNO, COL, END_COL) \ ((const _PyCompilerSrcLocation){(LNO), (END_LNO), (COL), (END_COL)}) @@ -100,7 +99,7 @@ static jump_target_label NO_LABEL = {-1}; } #define USE_LABEL(C, LBL) \ - RETURN_IF_ERROR(instr_sequence_use_label(INSTR_SEQUENCE(C), (LBL).id)) + RETURN_IF_ERROR(_PyCompile_InstructionSequence_UseLabel(INSTR_SEQUENCE(C), (LBL).id)) /* fblockinfo tracks the current frame block. @@ -217,8 +216,9 @@ instr_sequence_new_label(instr_sequence *seq) return lbl; } -static int -instr_sequence_use_label(instr_sequence *seq, int lbl) { +int +_PyCompile_InstructionSequence_UseLabel(instr_sequence *seq, int lbl) +{ int old_size = seq->s_labelmap_size; RETURN_IF_ERROR( _PyCompile_EnsureArrayLargeEnough(lbl, @@ -237,8 +237,9 @@ instr_sequence_use_label(instr_sequence *seq, int lbl) { #define MAX_OPCODE 511 -static int -instr_sequence_addop(instr_sequence *seq, int opcode, int oparg, location loc) +int +_PyCompile_InstructionSequence_Addop(instr_sequence *seq, int opcode, int oparg, + location loc) { assert(0 <= opcode && opcode <= MAX_OPCODE); assert(IS_WITHIN_OPCODE_RANGE(opcode)); @@ -287,10 +288,12 @@ instr_sequence_fini(instr_sequence *seq) { seq->s_instrs = NULL; } -static int -instr_sequence_to_cfg(instr_sequence *seq, cfg_builder *g) { - memset(g, 0, sizeof(cfg_builder)); - RETURN_IF_ERROR(_PyCfgBuilder_Init(g)); +static cfg_builder* +instr_sequence_to_cfg(instr_sequence *seq) { + cfg_builder *g = _PyCfgBuilder_New(); + if (g == NULL) { + return NULL; + } /* There can be more than one label for the same offset. The * offset2lbl maping selects one of them which we use consistently. 
@@ -299,7 +302,7 @@ instr_sequence_to_cfg(instr_sequence *seq, cfg_builder *g) { int *offset2lbl = PyMem_Malloc(seq->s_used * sizeof(int)); if (offset2lbl == NULL) { PyErr_NoMemory(); - return ERROR; + goto error; } for (int i = 0; i < seq->s_used; i++) { offset2lbl[i] = -1; @@ -335,23 +338,17 @@ instr_sequence_to_cfg(instr_sequence *seq, cfg_builder *g) { goto error; } } - PyMem_Free(offset2lbl); - - int nblocks = 0; - for (basicblock *b = g->g_block_list; b != NULL; b = b->b_list) { - nblocks++; - } - if ((size_t)nblocks > SIZE_MAX / sizeof(basicblock *)) { - PyErr_NoMemory(); - return ERROR; + if (_PyCfgBuilder_CheckSize(g) < 0) { + goto error; } - return SUCCESS; + PyMem_Free(offset2lbl); + return g; error: + _PyCfgBuilder_Free(g); PyMem_Free(offset2lbl); - return ERROR; + return NULL; } - /* The following items change on entry and exit of code blocks. They must be saved and restored when returning to a block. */ @@ -919,7 +916,7 @@ codegen_addop_noarg(instr_sequence *seq, int opcode, location loc) { assert(!OPCODE_HAS_ARG(opcode)); assert(!IS_ASSEMBLER_OPCODE(opcode)); - return instr_sequence_addop(seq, opcode, 0, loc); + return _PyCompile_InstructionSequence_Addop(seq, opcode, 0, loc); } static Py_ssize_t @@ -1152,7 +1149,7 @@ codegen_addop_i(instr_sequence *seq, int opcode, Py_ssize_t oparg, location loc) int oparg_ = Py_SAFE_DOWNCAST(oparg, Py_ssize_t, int); assert(!IS_ASSEMBLER_OPCODE(opcode)); - return instr_sequence_addop(seq, opcode, oparg_, loc); + return _PyCompile_InstructionSequence_Addop(seq, opcode, oparg_, loc); } static int @@ -1162,7 +1159,7 @@ codegen_addop_j(instr_sequence *seq, location loc, assert(IS_LABEL(target)); assert(OPCODE_HAS_JUMP(opcode) || IS_BLOCK_PUSH_OPCODE(opcode)); assert(!IS_ASSEMBLER_OPCODE(opcode)); - return instr_sequence_addop(seq, opcode, target.id, loc); + return _PyCompile_InstructionSequence_Addop(seq, opcode, target.id, loc); } #define RETURN_IF_ERROR_IN_SCOPE(C, CALL) { \ @@ -1279,7 +1276,7 @@ compiler_enter_scope(struct compiler *c, identifier name, u->u_metadata.u_argcount = 0; u->u_metadata.u_posonlyargcount = 0; u->u_metadata.u_kwonlyargcount = 0; - u->u_ste = PySymtable_Lookup(c->c_st, key); + u->u_ste = _PySymtable_Lookup(c->c_st, key); if (!u->u_ste) { compiler_unit_free(u); return ERROR; @@ -2360,11 +2357,6 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) int is_generic = asdl_seq_LEN(type_params) > 0; - if (is_generic) { - // Used by the CALL to the type parameters function. - ADDOP(c, loc, PUSH_NULL); - } - funcflags = compiler_default_arguments(c, loc, args); if (funcflags == -1) { return ERROR; @@ -2435,8 +2427,12 @@ compiler_function(struct compiler *c, stmt_ty s, int is_async) Py_DECREF(co); if (num_typeparam_args > 0) { ADDOP_I(c, loc, SWAP, num_typeparam_args + 1); + ADDOP_I(c, loc, CALL, num_typeparam_args - 1); + } + else { + ADDOP(c, loc, PUSH_NULL); + ADDOP_I(c, loc, CALL, 0); } - ADDOP_I(c, loc, CALL, num_typeparam_args); } RETURN_IF_ERROR(compiler_apply_decorators(c, decos)); @@ -2564,8 +2560,8 @@ compiler_class_body(struct compiler *c, stmt_ty s, int firstlineno) // these instructions should be attributed to the class line, // not a decorator line loc = LOC(s); - ADDOP(c, loc, PUSH_NULL); ADDOP(c, loc, LOAD_BUILD_CLASS); + ADDOP(c, loc, PUSH_NULL); /* 3. 
load a function (or closure) made from the code object */ if (compiler_make_closure(c, loc, co, 0) < 0) { @@ -2597,7 +2593,6 @@ compiler_class(struct compiler *c, stmt_ty s) int is_generic = asdl_seq_LEN(type_params) > 0; if (is_generic) { Py_XSETREF(c->u->u_private, Py_NewRef(s->v.ClassDef.name)); - ADDOP(c, loc, PUSH_NULL); PyObject *type_params_name = PyUnicode_FromFormat("", s->v.ClassDef.name); if (!type_params_name) { @@ -2665,6 +2660,7 @@ compiler_class(struct compiler *c, stmt_ty s) return ERROR; } Py_DECREF(co); + ADDOP(c, loc, PUSH_NULL); ADDOP_I(c, loc, CALL, 0); } else { RETURN_IF_ERROR(compiler_call_helper(c, loc, 2, @@ -2715,7 +2711,6 @@ compiler_typealias(struct compiler *c, stmt_ty s) int is_generic = asdl_seq_LEN(type_params) > 0; PyObject *name = s->v.TypeAlias.name->v.Name.id; if (is_generic) { - ADDOP(c, loc, PUSH_NULL); PyObject *type_params_name = PyUnicode_FromFormat("", name); if (!type_params_name) { @@ -2755,6 +2750,7 @@ compiler_typealias(struct compiler *c, stmt_ty s) return ERROR; } Py_DECREF(co); + ADDOP(c, loc, PUSH_NULL); ADDOP_I(c, loc, CALL, 0); } RETURN_IF_ERROR(compiler_nameop(c, loc, name, Store)); @@ -4993,9 +4989,9 @@ compiler_call(struct compiler *c, expr_ty e) return SUCCESS; } RETURN_IF_ERROR(check_caller(c, e->v.Call.func)); + VISIT(c, expr, e->v.Call.func); location loc = LOC(e->v.Call.func); ADDOP(c, loc, PUSH_NULL); - VISIT(c, expr, e->v.Call.func); loc = LOC(e); return compiler_call_helper(c, loc, 0, e->v.Call.args, @@ -5684,7 +5680,7 @@ compiler_comprehension(struct compiler *c, expr_ty e, int type, comprehension_ty outermost; int scope_type = c->u->u_scope_type; int is_top_level_await = IS_TOP_LEVEL_AWAIT(c); - PySTEntryObject *entry = PySymtable_Lookup(c->c_st, (void *)e); + PySTEntryObject *entry = _PySymtable_Lookup(c->c_st, (void *)e); if (entry == NULL) { goto error; } @@ -7492,194 +7488,6 @@ _PyCompile_ConstCacheMergeOne(PyObject *const_cache, PyObject **obj) return SUCCESS; } - -static int * -build_cellfixedoffsets(_PyCompile_CodeUnitMetadata *umd) -{ - int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); - - int noffsets = ncellvars + nfreevars; - int *fixed = PyMem_New(int, noffsets); - if (fixed == NULL) { - PyErr_NoMemory(); - return NULL; - } - for (int i = 0; i < noffsets; i++) { - fixed[i] = nlocals + i; - } - - PyObject *varname, *cellindex; - Py_ssize_t pos = 0; - while (PyDict_Next(umd->u_cellvars, &pos, &varname, &cellindex)) { - PyObject *varindex = PyDict_GetItem(umd->u_varnames, varname); - if (varindex != NULL) { - assert(PyLong_AS_LONG(cellindex) < INT_MAX); - assert(PyLong_AS_LONG(varindex) < INT_MAX); - int oldindex = (int)PyLong_AS_LONG(cellindex); - int argoffset = (int)PyLong_AS_LONG(varindex); - fixed[oldindex] = argoffset; - } - } - - return fixed; -} - -static int -insert_prefix_instructions(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, - int *fixed, int nfreevars, int code_flags) -{ - assert(umd->u_firstlineno > 0); - - /* Add the generator prefix instructions. 
*/ - if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) { - cfg_instr make_gen = { - .i_opcode = RETURN_GENERATOR, - .i_oparg = 0, - .i_loc = LOCATION(umd->u_firstlineno, umd->u_firstlineno, -1, -1), - .i_target = NULL, - }; - RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 0, &make_gen)); - cfg_instr pop_top = { - .i_opcode = POP_TOP, - .i_oparg = 0, - .i_loc = NO_LOCATION, - .i_target = NULL, - }; - RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 1, &pop_top)); - } - - /* Set up cells for any variable that escapes, to be put in a closure. */ - const int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); - if (ncellvars) { - // umd->u_cellvars has the cells out of order so we sort them - // before adding the MAKE_CELL instructions. Note that we - // adjust for arg cells, which come first. - const int nvars = ncellvars + (int)PyDict_GET_SIZE(umd->u_varnames); - int *sorted = PyMem_RawCalloc(nvars, sizeof(int)); - if (sorted == NULL) { - PyErr_NoMemory(); - return ERROR; - } - for (int i = 0; i < ncellvars; i++) { - sorted[fixed[i]] = i + 1; - } - for (int i = 0, ncellsused = 0; ncellsused < ncellvars; i++) { - int oldindex = sorted[i] - 1; - if (oldindex == -1) { - continue; - } - cfg_instr make_cell = { - .i_opcode = MAKE_CELL, - // This will get fixed in offset_derefs(). - .i_oparg = oldindex, - .i_loc = NO_LOCATION, - .i_target = NULL, - }; - if (_PyBasicblock_InsertInstruction(entryblock, ncellsused, &make_cell) < 0) { - PyMem_RawFree(sorted); - return ERROR; - } - ncellsused += 1; - } - PyMem_RawFree(sorted); - } - - if (nfreevars) { - cfg_instr copy_frees = { - .i_opcode = COPY_FREE_VARS, - .i_oparg = nfreevars, - .i_loc = NO_LOCATION, - .i_target = NULL, - }; - RETURN_IF_ERROR(_PyBasicblock_InsertInstruction(entryblock, 0, &copy_frees)); - } - - return SUCCESS; -} - -static int -fix_cell_offsets(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, int *fixedmap) -{ - int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); - int noffsets = ncellvars + nfreevars; - - // First deal with duplicates (arg cells). - int numdropped = 0; - for (int i = 0; i < noffsets ; i++) { - if (fixedmap[i] == i + nlocals) { - fixedmap[i] -= numdropped; - } - else { - // It was a duplicate (cell/arg). - numdropped += 1; - } - } - - // Then update offsets, either relative to locals or by cell2arg. - for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - for (int i = 0; i < b->b_iused; i++) { - cfg_instr *inst = &b->b_instr[i]; - // This is called before extended args are generated.
- assert(inst->i_opcode != EXTENDED_ARG); - int oldoffset = inst->i_oparg; - switch(inst->i_opcode) { - case MAKE_CELL: - case LOAD_CLOSURE: - case LOAD_DEREF: - case STORE_DEREF: - case DELETE_DEREF: - case LOAD_FROM_DICT_OR_DEREF: - assert(oldoffset >= 0); - assert(oldoffset < noffsets); - assert(fixedmap[oldoffset] >= 0); - inst->i_oparg = fixedmap[oldoffset]; - } - } - } - - return numdropped; -} - - -static int -prepare_localsplus(_PyCompile_CodeUnitMetadata *umd, cfg_builder *g, int code_flags) -{ - assert(PyDict_GET_SIZE(umd->u_varnames) < INT_MAX); - assert(PyDict_GET_SIZE(umd->u_cellvars) < INT_MAX); - assert(PyDict_GET_SIZE(umd->u_freevars) < INT_MAX); - int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); - int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); - int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); - assert(INT_MAX - nlocals - ncellvars > 0); - assert(INT_MAX - nlocals - ncellvars - nfreevars > 0); - int nlocalsplus = nlocals + ncellvars + nfreevars; - int* cellfixedoffsets = build_cellfixedoffsets(umd); - if (cellfixedoffsets == NULL) { - return ERROR; - } - - - // This must be called before fix_cell_offsets(). - if (insert_prefix_instructions(umd, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) { - PyMem_Free(cellfixedoffsets); - return ERROR; - } - - int numdropped = fix_cell_offsets(umd, g->g_entryblock, cellfixedoffsets); - PyMem_Free(cellfixedoffsets); // At this point we're done with it. - cellfixedoffsets = NULL; - if (numdropped < 0) { - return ERROR; - } - - nlocalsplus -= numdropped; - return nlocalsplus; -} - static int add_return_at_end(struct compiler *c, int addNone) { @@ -7693,12 +7501,11 @@ add_return_at_end(struct compiler *c, int addNone) return SUCCESS; } -static int cfg_to_instr_sequence(cfg_builder *g, instr_sequence *seq); - static PyCodeObject * optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache, int code_flags, PyObject *filename) { + cfg_builder *g = NULL; instr_sequence optimized_instrs; memset(&optimized_instrs, 0, sizeof(instr_sequence)); @@ -7707,51 +7514,37 @@ optimize_and_assemble_code_unit(struct compiler_unit *u, PyObject *const_cache, if (consts == NULL) { goto error; } - cfg_builder g; - if (instr_sequence_to_cfg(&u->u_instr_sequence, &g) < 0) { + g = instr_sequence_to_cfg(&u->u_instr_sequence); + if (g == NULL) { goto error; } - int nparams = (int)PyList_GET_SIZE(u->u_ste->ste_varnames); int nlocals = (int)PyDict_GET_SIZE(u->u_metadata.u_varnames); + int nparams = (int)PyList_GET_SIZE(u->u_ste->ste_varnames); assert(u->u_metadata.u_firstlineno); - if (_PyCfg_OptimizeCodeUnit(&g, consts, const_cache, code_flags, nlocals, - nparams, u->u_metadata.u_firstlineno) < 0) { - goto error; - } - /** Assembly **/ - int nlocalsplus = prepare_localsplus(&u->u_metadata, &g, code_flags); - if (nlocalsplus < 0) { + if (_PyCfg_OptimizeCodeUnit(g, consts, const_cache, nlocals, + nparams, u->u_metadata.u_firstlineno) < 0) { goto error; } - int maxdepth = _PyCfg_Stackdepth(g.g_entryblock, code_flags); - if (maxdepth < 0) { + int stackdepth; + int nlocalsplus; + if (_PyCfg_OptimizedCfgToInstructionSequence(g, &u->u_metadata, code_flags, + &stackdepth, &nlocalsplus, + &optimized_instrs) < 0) { goto error; } - _PyCfg_ConvertPseudoOps(g.g_entryblock); - - /* Order of basic blocks must have been determined by now */ - - if (_PyCfg_ResolveJumps(&g) < 0) { - goto error; - } - - /* Can't modify the bytecode after computing jump offsets. 
*/ - - if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) { - goto error; - } + /** Assembly **/ co = _PyAssemble_MakeCodeObject(&u->u_metadata, const_cache, consts, - maxdepth, &optimized_instrs, nlocalsplus, + stackdepth, &optimized_instrs, nlocalsplus, code_flags, filename); error: Py_XDECREF(consts); instr_sequence_fini(&optimized_instrs); - _PyCfgBuilder_Fini(&g); + _PyCfgBuilder_Free(g); return co; } @@ -7774,39 +7567,6 @@ optimize_and_assemble(struct compiler *c, int addNone) return optimize_and_assemble_code_unit(u, const_cache, code_flags, filename); } -static int -cfg_to_instr_sequence(cfg_builder *g, instr_sequence *seq) -{ - int lbl = 0; - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - b->b_label = (jump_target_label){lbl}; - lbl += b->b_iused; - } - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - RETURN_IF_ERROR(instr_sequence_use_label(seq, b->b_label.id)); - for (int i = 0; i < b->b_iused; i++) { - cfg_instr *instr = &b->b_instr[i]; - if (OPCODE_HAS_JUMP(instr->i_opcode)) { - instr->i_oparg = instr->i_target->b_label.id; - } - RETURN_IF_ERROR( - instr_sequence_addop(seq, instr->i_opcode, instr->i_oparg, instr->i_loc)); - - _PyCompile_ExceptHandlerInfo *hi = &seq->s_instrs[seq->s_used-1].i_except_handler_info; - if (instr->i_except != NULL) { - hi->h_label = instr->i_except->b_label.id; - hi->h_startdepth = instr->i_except->b_startdepth; - hi->h_preserve_lasti = instr->i_except->b_preserve_lasti; - } - else { - hi->h_label = -1; - } - } - } - return SUCCESS; -} - - /* Access to compiler optimizations for unit tests. * * _PyCompile_CodeGen takes and AST, applies code-gen and @@ -7856,7 +7616,7 @@ instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq) for (int i = 0; i < num_insts; i++) { if (is_target[i]) { - if (instr_sequence_use_label(seq, i) < 0) { + if (_PyCompile_InstructionSequence_UseLabel(seq, i) < 0) { goto error; } } @@ -7896,7 +7656,7 @@ instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq) if (PyErr_Occurred()) { goto error; } - if (instr_sequence_addop(seq, opcode, oparg, loc) < 0) { + if (_PyCompile_InstructionSequence_Addop(seq, opcode, oparg, loc) < 0) { goto error; } } @@ -7907,23 +7667,26 @@ instructions_to_instr_sequence(PyObject *instructions, instr_sequence *seq) return ERROR; } -static int -instructions_to_cfg(PyObject *instructions, cfg_builder *g) +static cfg_builder* +instructions_to_cfg(PyObject *instructions) { + cfg_builder *g = NULL; instr_sequence seq; memset(&seq, 0, sizeof(instr_sequence)); if (instructions_to_instr_sequence(instructions, &seq) < 0) { goto error; } - if (instr_sequence_to_cfg(&seq, g) < 0) { + g = instr_sequence_to_cfg(&seq); + if (g == NULL) { goto error; } instr_sequence_fini(&seq); - return SUCCESS; + return g; error: + _PyCfgBuilder_Free(g); instr_sequence_fini(&seq); - return ERROR; + return NULL; } static PyObject * @@ -7962,42 +7725,14 @@ instr_sequence_to_instructions(instr_sequence *seq) static PyObject * cfg_to_instructions(cfg_builder *g) { - PyObject *instructions = PyList_New(0); - if (instructions == NULL) { + instr_sequence seq; + memset(&seq, 0, sizeof(seq)); + if (_PyCfg_ToInstructionSequence(g, &seq) < 0) { return NULL; } - int lbl = 0; - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - b->b_label = (jump_target_label){lbl}; - lbl += b->b_iused; - } - for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - for (int i = 0; i < b->b_iused; i++) { - cfg_instr *instr = &b->b_instr[i]; - location 
loc = instr->i_loc; - int arg = HAS_TARGET(instr->i_opcode) ? - instr->i_target->b_label.id : instr->i_oparg; - - PyObject *inst_tuple = Py_BuildValue( - "(iiiiii)", instr->i_opcode, arg, - loc.lineno, loc.end_lineno, - loc.col_offset, loc.end_col_offset); - if (inst_tuple == NULL) { - goto error; - } - - if (PyList_Append(instructions, inst_tuple) != 0) { - Py_DECREF(inst_tuple); - goto error; - } - Py_DECREF(inst_tuple); - } - } - - return instructions; -error: - Py_DECREF(instructions); - return NULL; + PyObject *res = instr_sequence_to_instructions(&seq); + instr_sequence_fini(&seq); + return res; } // C implementation of inspect.cleandoc() @@ -8053,6 +7788,12 @@ _PyCompile_CleanDoc(PyObject *doc) } char *buff = PyMem_Malloc(doc_size); + if (buff == NULL){ + Py_DECREF(doc); + PyErr_NoMemory(); + return NULL; + } + char *w = buff; while (p < pend) { @@ -8173,34 +7914,36 @@ _PyCompile_CodeGen(PyObject *ast, PyObject *filename, PyCompilerFlags *pflags, PyObject * _PyCompile_OptimizeCfg(PyObject *instructions, PyObject *consts, int nlocals) { + cfg_builder *g = NULL; PyObject *res = NULL; PyObject *const_cache = PyDict_New(); if (const_cache == NULL) { return NULL; } - cfg_builder g; - if (instructions_to_cfg(instructions, &g) < 0) { + g = instructions_to_cfg(instructions); + if (g == NULL) { goto error; } - int code_flags = 0, nparams = 0, firstlineno = 1; - if (_PyCfg_OptimizeCodeUnit(&g, consts, const_cache, code_flags, nlocals, + int nparams = 0, firstlineno = 1; + if (_PyCfg_OptimizeCodeUnit(g, consts, const_cache, nlocals, nparams, firstlineno) < 0) { goto error; } - res = cfg_to_instructions(&g); + res = cfg_to_instructions(g); error: Py_DECREF(const_cache); - _PyCfgBuilder_Fini(&g); + _PyCfgBuilder_Free(g); return res; } -int _PyCfg_JumpLabelsToTargets(basicblock *entryblock); +int _PyCfg_JumpLabelsToTargets(cfg_builder *g); PyCodeObject * _PyCompile_Assemble(_PyCompile_CodeUnitMetadata *umd, PyObject *filename, PyObject *instructions) { + cfg_builder *g = NULL; PyCodeObject *co = NULL; instr_sequence optimized_instrs; memset(&optimized_instrs, 0, sizeof(instr_sequence)); @@ -8210,37 +7953,20 @@ _PyCompile_Assemble(_PyCompile_CodeUnitMetadata *umd, PyObject *filename, return NULL; } - cfg_builder g; - if (instructions_to_cfg(instructions, &g) < 0) { + g = instructions_to_cfg(instructions); + if (g == NULL) { goto error; } - if (_PyCfg_JumpLabelsToTargets(g.g_entryblock) < 0) { + if (_PyCfg_JumpLabelsToTargets(g) < 0) { goto error; } int code_flags = 0; - int nlocalsplus = prepare_localsplus(umd, &g, code_flags); - if (nlocalsplus < 0) { - goto error; - } - - int maxdepth = _PyCfg_Stackdepth(g.g_entryblock, code_flags); - if (maxdepth < 0) { - goto error; - } - - _PyCfg_ConvertPseudoOps(g.g_entryblock); - - /* Order of basic blocks must have been determined by now */ - - if (_PyCfg_ResolveJumps(&g) < 0) { - goto error; - } - - /* Can't modify the bytecode after computing jump offsets. 
*/ - - if (cfg_to_instr_sequence(&g, &optimized_instrs) < 0) { + int stackdepth, nlocalsplus; + if (_PyCfg_OptimizedCfgToInstructionSequence(g, umd, code_flags, + &stackdepth, &nlocalsplus, + &optimized_instrs) < 0) { goto error; } @@ -8249,13 +7975,13 @@ _PyCompile_Assemble(_PyCompile_CodeUnitMetadata *umd, PyObject *filename, goto error; } co = _PyAssemble_MakeCodeObject(umd, const_cache, - consts, maxdepth, &optimized_instrs, + consts, stackdepth, &optimized_instrs, nlocalsplus, code_flags, filename); Py_DECREF(consts); error: Py_DECREF(const_cache); - _PyCfgBuilder_Fini(&g); + _PyCfgBuilder_Free(g); instr_sequence_fini(&optimized_instrs); return co; } diff --git a/Python/context.c b/Python/context.c index 1ffae9871be7b3..c94c014219d0e4 100644 --- a/Python/context.c +++ b/Python/context.c @@ -7,7 +7,7 @@ #include "pycore_object.h" #include "pycore_pyerrors.h" #include "pycore_pystate.h" // _PyThreadState_GET() -#include "structmember.h" // PyMemberDef + #include "clinic/context.c.h" @@ -1042,7 +1042,7 @@ _contextvars_ContextVar_reset(PyContextVar *self, PyObject *token) static PyMemberDef PyContextVar_members[] = { - {"name", T_OBJECT, offsetof(PyContextVar, var_name), READONLY}, + {"name", _Py_T_OBJECT, offsetof(PyContextVar, var_name), Py_READONLY}, {NULL} }; @@ -1267,7 +1267,7 @@ PyTypeObject _PyContextTokenMissing_Type = { static PyObject * get_token_missing(void) { - return Py_NewRef(&_Py_SINGLETON(context_token_missing)); + return (PyObject *)&_Py_SINGLETON(context_token_missing); } diff --git a/Python/executor.c b/Python/executor.c new file mode 100644 index 00000000000000..4a18618c0c6c0c --- /dev/null +++ b/Python/executor.c @@ -0,0 +1,126 @@ +#include "Python.h" + +#include "pycore_call.h" +#include "pycore_ceval.h" +#include "pycore_dict.h" +#include "pycore_emscripten_signal.h" +#include "pycore_intrinsics.h" +#include "pycore_long.h" +#include "pycore_object.h" +#include "pycore_opcode_metadata.h" +#include "pycore_opcode_utils.h" +#include "pycore_pyerrors.h" +#include "pycore_range.h" +#include "pycore_setobject.h" // _PySet_Update() +#include "pycore_sliceobject.h" +#include "pycore_uops.h" + +#include "ceval_macros.h" + + +#undef ASSERT_KWNAMES_IS_NULL +#define ASSERT_KWNAMES_IS_NULL() (void)0 + +#undef DEOPT_IF +#define DEOPT_IF(COND, INSTNAME) \ + if ((COND)) { \ + goto deoptimize; \ + } + +#undef ENABLE_SPECIALIZATION +#define ENABLE_SPECIALIZATION 0 + + +_PyInterpreterFrame * +_PyUopExecute(_PyExecutorObject *executor, _PyInterpreterFrame *frame, PyObject **stack_pointer) +{ +#ifdef Py_DEBUG + char *uop_debug = Py_GETENV("PYTHONUOPSDEBUG"); + int lltrace = 0; + if (uop_debug != NULL && *uop_debug >= '0') { + lltrace = *uop_debug - '0'; // TODO: Parse an int and all that + } + #define DPRINTF(level, ...) \ + if (lltrace >= (level)) { printf(__VA_ARGS__); } +#else + #define DPRINTF(level, ...) 
+#endif + + DPRINTF(3, + "Entering _PyUopExecute for %s (%s:%d) at byte offset %ld\n", + PyUnicode_AsUTF8(_PyFrame_GetCode(frame)->co_qualname), + PyUnicode_AsUTF8(_PyFrame_GetCode(frame)->co_filename), + _PyFrame_GetCode(frame)->co_firstlineno, + 2 * (long)(frame->prev_instr + 1 - + (_Py_CODEUNIT *)_PyFrame_GetCode(frame)->co_code_adaptive)); + + PyThreadState *tstate = _PyThreadState_GET(); + _PyUOpExecutorObject *self = (_PyUOpExecutorObject *)executor; + + CHECK_EVAL_BREAKER(); + + OBJECT_STAT_INC(optimization_traces_executed); + _Py_CODEUNIT *ip_offset = (_Py_CODEUNIT *)_PyFrame_GetCode(frame)->co_code_adaptive; + int pc = 0; + int opcode; + int oparg; + uint64_t operand; + + for (;;) { + opcode = self->trace[pc].opcode; + oparg = self->trace[pc].oparg; + operand = self->trace[pc].operand; + DPRINTF(3, + "%4d: uop %s, oparg %d, operand %" PRIu64 ", stack_level %d\n", + pc, + opcode < 256 ? _PyOpcode_OpName[opcode] : _PyOpcode_uop_name[opcode], + oparg, + operand, + (int)(stack_pointer - _PyFrame_Stackbase(frame))); + pc++; + OBJECT_STAT_INC(optimization_uops_executed); + switch (opcode) { + +#include "executor_cases.c.h" + + default: + { + fprintf(stderr, "Unknown uop %d, operand %" PRIu64 "\n", opcode, operand); + Py_FatalError("Unknown uop"); + } + + } + } + +unbound_local_error: + _PyEval_FormatExcCheckArg(tstate, PyExc_UnboundLocalError, + UNBOUNDLOCAL_ERROR_MSG, + PyTuple_GetItem(_PyFrame_GetCode(frame)->co_localsplusnames, oparg) + ); + goto error; + +pop_4_error: + STACK_SHRINK(1); +pop_3_error: + STACK_SHRINK(1); +pop_2_error: + STACK_SHRINK(1); +pop_1_error: + STACK_SHRINK(1); +error: + // On ERROR_IF we return NULL as the frame. + // The caller recovers the frame from cframe.current_frame. + DPRINTF(2, "Error: [Opcode %d, operand %" PRIu64 "]\n", opcode, operand); + _PyFrame_SetStackPointer(frame, stack_pointer); + Py_DECREF(self); + return NULL; + +deoptimize: + // On DEOPT_IF we just repeat the last instruction. + // This presumes nothing was popped from the stack (nor pushed). + DPRINTF(2, "DEOPT: [Opcode %d, operand %" PRIu64 "]\n", opcode, operand); + frame->prev_instr--; // Back up to just before destination + _PyFrame_SetStackPointer(frame, stack_pointer); + Py_DECREF(self); + return frame; +} diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index e1f8b9f208c76d..a7b5054417ed8f 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -47,14 +47,16 @@ } case STORE_FAST: { - PyObject *value = stack_pointer[-1]; + PyObject *value; + value = stack_pointer[-1]; SETLOCAL(oparg, value); STACK_SHRINK(1); break; } case POP_TOP: { - PyObject *value = stack_pointer[-1]; + PyObject *value; + value = stack_pointer[-1]; Py_DECREF(value); STACK_SHRINK(1); break; @@ -69,8 +71,10 @@ } case END_SEND: { - PyObject *value = stack_pointer[-1]; - PyObject *receiver = stack_pointer[-2]; + PyObject *value; + PyObject *receiver; + value = stack_pointer[-1]; + receiver = stack_pointer[-2]; Py_DECREF(receiver); STACK_SHRINK(1); stack_pointer[-1] = value; @@ -78,8 +82,9 @@ } case UNARY_NEGATIVE: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; res = PyNumber_Negative(value); Py_DECREF(value); if (res == NULL) goto pop_1_error; @@ -88,8 +93,9 @@ } case UNARY_NOT: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; assert(PyBool_Check(value)); res = Py_IsFalse(value) ? 
Py_True : Py_False; stack_pointer[-1] = res; @@ -98,8 +104,9 @@ case TO_BOOL: { static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; #if ENABLE_SPECIALIZATION _PyToBoolCache *cache = (_PyToBoolCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -119,15 +126,17 @@ } case TO_BOOL_BOOL: { - PyObject *value = stack_pointer[-1]; + PyObject *value; + value = stack_pointer[-1]; DEOPT_IF(!PyBool_Check(value), TO_BOOL); STAT_INC(TO_BOOL, hit); break; } case TO_BOOL_INT: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; DEOPT_IF(!PyLong_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); if (_PyLong_IsZero((PyLongObject *)value)) { @@ -143,8 +152,9 @@ } case TO_BOOL_LIST: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; DEOPT_IF(!PyList_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); res = Py_SIZE(value) ? Py_True : Py_False; @@ -154,8 +164,9 @@ } case TO_BOOL_NONE: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; // This one is a bit weird, because we expect *some* failures: DEOPT_IF(!Py_IsNone(value), TO_BOOL); STAT_INC(TO_BOOL, hit); @@ -165,8 +176,9 @@ } case TO_BOOL_STR: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; DEOPT_IF(!PyUnicode_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); if (value == &_Py_STR(empty)) { @@ -183,8 +195,9 @@ } case TO_BOOL_ALWAYS_TRUE: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; uint32_t version = (uint32_t)operand; // This one is a bit weird, because we expect *some* failures: assert(version); @@ -197,8 +210,9 @@ } case UNARY_INVERT: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; res = PyNumber_Invert(value); Py_DECREF(value); if (res == NULL) goto pop_1_error; @@ -207,17 +221,21 @@ } case _GUARD_BOTH_INT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; + right = stack_pointer[-1]; + left = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); break; } case _BINARY_OP_MULTIPLY_INT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; STAT_INC(BINARY_OP, hit); res = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); @@ -229,9 +247,11 @@ } case _BINARY_OP_ADD_INT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; STAT_INC(BINARY_OP, hit); res = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); @@ -243,9 +263,11 @@ } case _BINARY_OP_SUBTRACT_INT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; STAT_INC(BINARY_OP, hit); res = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, 
(destructor)PyObject_Free); @@ -257,17 +279,21 @@ } case _GUARD_BOTH_FLOAT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; + right = stack_pointer[-1]; + left = stack_pointer[-2]; DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); break; } case _BINARY_OP_MULTIPLY_FLOAT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval * @@ -279,9 +305,11 @@ } case _BINARY_OP_ADD_FLOAT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval + @@ -293,9 +321,11 @@ } case _BINARY_OP_SUBTRACT_FLOAT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval - @@ -307,17 +337,21 @@ } case _GUARD_BOTH_UNICODE: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; + right = stack_pointer[-1]; + left = stack_pointer[-2]; DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(!PyUnicode_CheckExact(right), BINARY_OP); break; } case _BINARY_OP_ADD_UNICODE: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; STAT_INC(BINARY_OP, hit); res = PyUnicode_Concat(left, right); _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); @@ -330,9 +364,11 @@ case BINARY_SUBSCR: { static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size"); - PyObject *sub = stack_pointer[-1]; - PyObject *container = stack_pointer[-2]; + PyObject *sub; + PyObject *container; PyObject *res; + sub = stack_pointer[-1]; + container = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -353,10 +389,13 @@ } case BINARY_SLICE: { - PyObject *stop = stack_pointer[-1]; - PyObject *start = stack_pointer[-2]; - PyObject *container = stack_pointer[-3]; + PyObject *stop; + PyObject *start; + PyObject *container; PyObject *res; + stop = stack_pointer[-1]; + start = stack_pointer[-2]; + container = stack_pointer[-3]; PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); // Can't use ERROR_IF() here, because we haven't // DECREF'ed container yet, and we still own slice. 
@@ -375,10 +414,14 @@ } case STORE_SLICE: { - PyObject *stop = stack_pointer[-1]; - PyObject *start = stack_pointer[-2]; - PyObject *container = stack_pointer[-3]; - PyObject *v = stack_pointer[-4]; + PyObject *stop; + PyObject *start; + PyObject *container; + PyObject *v; + stop = stack_pointer[-1]; + start = stack_pointer[-2]; + container = stack_pointer[-3]; + v = stack_pointer[-4]; PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); int err; if (slice == NULL) { @@ -396,9 +439,11 @@ } case BINARY_SUBSCR_LIST_INT: { - PyObject *sub = stack_pointer[-1]; - PyObject *list = stack_pointer[-2]; + PyObject *sub; + PyObject *list; PyObject *res; + sub = stack_pointer[-1]; + list = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); @@ -417,10 +462,35 @@ break; } + case BINARY_SUBSCR_STR_INT: { + PyObject *sub; + PyObject *str; + PyObject *res; + sub = stack_pointer[-1]; + str = stack_pointer[-2]; + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyUnicode_CheckExact(str), BINARY_SUBSCR); + DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR); + Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; + DEOPT_IF(PyUnicode_GET_LENGTH(str) <= index, BINARY_SUBSCR); + // Specialize for reading an ASCII character from any string: + Py_UCS4 c = PyUnicode_READ_CHAR(str, index); + DEOPT_IF(Py_ARRAY_LENGTH(_Py_SINGLETON(strings).ascii) <= c, BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = (PyObject*)&_Py_SINGLETON(strings).ascii[c]; + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(str); + STACK_SHRINK(1); + stack_pointer[-1] = res; + break; + } + case BINARY_SUBSCR_TUPLE_INT: { - PyObject *sub = stack_pointer[-1]; - PyObject *tuple = stack_pointer[-2]; + PyObject *sub; + PyObject *tuple; PyObject *res; + sub = stack_pointer[-1]; + tuple = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); @@ -440,9 +510,11 @@ } case BINARY_SUBSCR_DICT: { - PyObject *sub = stack_pointer[-1]; - PyObject *dict = stack_pointer[-2]; + PyObject *sub; + PyObject *dict; PyObject *res; + sub = stack_pointer[-1]; + dict = stack_pointer[-2]; DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); res = PyDict_GetItemWithError(dict, sub); @@ -463,16 +535,20 @@ } case LIST_APPEND: { - PyObject *v = stack_pointer[-1]; - PyObject *list = stack_pointer[-(2 + (oparg-1))]; + PyObject *v; + PyObject *list; + v = stack_pointer[-1]; + list = stack_pointer[-2 - (oparg-1)]; if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error; STACK_SHRINK(1); break; } case SET_ADD: { - PyObject *v = stack_pointer[-1]; - PyObject *set = stack_pointer[-(2 + (oparg-1))]; + PyObject *v; + PyObject *set; + v = stack_pointer[-1]; + set = stack_pointer[-2 - (oparg-1)]; int err = PySet_Add(set, v); Py_DECREF(v); if (err) goto pop_1_error; @@ -482,9 +558,12 @@ case STORE_SUBSCR: { static_assert(INLINE_CACHE_ENTRIES_STORE_SUBSCR == 1, "incorrect cache size"); - PyObject *sub = stack_pointer[-1]; - PyObject *container = stack_pointer[-2]; - PyObject *v = stack_pointer[-3]; + PyObject *sub; + PyObject *container; + PyObject *v; + sub = stack_pointer[-1]; + container = stack_pointer[-2]; + v = stack_pointer[-3]; #if ENABLE_SPECIALIZATION _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -506,9 +585,12 @@ } case STORE_SUBSCR_LIST_INT: { - PyObject 
*sub = stack_pointer[-1]; - PyObject *list = stack_pointer[-2]; - PyObject *value = stack_pointer[-3]; + PyObject *sub; + PyObject *list; + PyObject *value; + sub = stack_pointer[-1]; + list = stack_pointer[-2]; + value = stack_pointer[-3]; DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); @@ -530,9 +612,12 @@ } case STORE_SUBSCR_DICT: { - PyObject *sub = stack_pointer[-1]; - PyObject *dict = stack_pointer[-2]; - PyObject *value = stack_pointer[-3]; + PyObject *sub; + PyObject *dict; + PyObject *value; + sub = stack_pointer[-1]; + dict = stack_pointer[-2]; + value = stack_pointer[-3]; DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); STAT_INC(STORE_SUBSCR, hit); int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); @@ -543,8 +628,10 @@ } case DELETE_SUBSCR: { - PyObject *sub = stack_pointer[-1]; - PyObject *container = stack_pointer[-2]; + PyObject *sub; + PyObject *container; + sub = stack_pointer[-1]; + container = stack_pointer[-2]; /* del container[sub] */ int err = PyObject_DelItem(container, sub); Py_DECREF(container); @@ -555,10 +642,11 @@ } case CALL_INTRINSIC_1: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; assert(oparg <= MAX_INTRINSIC_1); - res = _PyIntrinsics_UnaryFunctions[oparg](tstate, value); + res = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, value); Py_DECREF(value); if (res == NULL) goto pop_1_error; stack_pointer[-1] = res; @@ -566,11 +654,13 @@ } case CALL_INTRINSIC_2: { - PyObject *value1 = stack_pointer[-1]; - PyObject *value2 = stack_pointer[-2]; + PyObject *value1; + PyObject *value2; PyObject *res; + value1 = stack_pointer[-1]; + value2 = stack_pointer[-2]; assert(oparg <= MAX_INTRINSIC_2); - res = _PyIntrinsics_BinaryFunctions[oparg](tstate, value2, value1); + res = _PyIntrinsics_BinaryFunctions[oparg].func(tstate, value2, value1); Py_DECREF(value2); Py_DECREF(value1); if (res == NULL) goto pop_2_error; @@ -580,8 +670,9 @@ } case GET_AITER: { - PyObject *obj = stack_pointer[-1]; + PyObject *obj; PyObject *iter; + obj = stack_pointer[-1]; unaryfunc getter = NULL; PyTypeObject *type = Py_TYPE(obj); @@ -617,8 +708,9 @@ } case GET_ANEXT: { - PyObject *aiter = stack_pointer[-1]; + PyObject *aiter; PyObject *awaitable; + aiter = stack_pointer[-1]; unaryfunc getter = NULL; PyObject *next_iter = NULL; PyTypeObject *type = Py_TYPE(aiter); @@ -667,12 +759,13 @@ } case GET_AWAITABLE: { - PyObject *iterable = stack_pointer[-1]; + PyObject *iterable; PyObject *iter; + iterable = stack_pointer[-1]; iter = _PyCoro_GetAwaitableIter(iterable); if (iter == NULL) { - format_awaitable_error(tstate, Py_TYPE(iterable), oparg); + _PyEval_FormatAwaitableError(tstate, Py_TYPE(iterable), oparg); } Py_DECREF(iterable); @@ -697,7 +790,8 @@ } case POP_EXCEPT: { - PyObject *exc_value = stack_pointer[-1]; + PyObject *exc_value; + exc_value = stack_pointer[-1]; _PyErr_StackItem *exc_info = tstate->exc_info; Py_XSETREF(exc_info->exc_value, exc_value); STACK_SHRINK(1); @@ -726,7 +820,8 @@ } case STORE_NAME: { - PyObject *v = stack_pointer[-1]; + PyObject *v; + v = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); PyObject *ns = LOCALS(); int err; @@ -758,9 +853,9 @@ err = PyObject_DelItem(ns, name); // Can't use ERROR_IF here. 
if (err != 0) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, - name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, + name); goto error; } break; @@ -768,7 +863,8 @@ case UNPACK_SEQUENCE: { static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size"); - PyObject *seq = stack_pointer[-1]; + PyObject *seq; + seq = stack_pointer[-1]; #if ENABLE_SPECIALIZATION _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -780,7 +876,7 @@ DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ PyObject **top = stack_pointer + oparg - 1; - int res = unpack_iterable(tstate, seq, oparg, -1, top); + int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top); Py_DECREF(seq); if (res == 0) goto pop_1_error; STACK_SHRINK(1); @@ -789,8 +885,10 @@ } case UNPACK_SEQUENCE_TWO_TUPLE: { - PyObject *seq = stack_pointer[-1]; - PyObject **values = stack_pointer - (1); + PyObject *seq; + PyObject **values; + seq = stack_pointer[-1]; + values = stack_pointer - 1; DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); assert(oparg == 2); @@ -804,8 +902,10 @@ } case UNPACK_SEQUENCE_TUPLE: { - PyObject *seq = stack_pointer[-1]; - PyObject **values = stack_pointer - (1); + PyObject *seq; + PyObject **values; + seq = stack_pointer[-1]; + values = stack_pointer - 1; DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); STAT_INC(UNPACK_SEQUENCE, hit); @@ -820,8 +920,10 @@ } case UNPACK_SEQUENCE_LIST: { - PyObject *seq = stack_pointer[-1]; - PyObject **values = stack_pointer - (1); + PyObject *seq; + PyObject **values; + seq = stack_pointer[-1]; + values = stack_pointer - 1; DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); STAT_INC(UNPACK_SEQUENCE, hit); @@ -836,10 +938,11 @@ } case UNPACK_EX: { - PyObject *seq = stack_pointer[-1]; + PyObject *seq; + seq = stack_pointer[-1]; int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); PyObject **top = stack_pointer + totalargs - 1; - int res = unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); + int res = _PyEval_UnpackIterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); Py_DECREF(seq); if (res == 0) goto pop_1_error; STACK_GROW((oparg & 0xFF) + (oparg >> 8)); @@ -848,8 +951,10 @@ case STORE_ATTR: { static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); - PyObject *owner = stack_pointer[-1]; - PyObject *v = stack_pointer[-2]; + PyObject *owner; + PyObject *v; + owner = stack_pointer[-1]; + v = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PyAttrCache *cache = (_PyAttrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -871,7 +976,8 @@ } case DELETE_ATTR: { - PyObject *owner = stack_pointer[-1]; + PyObject *owner; + owner = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); int err = PyObject_DelAttr(owner, name); Py_DECREF(owner); @@ -881,7 +987,8 @@ } case STORE_GLOBAL: { - PyObject *v = stack_pointer[-1]; + PyObject *v; + v = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); int err = PyDict_SetItem(GLOBALS(), name, v); Py_DECREF(v); @@ -897,8 +1004,8 @@ // Can't use ERROR_IF here. 
if (err != 0) { if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); } goto error; } @@ -920,8 +1027,9 @@ } case _LOAD_FROM_DICT_OR_GLOBALS: { - PyObject *mod_or_class_dict = stack_pointer[-1]; + PyObject *mod_or_class_dict; PyObject *v; + mod_or_class_dict = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { Py_DECREF(mod_or_class_dict); @@ -941,7 +1049,7 @@ goto error; } if (v == NULL) { - format_exc_check_arg( + _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); goto error; @@ -954,8 +1062,8 @@ case LOAD_GLOBAL: { static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size"); + PyObject *res; PyObject *null = NULL; - PyObject *v; #if ENABLE_SPECIALIZATION _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -971,30 +1079,30 @@ if (PyDict_CheckExact(GLOBALS()) && PyDict_CheckExact(BUILTINS())) { - v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), - (PyDictObject *)BUILTINS(), - name); - if (v == NULL) { + res = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), + (PyDictObject *)BUILTINS(), + name); + if (res == NULL) { if (!_PyErr_Occurred(tstate)) { /* _PyDict_LoadGlobal() returns NULL without raising * an exception if the key doesn't exist */ - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); } if (true) goto error; } - Py_INCREF(v); + Py_INCREF(res); } else { /* Slow-path if globals or builtins is not a dict */ /* namespace 1: globals */ - if (PyMapping_GetOptionalItem(GLOBALS(), name, &v) < 0) goto error; - if (v == NULL) { + if (PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0) goto error; + if (res == NULL) { /* namespace 2: builtins */ - if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) goto error; - if (v == NULL) { - format_exc_check_arg( + if (PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0) goto error; + if (res == NULL) { + _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); if (true) goto error; @@ -1004,12 +1112,8 @@ null = NULL; STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = v; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = null; } - break; - } - - case _SKIP_CACHE: { + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = res; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = null; } break; } @@ -1032,8 +1136,8 @@ } case _LOAD_GLOBAL_MODULE: { - PyObject *null = NULL; PyObject *res; + PyObject *null = NULL; uint16_t index = (uint16_t)operand; PyDictObject *dict = (PyDictObject *)GLOBALS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); @@ -1044,14 +1148,14 @@ null = NULL; STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = null; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = res; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 
1 : 0)] = null; } break; } case _LOAD_GLOBAL_BUILTINS: { - PyObject *null = NULL; PyObject *res; + PyObject *null = NULL; uint16_t index = (uint16_t)operand; PyDictObject *bdict = (PyDictObject *)BUILTINS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); @@ -1062,8 +1166,8 @@ null = NULL; STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = null; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = res; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = null; } break; } @@ -1080,7 +1184,7 @@ // Can't use ERROR_IF here. // Fortunately we don't need its superpower. if (oldobj == NULL) { - format_exc_unbound(tstate, _PyFrame_GetCode(frame), oparg); + _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); goto error; } PyCell_SET(cell, NULL); @@ -1089,8 +1193,9 @@ } case LOAD_FROM_DICT_OR_DEREF: { - PyObject *class_dict = stack_pointer[-1]; + PyObject *class_dict; PyObject *value; + class_dict = stack_pointer[-1]; PyObject *name; assert(class_dict); assert(oparg >= 0 && oparg < _PyFrame_GetCode(frame)->co_nlocalsplus); @@ -1104,7 +1209,7 @@ PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { - format_exc_unbound(tstate, _PyFrame_GetCode(frame), oparg); + _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); goto error; } Py_INCREF(value); @@ -1118,7 +1223,7 @@ PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { - format_exc_unbound(tstate, _PyFrame_GetCode(frame), oparg); + _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); if (true) goto error; } Py_INCREF(value); @@ -1128,7 +1233,8 @@ } case STORE_DEREF: { - PyObject *v = stack_pointer[-1]; + PyObject *v; + v = stack_pointer[-1]; PyObject *cell = GETLOCAL(oparg); PyObject *oldobj = PyCell_GET(cell); PyCell_SET(cell, v); @@ -1152,8 +1258,9 @@ } case BUILD_STRING: { - PyObject **pieces = (stack_pointer - oparg); + PyObject **pieces; PyObject *str; + pieces = stack_pointer - oparg; str = _PyUnicode_JoinArray(&_Py_STR(empty), pieces, oparg); for (int _i = oparg; --_i >= 0;) { Py_DECREF(pieces[_i]); @@ -1166,8 +1273,9 @@ } case BUILD_TUPLE: { - PyObject **values = (stack_pointer - oparg); + PyObject **values; PyObject *tup; + values = stack_pointer - oparg; tup = _PyTuple_FromArraySteal(values, oparg); if (tup == NULL) { STACK_SHRINK(oparg); goto error; } STACK_SHRINK(oparg); @@ -1177,8 +1285,9 @@ } case BUILD_LIST: { - PyObject **values = (stack_pointer - oparg); + PyObject **values; PyObject *list; + values = stack_pointer - oparg; list = _PyList_FromArraySteal(values, oparg); if (list == NULL) { STACK_SHRINK(oparg); goto error; } STACK_SHRINK(oparg); @@ -1188,8 +1297,10 @@ } case LIST_EXTEND: { - PyObject *iterable = stack_pointer[-1]; - PyObject *list = stack_pointer[-(2 + (oparg-1))]; + PyObject *iterable; + PyObject *list; + iterable = stack_pointer[-1]; + list = stack_pointer[-2 - (oparg-1)]; PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); if (none_val == NULL) { if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && @@ -1210,8 +1321,10 @@ } case SET_UPDATE: { - PyObject *iterable = stack_pointer[-1]; - PyObject *set = stack_pointer[-(2 + (oparg-1))]; + PyObject *iterable; + PyObject *set; + iterable = stack_pointer[-1]; + set = stack_pointer[-2 - (oparg-1)]; int err = _PySet_Update(set, iterable); Py_DECREF(iterable); if (err < 0) goto pop_1_error; @@ -1220,8 +1333,9 @@ } case BUILD_SET: { - PyObject **values = 
(stack_pointer - oparg); + PyObject **values; PyObject *set; + values = stack_pointer - oparg; set = PySet_New(NULL); if (set == NULL) goto error; @@ -1243,8 +1357,9 @@ } case BUILD_MAP: { - PyObject **values = (stack_pointer - oparg*2); + PyObject **values; PyObject *map; + values = stack_pointer - oparg*2; map = _PyDict_FromItems( values, 2, values+1, 2, @@ -1304,9 +1419,11 @@ } case BUILD_CONST_KEY_MAP: { - PyObject *keys = stack_pointer[-1]; - PyObject **values = (stack_pointer - (1 + oparg)); + PyObject *keys; + PyObject **values; PyObject *map; + keys = stack_pointer[-1]; + values = stack_pointer - 1 - oparg; if (!PyTuple_CheckExact(keys) || PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { _PyErr_SetString(tstate, PyExc_SystemError, @@ -1327,8 +1444,10 @@ } case DICT_UPDATE: { - PyObject *update = stack_pointer[-1]; - PyObject *dict = PEEK(oparg + 1); // update is still on the stack + PyObject *update; + PyObject *dict; + update = stack_pointer[-1]; + dict = stack_pointer[-2 - (oparg - 1)]; if (PyDict_Update(dict, update) < 0) { if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { _PyErr_Format(tstate, PyExc_TypeError, @@ -1344,11 +1463,14 @@ } case DICT_MERGE: { - PyObject *update = stack_pointer[-1]; - PyObject *dict = PEEK(oparg + 1); // update is still on the stack - + PyObject *update; + PyObject *dict; + PyObject *callable; + update = stack_pointer[-1]; + dict = stack_pointer[-2 - (oparg - 1)]; + callable = stack_pointer[-5 - (oparg - 1)]; if (_PyDict_MergeEx(dict, update, 2) < 0) { - format_kwargs_error(tstate, PEEK(3 + oparg), update); + _PyEval_FormatKwargsError(tstate, callable, update); Py_DECREF(update); if (true) goto pop_1_error; } @@ -1358,9 +1480,12 @@ } case MAP_ADD: { - PyObject *value = stack_pointer[-1]; - PyObject *key = stack_pointer[-2]; - PyObject *dict = PEEK(oparg + 2); // key, value are still on the stack + PyObject *value; + PyObject *key; + PyObject *dict; + value = stack_pointer[-1]; + key = stack_pointer[-2]; + dict = stack_pointer[-3 - (oparg - 1)]; assert(PyDict_CheckExact(dict)); /* dict[key] = value */ // Do not DECREF INPUTS because the function steals the references @@ -1370,34 +1495,37 @@ } case LOAD_SUPER_ATTR_ATTR: { - PyObject *self = stack_pointer[-1]; - PyObject *class = stack_pointer[-2]; - PyObject *global_super = stack_pointer[-3]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *self; + PyObject *class; + PyObject *global_super; + PyObject *attr; + self = stack_pointer[-1]; + class = stack_pointer[-2]; + global_super = stack_pointer[-3]; assert(!(oparg & 1)); DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); STAT_INC(LOAD_SUPER_ATTR, hit); PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); - res = _PySuper_Lookup((PyTypeObject *)class, self, name, NULL); + attr = _PySuper_Lookup((PyTypeObject *)class, self, name, NULL); Py_DECREF(global_super); Py_DECREF(class); Py_DECREF(self); - if (res == NULL) goto pop_3_error; + if (attr == NULL) goto pop_3_error; STACK_SHRINK(2); - STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (0 ? 
1 : 0)] = attr; break; } case LOAD_SUPER_ATTR_METHOD: { - PyObject *self = stack_pointer[-1]; - PyObject *class = stack_pointer[-2]; - PyObject *global_super = stack_pointer[-3]; - PyObject *res2; - PyObject *res; + PyObject *self; + PyObject *class; + PyObject *global_super; + PyObject *attr; + PyObject *self_or_null; + self = stack_pointer[-1]; + class = stack_pointer[-2]; + global_super = stack_pointer[-3]; assert(oparg & 1); DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); @@ -1405,32 +1533,32 @@ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); PyTypeObject *cls = (PyTypeObject *)class; int method_found = 0; - res2 = _PySuper_Lookup(cls, self, name, - cls->tp_getattro == PyObject_GenericGetAttr ? &method_found : NULL); + attr = _PySuper_Lookup(cls, self, name, + Py_TYPE(self)->tp_getattro == PyObject_GenericGetAttr ? &method_found : NULL); Py_DECREF(global_super); Py_DECREF(class); - if (res2 == NULL) { + if (attr == NULL) { Py_DECREF(self); if (true) goto pop_3_error; } if (method_found) { - res = self; // transfer ownership + self_or_null = self; // transfer ownership } else { Py_DECREF(self); - res = res2; - res2 = NULL; + self_or_null = NULL; } STACK_SHRINK(1); - stack_pointer[-1] = res; - stack_pointer[-2] = res2; + stack_pointer[-2] = attr; + stack_pointer[-1] = self_or_null; break; } case LOAD_ATTR: { static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); - PyObject *owner = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *self_or_null = NULL; + owner = stack_pointer[-1]; #if ENABLE_SPECIALIZATION _PyAttrCache *cache = (_PyAttrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1445,16 +1573,15 @@ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); if (oparg & 1) { /* Designed to work in tandem with CALL, pushes two values. */ - PyObject* meth = NULL; - if (_PyObject_GetMethod(owner, name, &meth)) { + attr = NULL; + if (_PyObject_GetMethod(owner, name, &attr)) { /* We can bypass temporary bound method object. meth is unbound method and obj is self. meth | self | arg1 | ... | argN */ - assert(meth != NULL); // No errors on this branch - res2 = meth; - res = owner; // Transfer ownership + assert(attr != NULL); // No errors on this branch + self_or_null = owner; // Transfer ownership } else { /* meth is not an unbound method (but a regular attr, or @@ -1465,25 +1592,25 @@ NULL | meth | arg1 | ... | argN */ Py_DECREF(owner); - if (meth == NULL) goto pop_1_error; - res2 = NULL; - res = meth; + if (attr == NULL) goto pop_1_error; + self_or_null = NULL; } } else { /* Classic, pushes one value. */ - res = PyObject_GetAttr(owner, name); + attr = PyObject_GetAttr(owner, name); Py_DECREF(owner); - if (res == NULL) goto pop_1_error; + if (attr == NULL) goto pop_1_error; } STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 
1 : 0)] = self_or_null; } break; } case _GUARD_TYPE_VERSION: { - PyObject *owner = stack_pointer[-1]; + PyObject *owner; + owner = stack_pointer[-1]; uint32_t type_version = (uint32_t)operand; PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); @@ -1492,37 +1619,43 @@ } case _CHECK_MANAGED_OBJECT_HAS_VALUES: { - PyObject *owner = stack_pointer[-1]; + PyObject *owner; + owner = stack_pointer[-1]; assert(Py_TYPE(owner)->tp_dictoffset < 0); assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), + LOAD_ATTR); break; } case _LOAD_ATTR_INSTANCE_VALUE: { - PyObject *owner = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *null = NULL; + owner = stack_pointer[-1]; uint16_t index = (uint16_t)operand; PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - res = _PyDictOrValues_GetValues(dorv)->values[index]; - DEOPT_IF(res == NULL, LOAD_ATTR); + attr = _PyDictOrValues_GetValues(dorv)->values[index]; + DEOPT_IF(attr == NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - res2 = NULL; + Py_INCREF(attr); + null = NULL; Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = null; } break; } case COMPARE_OP: { static_assert(INLINE_CACHE_ENTRIES_COMPARE_OP == 1, "incorrect cache size"); - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1550,9 +1683,11 @@ } case COMPARE_OP_FLOAT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); STAT_INC(COMPARE_OP, hit); @@ -1570,9 +1705,11 @@ } case COMPARE_OP_INT: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP); @@ -1594,9 +1731,11 @@ } case COMPARE_OP_STR: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); STAT_INC(COMPARE_OP, hit); @@ -1615,9 +1754,11 @@ } case IS_OP: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *b; + right = stack_pointer[-1]; + left = stack_pointer[-2]; int res = Py_Is(left, right) ^ oparg; Py_DECREF(left); Py_DECREF(right); @@ -1628,9 +1769,11 @@ } case 
CONTAINS_OP: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *b; + right = stack_pointer[-1]; + left = stack_pointer[-2]; int res = PySequence_Contains(right, left); Py_DECREF(left); Py_DECREF(right); @@ -1642,11 +1785,13 @@ } case CHECK_EG_MATCH: { - PyObject *match_type = stack_pointer[-1]; - PyObject *exc_value = stack_pointer[-2]; + PyObject *match_type; + PyObject *exc_value; PyObject *rest; PyObject *match; - if (check_except_star_type_valid(tstate, match_type) < 0) { + match_type = stack_pointer[-1]; + exc_value = stack_pointer[-2]; + if (_PyEval_CheckExceptStarTypeValid(tstate, match_type) < 0) { Py_DECREF(exc_value); Py_DECREF(match_type); if (true) goto pop_2_error; @@ -1654,8 +1799,8 @@ match = NULL; rest = NULL; - int res = exception_group_match(exc_value, match_type, - &match, &rest); + int res = _PyEval_ExceptionGroupMatch(exc_value, match_type, + &match, &rest); Py_DECREF(exc_value); Py_DECREF(match_type); if (res < 0) goto pop_2_error; @@ -1666,17 +1811,19 @@ if (!Py_IsNone(match)) { PyErr_SetHandledException(match); } - stack_pointer[-1] = match; stack_pointer[-2] = rest; + stack_pointer[-1] = match; break; } case CHECK_EXC_MATCH: { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *b; + right = stack_pointer[-1]; + left = stack_pointer[-2]; assert(PyExceptionInstance_Check(left)); - if (check_except_type_valid(tstate, right) < 0) { + if (_PyEval_CheckExceptTypeValid(tstate, right) < 0) { Py_DECREF(right); if (true) goto pop_1_error; } @@ -1689,8 +1836,9 @@ } case IS_NONE: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *b; + value = stack_pointer[-1]; if (Py_IsNone(value)) { b = Py_True; } @@ -1703,8 +1851,9 @@ } case GET_LEN: { - PyObject *obj = stack_pointer[-1]; + PyObject *obj; PyObject *len_o; + obj = stack_pointer[-1]; // PUSH(len(TOS)) Py_ssize_t len_i = PyObject_Length(obj); if (len_i < 0) goto error; @@ -1716,14 +1865,17 @@ } case MATCH_CLASS: { - PyObject *names = stack_pointer[-1]; - PyObject *type = stack_pointer[-2]; - PyObject *subject = stack_pointer[-3]; + PyObject *names; + PyObject *type; + PyObject *subject; PyObject *attrs; + names = stack_pointer[-1]; + type = stack_pointer[-2]; + subject = stack_pointer[-3]; // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or // None on failure. assert(PyTuple_CheckExact(names)); - attrs = match_class(tstate, subject, type, oparg, names); + attrs = _PyEval_MatchClass(tstate, subject, type, oparg, names); Py_DECREF(subject); Py_DECREF(type); Py_DECREF(names); @@ -1740,8 +1892,9 @@ } case MATCH_MAPPING: { - PyObject *subject = stack_pointer[-1]; + PyObject *subject; PyObject *res; + subject = stack_pointer[-1]; int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; res = match ? Py_True : Py_False; STACK_GROW(1); @@ -1750,8 +1903,9 @@ } case MATCH_SEQUENCE: { - PyObject *subject = stack_pointer[-1]; + PyObject *subject; PyObject *res; + subject = stack_pointer[-1]; int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; res = match ? Py_True : Py_False; STACK_GROW(1); @@ -1760,11 +1914,13 @@ } case MATCH_KEYS: { - PyObject *keys = stack_pointer[-1]; - PyObject *subject = stack_pointer[-2]; + PyObject *keys; + PyObject *subject; PyObject *values_or_none; + keys = stack_pointer[-1]; + subject = stack_pointer[-2]; // On successful match, PUSH(values). Otherwise, PUSH(None). 
- values_or_none = match_keys(tstate, subject, keys); + values_or_none = _PyEval_MatchKeys(tstate, subject, keys); if (values_or_none == NULL) goto error; STACK_GROW(1); stack_pointer[-1] = values_or_none; @@ -1772,8 +1928,9 @@ } case GET_ITER: { - PyObject *iterable = stack_pointer[-1]; + PyObject *iterable; PyObject *iter; + iterable = stack_pointer[-1]; /* before: [obj]; after [getiter(obj)] */ iter = PyObject_GetIter(iterable); Py_DECREF(iterable); @@ -1783,8 +1940,9 @@ } case GET_YIELD_FROM_ITER: { - PyObject *iterable = stack_pointer[-1]; + PyObject *iterable; PyObject *iter; + iterable = stack_pointer[-1]; /* before: [obj]; after [getiter(obj)] */ if (PyCoro_CheckExact(iterable)) { /* `iterable` is a coroutine */ @@ -1814,14 +1972,16 @@ } case _ITER_CHECK_LIST: { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; + iter = stack_pointer[-1]; DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER); break; } case _IS_ITER_EXHAUSTED_LIST: { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; PyObject *exhausted; + iter = stack_pointer[-1]; _PyListIterObject *it = (_PyListIterObject *)iter; assert(Py_TYPE(iter) == &PyListIter_Type); PyListObject *seq = it->it_seq; @@ -1842,8 +2002,9 @@ } case _ITER_NEXT_LIST: { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; PyObject *next; + iter = stack_pointer[-1]; _PyListIterObject *it = (_PyListIterObject *)iter; assert(Py_TYPE(iter) == &PyListIter_Type); PyListObject *seq = it->it_seq; @@ -1856,14 +2017,16 @@ } case _ITER_CHECK_TUPLE: { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; + iter = stack_pointer[-1]; DEOPT_IF(Py_TYPE(iter) != &PyTupleIter_Type, FOR_ITER); break; } case _IS_ITER_EXHAUSTED_TUPLE: { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; PyObject *exhausted; + iter = stack_pointer[-1]; _PyTupleIterObject *it = (_PyTupleIterObject *)iter; assert(Py_TYPE(iter) == &PyTupleIter_Type); PyTupleObject *seq = it->it_seq; @@ -1884,8 +2047,9 @@ } case _ITER_NEXT_TUPLE: { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; PyObject *next; + iter = stack_pointer[-1]; _PyTupleIterObject *it = (_PyTupleIterObject *)iter; assert(Py_TYPE(iter) == &PyTupleIter_Type); PyTupleObject *seq = it->it_seq; @@ -1898,15 +2062,17 @@ } case _ITER_CHECK_RANGE: { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; + iter = stack_pointer[-1]; _PyRangeIterObject *r = (_PyRangeIterObject *)iter; DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); break; } case _IS_ITER_EXHAUSTED_RANGE: { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; PyObject *exhausted; + iter = stack_pointer[-1]; _PyRangeIterObject *r = (_PyRangeIterObject *)iter; assert(Py_TYPE(r) == &PyRangeIter_Type); exhausted = r->len <= 0 ? 
Py_True : Py_False; @@ -1916,8 +2082,9 @@ } case _ITER_NEXT_RANGE: { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; PyObject *next; + iter = stack_pointer[-1]; _PyRangeIterObject *r = (_PyRangeIterObject *)iter; assert(Py_TYPE(r) == &PyRangeIter_Type); assert(r->len > 0); @@ -1932,10 +2099,13 @@ } case WITH_EXCEPT_START: { - PyObject *val = stack_pointer[-1]; - PyObject *lasti = stack_pointer[-3]; - PyObject *exit_func = stack_pointer[-4]; + PyObject *val; + PyObject *lasti; + PyObject *exit_func; PyObject *res; + val = stack_pointer[-1]; + lasti = stack_pointer[-3]; + exit_func = stack_pointer[-4]; /* At the top of the stack are 4 values: - val: TOP = exc_info() - unused: SECOND = previous exception @@ -1949,7 +2119,12 @@ assert(val && PyExceptionInstance_Check(val)); exc = PyExceptionInstance_Class(val); tb = PyException_GetTraceback(val); - Py_XDECREF(tb); + if (tb == NULL) { + tb = Py_None; + } + else { + Py_DECREF(tb); + } assert(PyLong_Check(lasti)); (void)lasti; // Shut up compiler warning if asserts are off PyObject *stack[4] = {NULL, exc, val, tb}; @@ -1962,8 +2137,9 @@ } case PUSH_EXC_INFO: { - PyObject *new_exc = stack_pointer[-1]; + PyObject *new_exc; PyObject *prev_exc; + new_exc = stack_pointer[-1]; _PyErr_StackItem *exc_info = tstate->exc_info; if (exc_info->exc_value != NULL) { prev_exc = exc_info->exc_value; @@ -1974,16 +2150,19 @@ assert(PyExceptionInstance_Check(new_exc)); exc_info->exc_value = Py_NewRef(new_exc); STACK_GROW(1); - stack_pointer[-1] = new_exc; stack_pointer[-2] = prev_exc; + stack_pointer[-1] = new_exc; break; } case CALL_NO_KW_TYPE_1: { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -2000,10 +2179,13 @@ } case CALL_NO_KW_STR_1: { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -2017,14 +2199,18 @@ STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; + CHECK_EVAL_BREAKER(); break; } case CALL_NO_KW_TUPLE_1: { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -2038,11 +2224,13 @@ STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; + CHECK_EVAL_BREAKER(); break; } case EXIT_INIT_CHECK: { - PyObject *should_be_none = stack_pointer[-1]; + PyObject *should_be_none; + should_be_none = stack_pointer[-1]; assert(STACK_LEVEL() == 2); if (should_be_none != Py_None) { PyErr_Format(PyExc_TypeError, @@ -2055,16 +2243,17 @@ } case CALL_NO_KW_BUILTIN_O: { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 
+ oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; /* Builtin METH_O functions */ ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -2089,20 +2278,22 @@ STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; + CHECK_EVAL_BREAKER(); break; } case CALL_NO_KW_BUILTIN_FAST: { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; /* Builtin METH_FASTCALL functions, without keywords */ ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -2131,25 +2322,27 @@ STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; + CHECK_EVAL_BREAKER(); break; } case CALL_NO_KW_LEN: { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); /* len(o) */ - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } DEOPT_IF(total_args != 1, CALL); - PyInterpreterState *interp = _PyInterpreterState_GET(); + PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable != interp->callable_cache.len, CALL); STAT_INC(CALL, hit); PyObject *arg = args[0]; @@ -2170,21 +2363,22 @@ } case CALL_NO_KW_ISINSTANCE: { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); /* isinstance(o, o2) */ - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } DEOPT_IF(total_args != 2, CALL); - PyInterpreterState *interp = _PyInterpreterState_GET(); + PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable != interp->callable_cache.isinstance, CALL); STAT_INC(CALL, hit); PyObject *cls = args[1]; @@ -2207,25 +2401,27 @@ } case CALL_NO_KW_METHOD_DESCRIPTOR_O: { - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); + PyMethodDescrObject *method = (PyMethodDescrObject 
*)callable; DEOPT_IF(total_args != 2, CALL); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != METH_O, CALL); PyObject *arg = args[1]; PyObject *self = args[0]; - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); STAT_INC(CALL, hit); PyCFunction cfunc = meth->ml_meth; // This is slower but CPython promises to check all non-vectorcall @@ -2243,27 +2439,31 @@ STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; + CHECK_EVAL_BREAKER(); break; } case CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS: { - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); assert(oparg == 0 || oparg == 1); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } DEOPT_IF(total_args != 1, CALL); - PyMethodDescrObject *callable = (PyMethodDescrObject *)SECOND(); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; PyObject *self = args[0]; - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL); STAT_INC(CALL, hit); PyCFunction cfunc = meth->ml_meth; @@ -2281,28 +2481,31 @@ STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; + CHECK_EVAL_BREAKER(); break; } case CALL_NO_KW_METHOD_DESCRIPTOR_FAST: { - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; /* Builtin METH_FASTCALL methods, without keywords */ - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); PyObject *self = args[0]; - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); STAT_INC(CALL, hit); _PyCFunctionFast cfunc = (_PyCFunctionFast)(void(*)(void))meth->ml_meth; @@ -2318,12 +2521,14 @@ STACK_SHRINK(oparg); STACK_SHRINK(1); stack_pointer[-1] = res; + CHECK_EVAL_BREAKER(); break; } case MAKE_FUNCTION: { - PyObject *codeobj = stack_pointer[-1]; + PyObject *codeobj; PyObject *func; + codeobj = stack_pointer[-1]; PyFunctionObject *func_obj = (PyFunctionObject *) PyFunction_New(codeobj, GLOBALS()); @@ -2340,8 +2545,10 @@ } case 
SET_FUNCTION_ATTRIBUTE: { - PyObject *func = stack_pointer[-1]; - PyObject *attr = stack_pointer[-2]; + PyObject *func; + PyObject *attr; + func = stack_pointer[-1]; + attr = stack_pointer[-2]; assert(PyFunction_Check(func)); PyFunctionObject *func_obj = (PyFunctionObject *)func; switch(oparg) { @@ -2372,10 +2579,13 @@ } case BUILD_SLICE: { - PyObject *step = (oparg == 3) ? stack_pointer[-(((oparg == 3) ? 1 : 0))] : NULL; - PyObject *stop = stack_pointer[-(1 + ((oparg == 3) ? 1 : 0))]; - PyObject *start = stack_pointer[-(2 + ((oparg == 3) ? 1 : 0))]; + PyObject *step = NULL; + PyObject *stop; + PyObject *start; PyObject *slice; + if (oparg == 3) { step = stack_pointer[-(oparg == 3 ? 1 : 0)]; } + stop = stack_pointer[-1 - (oparg == 3 ? 1 : 0)]; + start = stack_pointer[-2 - (oparg == 3 ? 1 : 0)]; slice = PySlice_New(start, stop, step); Py_DECREF(start); Py_DECREF(stop); @@ -2388,8 +2598,9 @@ } case CONVERT_VALUE: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *result; + value = stack_pointer[-1]; convertion_func_ptr conv_fn; assert(oparg >= FVC_STR && oparg <= FVC_ASCII); conv_fn = CONVERSION_FUNCTIONS[oparg]; @@ -2401,8 +2612,9 @@ } case FORMAT_SIMPLE: { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; /* If value is a unicode object, then we know the result * of format(value) is value itself. */ if (!PyUnicode_CheckExact(value)) { @@ -2418,9 +2630,11 @@ } case FORMAT_WITH_SPEC: { - PyObject *fmt_spec = stack_pointer[-1]; - PyObject *value = stack_pointer[-2]; + PyObject *fmt_spec; + PyObject *value; PyObject *res; + fmt_spec = stack_pointer[-1]; + value = stack_pointer[-2]; res = PyObject_Format(value, fmt_spec); Py_DECREF(value); Py_DECREF(fmt_spec); @@ -2431,8 +2645,9 @@ } case COPY: { - PyObject *bottom = stack_pointer[-(1 + (oparg-1))]; + PyObject *bottom; PyObject *top; + bottom = stack_pointer[-1 - (oparg-1)]; assert(oparg > 0); top = Py_NewRef(bottom); STACK_GROW(1); @@ -2442,9 +2657,11 @@ case BINARY_OP: { static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); - PyObject *rhs = stack_pointer[-1]; - PyObject *lhs = stack_pointer[-2]; + PyObject *rhs; + PyObject *lhs; PyObject *res; + rhs = stack_pointer[-1]; + lhs = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -2455,10 +2672,10 @@ STAT_INC(BINARY_OP, deferred); DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ - assert(0 <= oparg); - assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); - assert(binary_ops[oparg]); - res = binary_ops[oparg](lhs, rhs); + assert(NB_ADD <= oparg); + assert(oparg <= NB_INPLACE_XOR); + assert(_PyEval_BinaryOps[oparg]); + res = _PyEval_BinaryOps[oparg](lhs, rhs); Py_DECREF(lhs); Py_DECREF(rhs); if (res == NULL) goto pop_2_error; @@ -2468,16 +2685,19 @@ } case SWAP: { - PyObject *top = stack_pointer[-1]; - PyObject *bottom = stack_pointer[-(2 + (oparg-2))]; + PyObject *top; + PyObject *bottom; + top = stack_pointer[-1]; + bottom = stack_pointer[-2 - (oparg-2)]; assert(oparg >= 2); + stack_pointer[-2 - (oparg-2)] = top; stack_pointer[-1] = bottom; - stack_pointer[-(2 + (oparg-2))] = top; break; } case _POP_JUMP_IF_FALSE: { - PyObject *flag = stack_pointer[-1]; + PyObject *flag; + flag = stack_pointer[-1]; if (Py_IsFalse(flag)) { pc = oparg; } @@ -2486,7 +2706,8 @@ } case _POP_JUMP_IF_TRUE: { - PyObject *flag = stack_pointer[-1]; + PyObject *flag; + flag = stack_pointer[-1]; if 
(Py_IsTrue(flag)) { pc = oparg; } @@ -2496,6 +2717,7 @@ case JUMP_TO_TOP: { pc = 0; + CHECK_EVAL_BREAKER(); break; } diff --git a/Python/fileutils.c b/Python/fileutils.c index f262c3e095c9ba..19b23f6bd18b30 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -105,7 +105,7 @@ _Py_device_encoding(int fd) #else if (_PyRuntime.preconfig.utf8_mode) { _Py_DECLARE_STR(utf_8, "utf-8"); - return Py_NewRef(&_Py_STR(utf_8)); + return &_Py_STR(utf_8); } return _Py_GetLocaleEncodingObject(); #endif diff --git a/Python/flowgraph.c b/Python/flowgraph.c index e485ed103147a1..9d7865661a8036 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -24,15 +24,75 @@ typedef _PyCompilerSrcLocation location; typedef _PyCfgJumpTargetLabel jump_target_label; -typedef _PyCfgBasicblock basicblock; -typedef _PyCfgBuilder cfg_builder; -typedef _PyCfgInstruction cfg_instr; + +typedef struct _PyCfgInstruction { + int i_opcode; + int i_oparg; + _PyCompilerSrcLocation i_loc; + struct _PyCfgBasicblock *i_target; /* target block (if jump instruction) */ + struct _PyCfgBasicblock *i_except; /* target block when exception is raised */ +} cfg_instr; + +typedef struct _PyCfgBasicblock { + /* Each basicblock in a compilation unit is linked via b_list in the + reverse order that the block are allocated. b_list points to the next + block in this list, not to be confused with b_next, which is next by + control flow. */ + struct _PyCfgBasicblock *b_list; + /* The label of this block if it is a jump target, -1 otherwise */ + _PyCfgJumpTargetLabel b_label; + /* Exception stack at start of block, used by assembler to create the exception handling table */ + struct _PyCfgExceptStack *b_exceptstack; + /* pointer to an array of instructions, initially NULL */ + cfg_instr *b_instr; + /* If b_next is non-NULL, it is a pointer to the next + block reached by normal control flow. */ + struct _PyCfgBasicblock *b_next; + /* number of instructions used */ + int b_iused; + /* length of instruction array (b_instr) */ + int b_ialloc; + /* Used by add_checks_for_loads_of_unknown_variables */ + uint64_t b_unsafe_locals_mask; + /* Number of predecessors that a block has. */ + int b_predecessors; + /* depth of stack upon entry of block, computed by stackdepth() */ + int b_startdepth; + /* Basic block is an exception handler that preserves lasti */ + unsigned b_preserve_lasti : 1; + /* Used by compiler passes to mark whether they have visited a basic block. */ + unsigned b_visited : 1; + /* b_except_handler is used by the cold-detection algorithm to mark exception targets */ + unsigned b_except_handler : 1; + /* b_cold is true if this block is not perf critical (like an exception handler) */ + unsigned b_cold : 1; + /* b_warm is used by the cold-detection algorithm to mark blocks which are definitely not cold */ + unsigned b_warm : 1; +} basicblock; + + +struct _PyCfgBuilder { + /* The entryblock, at which control flow begins. All blocks of the + CFG are reachable through the b_next links */ + struct _PyCfgBasicblock *g_entryblock; + /* Pointer to the most recently allocated block. By following + b_list links, you can reach all allocated blocks. 
*/ + struct _PyCfgBasicblock *g_block_list; + /* pointer to the block currently being constructed */ + struct _PyCfgBasicblock *g_curblock; + /* label for the next instruction to be placed */ + _PyCfgJumpTargetLabel g_current_label; +}; + +typedef struct _PyCfgBuilder cfg_builder; static const jump_target_label NO_LABEL = {-1}; #define SAME_LABEL(L1, L2) ((L1).id == (L2).id) #define IS_LABEL(L) (!SAME_LABEL((L), (NO_LABEL))) +#define LOCATION(LNO, END_LNO, COL, END_COL) \ + ((const _PyCompilerSrcLocation){(LNO), (END_LNO), (COL), (END_COL)}) static inline int is_block_push(cfg_instr *i) @@ -50,7 +110,7 @@ is_jump(cfg_instr *i) #define INSTR_SET_OP1(I, OP, ARG) \ do { \ assert(OPCODE_HAS_ARG(OP)); \ - _PyCfgInstruction *_instr__ptr_ = (I); \ + cfg_instr *_instr__ptr_ = (I); \ _instr__ptr_->i_opcode = (OP); \ _instr__ptr_->i_oparg = (ARG); \ } while (0); @@ -59,7 +119,7 @@ is_jump(cfg_instr *i) #define INSTR_SET_OP0(I, OP) \ do { \ assert(!OPCODE_HAS_ARG(OP)); \ - _PyCfgInstruction *_instr__ptr_ = (I); \ + cfg_instr *_instr__ptr_ = (I); \ _instr__ptr_->i_opcode = (OP); \ _instr__ptr_->i_oparg = 0; \ } while (0); @@ -137,6 +197,27 @@ basicblock_append_instructions(basicblock *target, basicblock *source) return SUCCESS; } +static cfg_instr * +basicblock_last_instr(const basicblock *b) { + assert(b->b_iused >= 0); + if (b->b_iused > 0) { + assert(b->b_instr != NULL); + return &b->b_instr[b->b_iused - 1]; + } + return NULL; +} + +static inline int +basicblock_nofallthrough(const basicblock *b) { + cfg_instr *last = basicblock_last_instr(b); + return (last && + (IS_SCOPE_EXIT_OPCODE(last->i_opcode) || + IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode))); +} + +#define BB_NO_FALLTHROUGH(B) (basicblock_nofallthrough(B)) +#define BB_HAS_FALLTHROUGH(B) (!basicblock_nofallthrough(B)) + static basicblock * copy_basicblock(cfg_builder *g, basicblock *block) { @@ -154,8 +235,8 @@ copy_basicblock(cfg_builder *g, basicblock *block) return result; } -int -_PyBasicblock_InsertInstruction(basicblock *block, int pos, cfg_instr *instr) { +static int +basicblock_insert_instruction(basicblock *block, int pos, cfg_instr *instr) { RETURN_IF_ERROR(basicblock_next_instr(block)); for (int i = block->b_iused - 1; i > pos; i--) { block->b_instr[i] = block->b_instr[i-1]; @@ -186,7 +267,7 @@ dump_instr(cfg_instr *i) static inline int basicblock_returns(const basicblock *b) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); return last && (last->i_opcode == RETURN_VALUE || last->i_opcode == RETURN_CONST); } @@ -228,26 +309,16 @@ cfg_builder_use_next_block(cfg_builder *g, basicblock *block) return block; } -cfg_instr * -_PyCfg_BasicblockLastInstr(const basicblock *b) { - assert(b->b_iused >= 0); - if (b->b_iused > 0) { - assert(b->b_instr != NULL); - return &b->b_instr[b->b_iused - 1]; - } - return NULL; -} - static inline int basicblock_exits_scope(const basicblock *b) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); return last && IS_SCOPE_EXIT_OPCODE(last->i_opcode); } static bool cfg_builder_current_block_is_terminated(cfg_builder *g) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(g->g_curblock); + cfg_instr *last = basicblock_last_instr(g->g_curblock); if (last && IS_TERMINATOR_OPCODE(last->i_opcode)) { return true; } @@ -300,8 +371,8 @@ cfg_builder_check(cfg_builder *g) } #endif -int -_PyCfgBuilder_Init(cfg_builder *g) +static int +init_cfg_builder(cfg_builder *g) { g->g_block_list = NULL; basicblock *block = 
cfg_builder_new_block(g); @@ -313,9 +384,28 @@ _PyCfgBuilder_Init(cfg_builder *g) return SUCCESS; } +cfg_builder * +_PyCfgBuilder_New(void) +{ + cfg_builder *g = PyMem_Malloc(sizeof(cfg_builder)); + if (g == NULL) { + PyErr_NoMemory(); + return NULL; + } + memset(g, 0, sizeof(cfg_builder)); + if (init_cfg_builder(g) < 0) { + PyMem_Free(g); + return NULL; + } + return g; +} + void -_PyCfgBuilder_Fini(cfg_builder* g) +_PyCfgBuilder_Free(cfg_builder *g) { + if (g == NULL) { + return; + } assert(cfg_builder_check(g)); basicblock *b = g->g_block_list; while (b != NULL) { @@ -326,6 +416,21 @@ _PyCfgBuilder_Fini(cfg_builder* g) PyObject_Free((void *)b); b = next; } + PyMem_Free(g); +} + +int +_PyCfgBuilder_CheckSize(cfg_builder *g) +{ + int nblocks = 0; + for (basicblock *b = g->g_block_list; b != NULL; b = b->b_list) { + nblocks++; + } + if ((size_t)nblocks > SIZE_MAX / sizeof(basicblock *)) { + PyErr_NoMemory(); + return ERROR; + } + return SUCCESS; } int @@ -371,7 +476,7 @@ no_empty_basic_blocks(cfg_builder *g) { static bool no_redundant_jumps(cfg_builder *g) { for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); if (last != NULL) { if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { assert(last->i_target != b->b_next); @@ -390,7 +495,7 @@ no_redundant_jumps(cfg_builder *g) { static int normalize_jumps_in_block(cfg_builder *g, basicblock *b) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); if (last == NULL || !is_jump(last) || IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { return SUCCESS; @@ -439,7 +544,7 @@ normalize_jumps_in_block(cfg_builder *g, basicblock *b) { static int -normalize_jumps(_PyCfgBuilder *g) +normalize_jumps(cfg_builder *g) { basicblock *entryblock = g->g_entryblock; for (basicblock *b = entryblock; b != NULL; b = b->b_next) { @@ -452,14 +557,6 @@ normalize_jumps(_PyCfgBuilder *g) return SUCCESS; } -int -_PyCfg_ResolveJumps(_PyCfgBuilder *g) -{ - RETURN_IF_ERROR(normalize_jumps(g)); - assert(no_redundant_jumps(g)); - return SUCCESS; -} - static int check_cfg(cfg_builder *g) { for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { @@ -518,9 +615,9 @@ translate_jump_labels_to_targets(basicblock *entryblock) } int -_PyCfg_JumpLabelsToTargets(basicblock *entryblock) +_PyCfg_JumpLabelsToTargets(cfg_builder *g) { - return translate_jump_labels_to_targets(entryblock); + return translate_jump_labels_to_targets(g->g_entryblock); } static int @@ -542,10 +639,14 @@ mark_except_handlers(basicblock *entryblock) { } -typedef _PyCfgExceptStack ExceptStack; +struct _PyCfgExceptStack { + basicblock *handlers[CO_MAXBLOCKS+1]; + int depth; +}; + static basicblock * -push_except_block(ExceptStack *stack, cfg_instr *setup) { +push_except_block(struct _PyCfgExceptStack *stack, cfg_instr *setup) { assert(is_block_push(setup)); int opcode = setup->i_opcode; basicblock * target = setup->i_target; @@ -557,19 +658,19 @@ push_except_block(ExceptStack *stack, cfg_instr *setup) { } static basicblock * -pop_except_block(ExceptStack *stack) { +pop_except_block(struct _PyCfgExceptStack *stack) { assert(stack->depth > 0); return stack->handlers[--stack->depth]; } static basicblock * -except_stack_top(ExceptStack *stack) { +except_stack_top(struct _PyCfgExceptStack *stack) { return stack->handlers[stack->depth]; } -static ExceptStack * +static struct _PyCfgExceptStack * make_except_stack(void) { - ExceptStack *new = 
PyMem_Malloc(sizeof(ExceptStack)); + struct _PyCfgExceptStack *new = PyMem_Malloc(sizeof(struct _PyCfgExceptStack)); if (new == NULL) { PyErr_NoMemory(); return NULL; @@ -579,14 +680,14 @@ make_except_stack(void) { return new; } -static ExceptStack * -copy_except_stack(ExceptStack *stack) { - ExceptStack *copy = PyMem_Malloc(sizeof(ExceptStack)); +static struct _PyCfgExceptStack * +copy_except_stack(struct _PyCfgExceptStack *stack) { + struct _PyCfgExceptStack *copy = PyMem_Malloc(sizeof(struct _PyCfgExceptStack)); if (copy == NULL) { PyErr_NoMemory(); return NULL; } - memcpy(copy, stack, sizeof(ExceptStack)); + memcpy(copy, stack, sizeof(struct _PyCfgExceptStack)); return copy; } @@ -604,23 +705,28 @@ make_cfg_traversal_stack(basicblock *entryblock) { return stack; } -Py_LOCAL_INLINE(void) +Py_LOCAL_INLINE(int) stackdepth_push(basicblock ***sp, basicblock *b, int depth) { - assert(b->b_startdepth < 0 || b->b_startdepth == depth); + if (!(b->b_startdepth < 0 || b->b_startdepth == depth)) { + PyErr_Format(PyExc_ValueError, "Invalid CFG, inconsistent stackdepth"); + return ERROR; + } if (b->b_startdepth < depth && b->b_startdepth < 100) { assert(b->b_startdepth < 0); b->b_startdepth = depth; *(*sp)++ = b; } + return SUCCESS; } /* Find the flow path that needs the largest stack. We assume that * cycles in the flow graph have no net effect on the stack depth. */ -int -_PyCfg_Stackdepth(basicblock *entryblock, int code_flags) +static int +calculate_stackdepth(cfg_builder *g) { + basicblock *entryblock = g->g_entryblock; for (basicblock *b = entryblock; b != NULL; b = b->b_next) { b->b_startdepth = INT_MIN; } @@ -629,14 +735,13 @@ _PyCfg_Stackdepth(basicblock *entryblock, int code_flags) return ERROR; } + + int stackdepth = -1; int maxdepth = 0; basicblock **sp = stack; - if (code_flags & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) { - stackdepth_push(&sp, entryblock, 1); - } else { - stackdepth_push(&sp, entryblock, 0); + if (stackdepth_push(&sp, entryblock, 0) < 0) { + goto error; } - while (sp != stack) { basicblock *b = *--sp; int depth = b->b_startdepth; @@ -644,27 +749,40 @@ _PyCfg_Stackdepth(basicblock *entryblock, int code_flags) basicblock *next = b->b_next; for (int i = 0; i < b->b_iused; i++) { cfg_instr *instr = &b->b_instr[i]; - int effect = PyCompile_OpcodeStackEffectWithJump(instr->i_opcode, instr->i_oparg, 0); + int effect = PyCompile_OpcodeStackEffectWithJump( + instr->i_opcode, instr->i_oparg, 0); if (effect == PY_INVALID_STACK_EFFECT) { PyErr_Format(PyExc_SystemError, - "compiler PyCompile_OpcodeStackEffectWithJump(opcode=%d, arg=%i) failed", + "Invalid stack effect for opcode=%d, arg=%i", instr->i_opcode, instr->i_oparg); - return ERROR; + goto error; } int new_depth = depth + effect; - assert(new_depth >= 0); /* invalid code or bug in stackdepth() */ + if (new_depth < 0) { + PyErr_Format(PyExc_ValueError, + "Invalid CFG, stack underflow"); + goto error; + } if (new_depth > maxdepth) { maxdepth = new_depth; } if (HAS_TARGET(instr->i_opcode)) { - effect = PyCompile_OpcodeStackEffectWithJump(instr->i_opcode, instr->i_oparg, 1); - assert(effect != PY_INVALID_STACK_EFFECT); + effect = PyCompile_OpcodeStackEffectWithJump( + instr->i_opcode, instr->i_oparg, 1); + if (effect == PY_INVALID_STACK_EFFECT) { + PyErr_Format(PyExc_SystemError, + "Invalid stack effect for opcode=%d, arg=%i", + instr->i_opcode, instr->i_oparg); + goto error; + } int target_depth = depth + effect; assert(target_depth >= 0); /* invalid code or bug in stackdepth() */ if (target_depth > maxdepth) { 
maxdepth = target_depth; } - stackdepth_push(&sp, instr->i_target, target_depth); + if (stackdepth_push(&sp, instr->i_target, target_depth) < 0) { + goto error; + } } depth = new_depth; assert(!IS_ASSEMBLER_OPCODE(instr->i_opcode)); @@ -678,11 +796,15 @@ _PyCfg_Stackdepth(basicblock *entryblock, int code_flags) } if (next != NULL) { assert(BB_HAS_FALLTHROUGH(b)); - stackdepth_push(&sp, next, depth); + if (stackdepth_push(&sp, next, depth) < 0) { + goto error; + } } } + stackdepth = maxdepth; +error: PyMem_Free(stack); - return maxdepth; + return stackdepth; } static int @@ -691,7 +813,7 @@ label_exception_targets(basicblock *entryblock) { if (todo_stack == NULL) { return ERROR; } - ExceptStack *except_stack = make_except_stack(); + struct _PyCfgExceptStack *except_stack = make_except_stack(); if (except_stack == NULL) { PyMem_Free(todo_stack); PyErr_NoMemory(); @@ -715,7 +837,7 @@ label_exception_targets(basicblock *entryblock) { cfg_instr *instr = &b->b_instr[i]; if (is_block_push(instr)) { if (!instr->i_target->b_visited) { - ExceptStack *copy = copy_except_stack(except_stack); + struct _PyCfgExceptStack *copy = copy_except_stack(except_stack); if (copy == NULL) { goto error; } @@ -734,7 +856,7 @@ label_exception_targets(basicblock *entryblock) { assert(i == b->b_iused -1); if (!instr->i_target->b_visited) { if (BB_HAS_FALLTHROUGH(b)) { - ExceptStack *copy = copy_except_stack(except_stack); + struct _PyCfgExceptStack *copy = copy_except_stack(except_stack); if (copy == NULL) { goto error; } @@ -953,7 +1075,7 @@ remove_redundant_jumps(cfg_builder *g) { */ assert(no_empty_basic_blocks(g)); for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); assert(last != NULL); assert(!IS_ASSEMBLER_OPCODE(last->i_opcode)); if (IS_UNCONDITIONAL_JUMP_OPCODE(last->i_opcode)) { @@ -979,7 +1101,7 @@ remove_redundant_jumps(cfg_builder *g) { */ static int inline_small_exit_blocks(basicblock *bb) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(bb); + cfg_instr *last = basicblock_last_instr(bb); if (last == NULL) { return 0; } @@ -1504,10 +1626,10 @@ optimize_basic_block(PyObject *const_cache, basicblock *bb, PyObject *consts) break; case KW_NAMES: break; - case PUSH_NULL: - if (nextop == LOAD_GLOBAL && (inst[1].i_opcode & 1) == 0) { - INSTR_SET_OP0(inst, NOP); - inst[1].i_oparg |= 1; + case LOAD_GLOBAL: + if (nextop == PUSH_NULL && (oparg & 1) == 0) { + INSTR_SET_OP1(inst, LOAD_GLOBAL, oparg | 1); + INSTR_SET_OP0(&bb->b_instr[i + 1], NOP); } break; case COMPARE_OP: @@ -1715,7 +1837,7 @@ scan_block_for_locals(basicblock *b, basicblock ***sp) if (b->b_next && BB_HAS_FALLTHROUGH(b)) { maybe_push(b->b_next, unsafe_mask, sp); } - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); if (last && is_jump(last)) { assert(last->i_target != NULL); maybe_push(last->i_target, unsafe_mask, sp); @@ -1998,7 +2120,7 @@ mark_cold(basicblock *entryblock) { static int -push_cold_blocks_to_end(cfg_builder *g, int code_flags) { +push_cold_blocks_to_end(cfg_builder *g) { basicblock *entryblock = g->g_entryblock; if (entryblock->b_next == NULL) { /* single basicblock, no need to reorder */ @@ -2020,7 +2142,7 @@ push_cold_blocks_to_end(cfg_builder *g, int code_flags) { b->b_next = explicit_jump; /* set target */ - cfg_instr *last = _PyCfg_BasicblockLastInstr(explicit_jump); + cfg_instr *last = basicblock_last_instr(explicit_jump); last->i_target = explicit_jump->b_next; } } @@ -2071,8 
+2193,8 @@ push_cold_blocks_to_end(cfg_builder *g, int code_flags) { return SUCCESS; } -void -_PyCfg_ConvertPseudoOps(basicblock *entryblock) +static void +convert_pseudo_ops(basicblock *entryblock) { for (basicblock *b = entryblock; b != NULL; b = b->b_next) { for (int i = 0; i < b->b_iused; i++) { @@ -2126,7 +2248,7 @@ duplicate_exits_without_lineno(cfg_builder *g) */ basicblock *entryblock = g->g_entryblock; for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); assert(last != NULL); if (is_jump(last)) { basicblock *target = last->i_target; @@ -2150,7 +2272,7 @@ duplicate_exits_without_lineno(cfg_builder *g) for (basicblock *b = entryblock; b != NULL; b = b->b_next) { if (BB_HAS_FALLTHROUGH(b) && b->b_next && b->b_iused > 0) { if (is_exit_without_lineno(b->b_next)) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); assert(last != NULL); b->b_next->b_instr[0].i_loc = last->i_loc; } @@ -2170,7 +2292,7 @@ duplicate_exits_without_lineno(cfg_builder *g) static void propagate_line_numbers(basicblock *entryblock) { for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); if (last == NULL) { continue; } @@ -2210,7 +2332,7 @@ guarantee_lineno_for_exits(basicblock *entryblock, int firstlineno) { int lineno = firstlineno; assert(lineno > 0); for (basicblock *b = entryblock; b != NULL; b = b->b_next) { - cfg_instr *last = _PyCfg_BasicblockLastInstr(b); + cfg_instr *last = basicblock_last_instr(b); if (last == NULL) { continue; } @@ -2240,7 +2362,7 @@ resolve_line_numbers(cfg_builder *g, int firstlineno) int _PyCfg_OptimizeCodeUnit(cfg_builder *g, PyObject *consts, PyObject *const_cache, - int code_flags, int nlocals, int nparams, int firstlineno) + int nlocals, int nparams, int firstlineno) { assert(cfg_builder_check(g)); /** Preprocessing **/ @@ -2257,7 +2379,274 @@ _PyCfg_OptimizeCodeUnit(cfg_builder *g, PyObject *consts, PyObject *const_cache, g->g_entryblock, nlocals, nparams)); insert_superinstructions(g); - RETURN_IF_ERROR(push_cold_blocks_to_end(g, code_flags)); + RETURN_IF_ERROR(push_cold_blocks_to_end(g)); RETURN_IF_ERROR(resolve_line_numbers(g, firstlineno)); return SUCCESS; } + +static int * +build_cellfixedoffsets(_PyCompile_CodeUnitMetadata *umd) +{ + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); + + int noffsets = ncellvars + nfreevars; + int *fixed = PyMem_New(int, noffsets); + if (fixed == NULL) { + PyErr_NoMemory(); + return NULL; + } + for (int i = 0; i < noffsets; i++) { + fixed[i] = nlocals + i; + } + + PyObject *varname, *cellindex; + Py_ssize_t pos = 0; + while (PyDict_Next(umd->u_cellvars, &pos, &varname, &cellindex)) { + PyObject *varindex = PyDict_GetItem(umd->u_varnames, varname); + if (varindex != NULL) { + assert(PyLong_AS_LONG(cellindex) < INT_MAX); + assert(PyLong_AS_LONG(varindex) < INT_MAX); + int oldindex = (int)PyLong_AS_LONG(cellindex); + int argoffset = (int)PyLong_AS_LONG(varindex); + fixed[oldindex] = argoffset; + } + } + + return fixed; +} + +#define IS_GENERATOR(CF) \ + ((CF) & (CO_GENERATOR | CO_COROUTINE | CO_ASYNC_GENERATOR)) + +static int +insert_prefix_instructions(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, + int *fixed, int nfreevars, int code_flags) +{ + 
assert(umd->u_firstlineno > 0); + + /* Add the generator prefix instructions. */ + if (IS_GENERATOR(code_flags)) { + /* Note that RETURN_GENERATOR + POP_TOP have a net stack effect + * of 0. This is because RETURN_GENERATOR pushes an element + * with _PyFrame_StackPush before switching stacks. + */ + cfg_instr make_gen = { + .i_opcode = RETURN_GENERATOR, + .i_oparg = 0, + .i_loc = LOCATION(umd->u_firstlineno, umd->u_firstlineno, -1, -1), + .i_target = NULL, + }; + RETURN_IF_ERROR(basicblock_insert_instruction(entryblock, 0, &make_gen)); + cfg_instr pop_top = { + .i_opcode = POP_TOP, + .i_oparg = 0, + .i_loc = NO_LOCATION, + .i_target = NULL, + }; + RETURN_IF_ERROR(basicblock_insert_instruction(entryblock, 1, &pop_top)); + } + + /* Set up cells for any variable that escapes, to be put in a closure. */ + const int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + if (ncellvars) { + // umd->u_cellvars has the cells out of order so we sort them + // before adding the MAKE_CELL instructions. Note that we + // adjust for arg cells, which come first. + const int nvars = ncellvars + (int)PyDict_GET_SIZE(umd->u_varnames); + int *sorted = PyMem_RawCalloc(nvars, sizeof(int)); + if (sorted == NULL) { + PyErr_NoMemory(); + return ERROR; + } + for (int i = 0; i < ncellvars; i++) { + sorted[fixed[i]] = i + 1; + } + for (int i = 0, ncellsused = 0; ncellsused < ncellvars; i++) { + int oldindex = sorted[i] - 1; + if (oldindex == -1) { + continue; + } + cfg_instr make_cell = { + .i_opcode = MAKE_CELL, + // This will get fixed in offset_derefs(). + .i_oparg = oldindex, + .i_loc = NO_LOCATION, + .i_target = NULL, + }; + if (basicblock_insert_instruction(entryblock, ncellsused, &make_cell) < 0) { + PyMem_RawFree(sorted); + return ERROR; + } + ncellsused += 1; + } + PyMem_RawFree(sorted); + } + + if (nfreevars) { + cfg_instr copy_frees = { + .i_opcode = COPY_FREE_VARS, + .i_oparg = nfreevars, + .i_loc = NO_LOCATION, + .i_target = NULL, + }; + RETURN_IF_ERROR(basicblock_insert_instruction(entryblock, 0, &copy_frees)); + } + + return SUCCESS; +} + +static int +fix_cell_offsets(_PyCompile_CodeUnitMetadata *umd, basicblock *entryblock, int *fixedmap) +{ + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); + int noffsets = ncellvars + nfreevars; + + // First deal with duplicates (arg cells). + int numdropped = 0; + for (int i = 0; i < noffsets ; i++) { + if (fixedmap[i] == i + nlocals) { + fixedmap[i] -= numdropped; + } + else { + // It was a duplicate (cell/arg). + numdropped += 1; + } + } + + // Then update offsets, either relative to locals or by cell2arg. + for (basicblock *b = entryblock; b != NULL; b = b->b_next) { + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *inst = &b->b_instr[i]; + // This is called before extended args are generated. 
+ assert(inst->i_opcode != EXTENDED_ARG); + int oldoffset = inst->i_oparg; + switch(inst->i_opcode) { + case MAKE_CELL: + case LOAD_CLOSURE: + case LOAD_DEREF: + case STORE_DEREF: + case DELETE_DEREF: + case LOAD_FROM_DICT_OR_DEREF: + assert(oldoffset >= 0); + assert(oldoffset < noffsets); + assert(fixedmap[oldoffset] >= 0); + inst->i_oparg = fixedmap[oldoffset]; + } + } + } + + return numdropped; +} + +static int +prepare_localsplus(_PyCompile_CodeUnitMetadata *umd, cfg_builder *g, int code_flags) +{ + assert(PyDict_GET_SIZE(umd->u_varnames) < INT_MAX); + assert(PyDict_GET_SIZE(umd->u_cellvars) < INT_MAX); + assert(PyDict_GET_SIZE(umd->u_freevars) < INT_MAX); + int nlocals = (int)PyDict_GET_SIZE(umd->u_varnames); + int ncellvars = (int)PyDict_GET_SIZE(umd->u_cellvars); + int nfreevars = (int)PyDict_GET_SIZE(umd->u_freevars); + assert(INT_MAX - nlocals - ncellvars > 0); + assert(INT_MAX - nlocals - ncellvars - nfreevars > 0); + int nlocalsplus = nlocals + ncellvars + nfreevars; + int* cellfixedoffsets = build_cellfixedoffsets(umd); + if (cellfixedoffsets == NULL) { + return ERROR; + } + + // This must be called before fix_cell_offsets(). + if (insert_prefix_instructions(umd, g->g_entryblock, cellfixedoffsets, nfreevars, code_flags)) { + PyMem_Free(cellfixedoffsets); + return ERROR; + } + + int numdropped = fix_cell_offsets(umd, g->g_entryblock, cellfixedoffsets); + PyMem_Free(cellfixedoffsets); // At this point we're done with it. + cellfixedoffsets = NULL; + if (numdropped < 0) { + return ERROR; + } + + nlocalsplus -= numdropped; + return nlocalsplus; +} + +int +_PyCfg_ToInstructionSequence(cfg_builder *g, _PyCompile_InstructionSequence *seq) +{ + int lbl = 0; + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + b->b_label = (jump_target_label){lbl}; + lbl += b->b_iused; + } + for (basicblock *b = g->g_entryblock; b != NULL; b = b->b_next) { + RETURN_IF_ERROR(_PyCompile_InstructionSequence_UseLabel(seq, b->b_label.id)); + for (int i = 0; i < b->b_iused; i++) { + cfg_instr *instr = &b->b_instr[i]; + if (OPCODE_HAS_JUMP(instr->i_opcode)) { + instr->i_oparg = instr->i_target->b_label.id; + } + RETURN_IF_ERROR( + _PyCompile_InstructionSequence_Addop( + seq, instr->i_opcode, instr->i_oparg, instr->i_loc)); + + _PyCompile_ExceptHandlerInfo *hi = &seq->s_instrs[seq->s_used-1].i_except_handler_info; + if (instr->i_except != NULL) { + hi->h_label = instr->i_except->b_label.id; + hi->h_startdepth = instr->i_except->b_startdepth; + hi->h_preserve_lasti = instr->i_except->b_preserve_lasti; + } + else { + hi->h_label = -1; + } + } + } + return SUCCESS; +} + + +int +_PyCfg_OptimizedCfgToInstructionSequence(cfg_builder *g, + _PyCompile_CodeUnitMetadata *umd, int code_flags, + int *stackdepth, int *nlocalsplus, + _PyCompile_InstructionSequence *seq) +{ + *stackdepth = calculate_stackdepth(g); + if (*stackdepth < 0) { + return ERROR; + } + + /* prepare_localsplus adds instructions for generators that push + * and pop an item on the stack. This assertion makes sure there + * is space on the stack for that. + * It should always be true, because a generator must have at + * least one expression or call to INTRINSIC_STOPITERATION_ERROR, + * which requires stackspace. 
+ */ + assert(!(IS_GENERATOR(code_flags) && *stackdepth == 0)); + + *nlocalsplus = prepare_localsplus(umd, g, code_flags); + if (*nlocalsplus < 0) { + return ERROR; + } + + convert_pseudo_ops(g->g_entryblock); + + /* Order of basic blocks must have been determined by now */ + + RETURN_IF_ERROR(normalize_jumps(g)); + assert(no_redundant_jumps(g)); + + /* Can't modify the bytecode after computing jump offsets. */ + if (_PyCfg_ToInstructionSequence(g, seq) < 0) { + return ERROR; + } + + return SUCCESS; +} + diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index b2b0aa6ece4816..ccf43c727b9e0f 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -91,8 +91,8 @@ Py_INCREF(value1); Py_INCREF(value2); STACK_GROW(2); - stack_pointer[-1] = value2; stack_pointer[-2] = value1; + stack_pointer[-1] = value2; DISPATCH(); } @@ -106,15 +106,17 @@ } TARGET(STORE_FAST) { - PyObject *value = stack_pointer[-1]; + PyObject *value; + value = stack_pointer[-1]; SETLOCAL(oparg, value); STACK_SHRINK(1); DISPATCH(); } TARGET(STORE_FAST_LOAD_FAST) { - PyObject *value1 = stack_pointer[-1]; + PyObject *value1; PyObject *value2; + value1 = stack_pointer[-1]; uint32_t oparg1 = oparg >> 4; uint32_t oparg2 = oparg & 15; SETLOCAL(oparg1, value1); @@ -125,8 +127,10 @@ } TARGET(STORE_FAST_STORE_FAST) { - PyObject *value1 = stack_pointer[-1]; - PyObject *value2 = stack_pointer[-2]; + PyObject *value1; + PyObject *value2; + value1 = stack_pointer[-1]; + value2 = stack_pointer[-2]; uint32_t oparg1 = oparg >> 4; uint32_t oparg2 = oparg & 15; SETLOCAL(oparg1, value1); @@ -136,7 +140,8 @@ } TARGET(POP_TOP) { - PyObject *value = stack_pointer[-1]; + PyObject *value; + value = stack_pointer[-1]; Py_DECREF(value); STACK_SHRINK(1); DISPATCH(); @@ -151,14 +156,15 @@ } TARGET(END_FOR) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; + PyObject *value; + // POP_TOP + value = stack_pointer[-1]; { - PyObject *value = _tmp_1; Py_DECREF(value); } + // POP_TOP + value = stack_pointer[-2]; { - PyObject *value = _tmp_2; Py_DECREF(value); } STACK_SHRINK(2); @@ -166,8 +172,10 @@ } TARGET(INSTRUMENTED_END_FOR) { - PyObject *value = stack_pointer[-1]; - PyObject *receiver = stack_pointer[-2]; + PyObject *value; + PyObject *receiver; + value = stack_pointer[-1]; + receiver = stack_pointer[-2]; /* Need to create a fake StopIteration error here, * to conform to PEP 380 */ if (PyGen_Check(receiver)) { @@ -184,8 +192,10 @@ } TARGET(END_SEND) { - PyObject *value = stack_pointer[-1]; - PyObject *receiver = stack_pointer[-2]; + PyObject *value; + PyObject *receiver; + value = stack_pointer[-1]; + receiver = stack_pointer[-2]; Py_DECREF(receiver); STACK_SHRINK(1); stack_pointer[-1] = value; @@ -193,8 +203,10 @@ } TARGET(INSTRUMENTED_END_SEND) { - PyObject *value = stack_pointer[-1]; - PyObject *receiver = stack_pointer[-2]; + PyObject *value; + PyObject *receiver; + value = stack_pointer[-1]; + receiver = stack_pointer[-2]; if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) { PyErr_SetObject(PyExc_StopIteration, value); if (monitor_stop_iteration(tstate, frame, next_instr-1)) { @@ -209,8 +221,9 @@ } TARGET(UNARY_NEGATIVE) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; res = PyNumber_Negative(value); Py_DECREF(value); if (res == NULL) goto pop_1_error; @@ -219,8 +232,9 @@ } TARGET(UNARY_NOT) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; assert(PyBool_Check(value)); 
res = Py_IsFalse(value) ? Py_True : Py_False; stack_pointer[-1] = res; @@ -230,8 +244,9 @@ TARGET(TO_BOOL) { PREDICTED(TO_BOOL); static_assert(INLINE_CACHE_ENTRIES_TO_BOOL == 3, "incorrect cache size"); - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; #if ENABLE_SPECIALIZATION _PyToBoolCache *cache = (_PyToBoolCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -252,7 +267,8 @@ } TARGET(TO_BOOL_BOOL) { - PyObject *value = stack_pointer[-1]; + PyObject *value; + value = stack_pointer[-1]; DEOPT_IF(!PyBool_Check(value), TO_BOOL); STAT_INC(TO_BOOL, hit); next_instr += 3; @@ -260,8 +276,9 @@ } TARGET(TO_BOOL_INT) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; DEOPT_IF(!PyLong_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); if (_PyLong_IsZero((PyLongObject *)value)) { @@ -278,8 +295,9 @@ } TARGET(TO_BOOL_LIST) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; DEOPT_IF(!PyList_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); res = Py_SIZE(value) ? Py_True : Py_False; @@ -290,8 +308,9 @@ } TARGET(TO_BOOL_NONE) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; // This one is a bit weird, because we expect *some* failures: DEOPT_IF(!Py_IsNone(value), TO_BOOL); STAT_INC(TO_BOOL, hit); @@ -302,8 +321,9 @@ } TARGET(TO_BOOL_STR) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; DEOPT_IF(!PyUnicode_CheckExact(value), TO_BOOL); STAT_INC(TO_BOOL, hit); if (value == &_Py_STR(empty)) { @@ -321,8 +341,9 @@ } TARGET(TO_BOOL_ALWAYS_TRUE) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; uint32_t version = read_u32(&next_instr[1].cache); // This one is a bit weird, because we expect *some* failures: assert(version); @@ -336,8 +357,9 @@ } TARGET(UNARY_INVERT) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; res = PyNumber_Invert(value); Py_DECREF(value); if (res == NULL) goto pop_1_error; @@ -346,215 +368,192 @@ } TARGET(BINARY_OP_MULTIPLY_INT) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; + PyObject *right; + PyObject *left; + PyObject *res; + // _GUARD_BOTH_INT + right = stack_pointer[-1]; + left = stack_pointer[-2]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); - _tmp_2 = left; - _tmp_1 = right; } + // _BINARY_OP_MULTIPLY_INT { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; - PyObject *res; STAT_INC(BINARY_OP, hit); res = _PyLong_Multiply((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (res == NULL) goto pop_2_error; - _tmp_2 = res; } - next_instr += 1; STACK_SHRINK(1); - stack_pointer[-1] = _tmp_2; + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_ADD_INT) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; + PyObject *right; + PyObject *left; + PyObject *res; + // _GUARD_BOTH_INT + right = stack_pointer[-1]; + left = stack_pointer[-2]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); - 
_tmp_2 = left; - _tmp_1 = right; } + // _BINARY_OP_ADD_INT { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; - PyObject *res; STAT_INC(BINARY_OP, hit); res = _PyLong_Add((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (res == NULL) goto pop_2_error; - _tmp_2 = res; } - next_instr += 1; STACK_SHRINK(1); - stack_pointer[-1] = _tmp_2; + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_SUBTRACT_INT) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; + PyObject *right; + PyObject *left; + PyObject *res; + // _GUARD_BOTH_INT + right = stack_pointer[-1]; + left = stack_pointer[-2]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; DEOPT_IF(!PyLong_CheckExact(left), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right), BINARY_OP); - _tmp_2 = left; - _tmp_1 = right; } + // _BINARY_OP_SUBTRACT_INT { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; - PyObject *res; STAT_INC(BINARY_OP, hit); res = _PyLong_Subtract((PyLongObject *)left, (PyLongObject *)right); _Py_DECREF_SPECIALIZED(right, (destructor)PyObject_Free); _Py_DECREF_SPECIALIZED(left, (destructor)PyObject_Free); if (res == NULL) goto pop_2_error; - _tmp_2 = res; } - next_instr += 1; STACK_SHRINK(1); - stack_pointer[-1] = _tmp_2; + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_MULTIPLY_FLOAT) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; + PyObject *right; + PyObject *left; + PyObject *res; + // _GUARD_BOTH_FLOAT + right = stack_pointer[-1]; + left = stack_pointer[-2]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); - _tmp_2 = left; - _tmp_1 = right; } + // _BINARY_OP_MULTIPLY_FLOAT { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; - PyObject *res; STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval * ((PyFloatObject *)right)->ob_fval; DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res); - _tmp_2 = res; } - next_instr += 1; STACK_SHRINK(1); - stack_pointer[-1] = _tmp_2; + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_ADD_FLOAT) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; + PyObject *right; + PyObject *left; + PyObject *res; + // _GUARD_BOTH_FLOAT + right = stack_pointer[-1]; + left = stack_pointer[-2]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); - _tmp_2 = left; - _tmp_1 = right; } + // _BINARY_OP_ADD_FLOAT { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; - PyObject *res; STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval + ((PyFloatObject *)right)->ob_fval; DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res); - _tmp_2 = res; } - next_instr += 1; STACK_SHRINK(1); - stack_pointer[-1] = _tmp_2; + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_SUBTRACT_FLOAT) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; + PyObject *right; + PyObject *left; + PyObject *res; + // _GUARD_BOTH_FLOAT + right = stack_pointer[-1]; + left = stack_pointer[-2]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; DEOPT_IF(!PyFloat_CheckExact(left), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right), BINARY_OP); - _tmp_2 = 
left; - _tmp_1 = right; } + // _BINARY_OP_SUBTRACT_FLOAT { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; - PyObject *res; STAT_INC(BINARY_OP, hit); double dres = ((PyFloatObject *)left)->ob_fval - ((PyFloatObject *)right)->ob_fval; DECREF_INPUTS_AND_REUSE_FLOAT(left, right, dres, res); - _tmp_2 = res; } - next_instr += 1; STACK_SHRINK(1); - stack_pointer[-1] = _tmp_2; + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_ADD_UNICODE) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; + PyObject *right; + PyObject *left; + PyObject *res; + // _GUARD_BOTH_UNICODE + right = stack_pointer[-1]; + left = stack_pointer[-2]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(!PyUnicode_CheckExact(right), BINARY_OP); - _tmp_2 = left; - _tmp_1 = right; } + // _BINARY_OP_ADD_UNICODE { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; - PyObject *res; STAT_INC(BINARY_OP, hit); res = PyUnicode_Concat(left, right); _Py_DECREF_SPECIALIZED(left, _PyUnicode_ExactDealloc); _Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc); if (res == NULL) goto pop_2_error; - _tmp_2 = res; } - next_instr += 1; STACK_SHRINK(1); - stack_pointer[-1] = _tmp_2; + stack_pointer[-1] = res; + next_instr += 1; DISPATCH(); } TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { - PyObject *_tmp_1 = stack_pointer[-1]; - PyObject *_tmp_2 = stack_pointer[-2]; + PyObject *right; + PyObject *left; + // _GUARD_BOTH_UNICODE + right = stack_pointer[-1]; + left = stack_pointer[-2]; { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; DEOPT_IF(!PyUnicode_CheckExact(left), BINARY_OP); DEOPT_IF(!PyUnicode_CheckExact(right), BINARY_OP); - _tmp_2 = left; - _tmp_1 = right; } + // _BINARY_OP_INPLACE_ADD_UNICODE { - PyObject *right = _tmp_1; - PyObject *left = _tmp_2; _Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP]; assert(true_next.op.code == STORE_FAST); PyObject **target_local = &GETLOCAL(true_next.op.arg); @@ -586,9 +585,11 @@ TARGET(BINARY_SUBSCR) { PREDICTED(BINARY_SUBSCR); static_assert(INLINE_CACHE_ENTRIES_BINARY_SUBSCR == 1, "incorrect cache size"); - PyObject *sub = stack_pointer[-1]; - PyObject *container = stack_pointer[-2]; + PyObject *sub; + PyObject *container; PyObject *res; + sub = stack_pointer[-1]; + container = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -610,10 +611,13 @@ } TARGET(BINARY_SLICE) { - PyObject *stop = stack_pointer[-1]; - PyObject *start = stack_pointer[-2]; - PyObject *container = stack_pointer[-3]; + PyObject *stop; + PyObject *start; + PyObject *container; PyObject *res; + stop = stack_pointer[-1]; + start = stack_pointer[-2]; + container = stack_pointer[-3]; PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); // Can't use ERROR_IF() here, because we haven't // DECREF'ed container yet, and we still own slice. 
@@ -632,10 +636,14 @@ } TARGET(STORE_SLICE) { - PyObject *stop = stack_pointer[-1]; - PyObject *start = stack_pointer[-2]; - PyObject *container = stack_pointer[-3]; - PyObject *v = stack_pointer[-4]; + PyObject *stop; + PyObject *start; + PyObject *container; + PyObject *v; + stop = stack_pointer[-1]; + start = stack_pointer[-2]; + container = stack_pointer[-3]; + v = stack_pointer[-4]; PyObject *slice = _PyBuildSlice_ConsumeRefs(start, stop); int err; if (slice == NULL) { @@ -653,9 +661,11 @@ } TARGET(BINARY_SUBSCR_LIST_INT) { - PyObject *sub = stack_pointer[-1]; - PyObject *list = stack_pointer[-2]; + PyObject *sub; + PyObject *list; PyObject *res; + sub = stack_pointer[-1]; + list = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), BINARY_SUBSCR); @@ -675,10 +685,36 @@ DISPATCH(); } + TARGET(BINARY_SUBSCR_STR_INT) { + PyObject *sub; + PyObject *str; + PyObject *res; + sub = stack_pointer[-1]; + str = stack_pointer[-2]; + DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); + DEOPT_IF(!PyUnicode_CheckExact(str), BINARY_SUBSCR); + DEOPT_IF(!_PyLong_IsNonNegativeCompact((PyLongObject *)sub), BINARY_SUBSCR); + Py_ssize_t index = ((PyLongObject*)sub)->long_value.ob_digit[0]; + DEOPT_IF(PyUnicode_GET_LENGTH(str) <= index, BINARY_SUBSCR); + // Specialize for reading an ASCII character from any string: + Py_UCS4 c = PyUnicode_READ_CHAR(str, index); + DEOPT_IF(Py_ARRAY_LENGTH(_Py_SINGLETON(strings).ascii) <= c, BINARY_SUBSCR); + STAT_INC(BINARY_SUBSCR, hit); + res = (PyObject*)&_Py_SINGLETON(strings).ascii[c]; + _Py_DECREF_SPECIALIZED(sub, (destructor)PyObject_Free); + Py_DECREF(str); + STACK_SHRINK(1); + stack_pointer[-1] = res; + next_instr += 1; + DISPATCH(); + } + TARGET(BINARY_SUBSCR_TUPLE_INT) { - PyObject *sub = stack_pointer[-1]; - PyObject *tuple = stack_pointer[-2]; + PyObject *sub; + PyObject *tuple; PyObject *res; + sub = stack_pointer[-1]; + tuple = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(sub), BINARY_SUBSCR); DEOPT_IF(!PyTuple_CheckExact(tuple), BINARY_SUBSCR); @@ -699,9 +735,11 @@ } TARGET(BINARY_SUBSCR_DICT) { - PyObject *sub = stack_pointer[-1]; - PyObject *dict = stack_pointer[-2]; + PyObject *sub; + PyObject *dict; PyObject *res; + sub = stack_pointer[-1]; + dict = stack_pointer[-2]; DEOPT_IF(!PyDict_CheckExact(dict), BINARY_SUBSCR); STAT_INC(BINARY_SUBSCR, hit); res = PyDict_GetItemWithError(dict, sub); @@ -723,8 +761,10 @@ } TARGET(BINARY_SUBSCR_GETITEM) { - PyObject *sub = stack_pointer[-1]; - PyObject *container = stack_pointer[-2]; + PyObject *sub; + PyObject *container; + sub = stack_pointer[-1]; + container = stack_pointer[-2]; DEOPT_IF(tstate->interp->eval_frame, BINARY_SUBSCR); PyTypeObject *tp = Py_TYPE(container); DEOPT_IF(!PyType_HasFeature(tp, Py_TPFLAGS_HEAPTYPE), BINARY_SUBSCR); @@ -747,19 +787,24 @@ SKIP_OVER(INLINE_CACHE_ENTRIES_BINARY_SUBSCR); frame->return_offset = 0; DISPATCH_INLINED(new_frame); + STACK_SHRINK(1); } TARGET(LIST_APPEND) { - PyObject *v = stack_pointer[-1]; - PyObject *list = stack_pointer[-(2 + (oparg-1))]; + PyObject *v; + PyObject *list; + v = stack_pointer[-1]; + list = stack_pointer[-2 - (oparg-1)]; if (_PyList_AppendTakeRef((PyListObject *)list, v) < 0) goto pop_1_error; STACK_SHRINK(1); DISPATCH(); } TARGET(SET_ADD) { - PyObject *v = stack_pointer[-1]; - PyObject *set = stack_pointer[-(2 + (oparg-1))]; + PyObject *v; + PyObject *set; + v = stack_pointer[-1]; + set = stack_pointer[-2 - (oparg-1)]; int err = PySet_Add(set, v); Py_DECREF(v); if (err) goto pop_1_error; @@ -770,9 
+815,12 @@ TARGET(STORE_SUBSCR) { PREDICTED(STORE_SUBSCR); static_assert(INLINE_CACHE_ENTRIES_STORE_SUBSCR == 1, "incorrect cache size"); - PyObject *sub = stack_pointer[-1]; - PyObject *container = stack_pointer[-2]; - PyObject *v = stack_pointer[-3]; + PyObject *sub; + PyObject *container; + PyObject *v; + sub = stack_pointer[-1]; + container = stack_pointer[-2]; + v = stack_pointer[-3]; #if ENABLE_SPECIALIZATION _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -795,9 +843,12 @@ } TARGET(STORE_SUBSCR_LIST_INT) { - PyObject *sub = stack_pointer[-1]; - PyObject *list = stack_pointer[-2]; - PyObject *value = stack_pointer[-3]; + PyObject *sub; + PyObject *list; + PyObject *value; + sub = stack_pointer[-1]; + list = stack_pointer[-2]; + value = stack_pointer[-3]; DEOPT_IF(!PyLong_CheckExact(sub), STORE_SUBSCR); DEOPT_IF(!PyList_CheckExact(list), STORE_SUBSCR); @@ -820,9 +871,12 @@ } TARGET(STORE_SUBSCR_DICT) { - PyObject *sub = stack_pointer[-1]; - PyObject *dict = stack_pointer[-2]; - PyObject *value = stack_pointer[-3]; + PyObject *sub; + PyObject *dict; + PyObject *value; + sub = stack_pointer[-1]; + dict = stack_pointer[-2]; + value = stack_pointer[-3]; DEOPT_IF(!PyDict_CheckExact(dict), STORE_SUBSCR); STAT_INC(STORE_SUBSCR, hit); int err = _PyDict_SetItem_Take2((PyDictObject *)dict, sub, value); @@ -834,8 +888,10 @@ } TARGET(DELETE_SUBSCR) { - PyObject *sub = stack_pointer[-1]; - PyObject *container = stack_pointer[-2]; + PyObject *sub; + PyObject *container; + sub = stack_pointer[-1]; + container = stack_pointer[-2]; /* del container[sub] */ int err = PyObject_DelItem(container, sub); Py_DECREF(container); @@ -846,10 +902,11 @@ } TARGET(CALL_INTRINSIC_1) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; assert(oparg <= MAX_INTRINSIC_1); - res = _PyIntrinsics_UnaryFunctions[oparg](tstate, value); + res = _PyIntrinsics_UnaryFunctions[oparg].func(tstate, value); Py_DECREF(value); if (res == NULL) goto pop_1_error; stack_pointer[-1] = res; @@ -857,11 +914,13 @@ } TARGET(CALL_INTRINSIC_2) { - PyObject *value1 = stack_pointer[-1]; - PyObject *value2 = stack_pointer[-2]; + PyObject *value1; + PyObject *value2; PyObject *res; + value1 = stack_pointer[-1]; + value2 = stack_pointer[-2]; assert(oparg <= MAX_INTRINSIC_2); - res = _PyIntrinsics_BinaryFunctions[oparg](tstate, value2, value1); + res = _PyIntrinsics_BinaryFunctions[oparg].func(tstate, value2, value1); Py_DECREF(value2); Py_DECREF(value1); if (res == NULL) goto pop_2_error; @@ -871,7 +930,8 @@ } TARGET(RAISE_VARARGS) { - PyObject **args = (stack_pointer - oparg); + PyObject **args; + args = stack_pointer - oparg; PyObject *cause = NULL, *exc = NULL; switch (oparg) { case 2: @@ -881,7 +941,11 @@ exc = args[0]; /* fall through */ case 0: - if (do_raise(tstate, exc, cause)) { STACK_SHRINK(oparg); goto exception_unwind; } + if (do_raise(tstate, exc, cause)) { + assert(oparg == 0); + monitor_reraise(tstate, frame, next_instr-1); + goto exception_unwind; + } break; default: _PyErr_SetString(tstate, PyExc_SystemError, @@ -889,22 +953,26 @@ break; } if (true) { STACK_SHRINK(oparg); goto error; } + STACK_SHRINK(oparg); } TARGET(INTERPRETER_EXIT) { - PyObject *retval = stack_pointer[-1]; + PyObject *retval; + retval = stack_pointer[-1]; assert(frame == &entry_frame); assert(_PyFrame_IsIncomplete(frame)); /* Restore previous cframe and return. 
*/ tstate->cframe = cframe.previous; assert(tstate->cframe->current_frame == frame->previous); assert(!_PyErr_Occurred(tstate)); - _Py_LeaveRecursiveCallTstate(tstate); + tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; return retval; + STACK_SHRINK(1); } TARGET(RETURN_VALUE) { - PyObject *retval = stack_pointer[-1]; + PyObject *retval; + retval = stack_pointer[-1]; STACK_SHRINK(1); assert(EMPTY()); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -917,10 +985,12 @@ frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; + STACK_SHRINK(1); } TARGET(INSTRUMENTED_RETURN_VALUE) { - PyObject *retval = stack_pointer[-1]; + PyObject *retval; + retval = stack_pointer[-1]; int err = _Py_call_instrumentation_arg( tstate, PY_MONITORING_EVENT_PY_RETURN, frame, next_instr-1, retval); @@ -937,6 +1007,7 @@ frame->prev_instr += frame->return_offset; _PyFrame_StackPush(frame, retval); goto resume_frame; + STACK_SHRINK(1); } TARGET(RETURN_CONST) { @@ -976,8 +1047,9 @@ } TARGET(GET_AITER) { - PyObject *obj = stack_pointer[-1]; + PyObject *obj; PyObject *iter; + obj = stack_pointer[-1]; unaryfunc getter = NULL; PyTypeObject *type = Py_TYPE(obj); @@ -1013,8 +1085,9 @@ } TARGET(GET_ANEXT) { - PyObject *aiter = stack_pointer[-1]; + PyObject *aiter; PyObject *awaitable; + aiter = stack_pointer[-1]; unaryfunc getter = NULL; PyObject *next_iter = NULL; PyTypeObject *type = Py_TYPE(aiter); @@ -1063,12 +1136,13 @@ } TARGET(GET_AWAITABLE) { - PyObject *iterable = stack_pointer[-1]; + PyObject *iterable; PyObject *iter; + iterable = stack_pointer[-1]; iter = _PyCoro_GetAwaitableIter(iterable); if (iter == NULL) { - format_awaitable_error(tstate, Py_TYPE(iterable), oparg); + _PyEval_FormatAwaitableError(tstate, Py_TYPE(iterable), oparg); } Py_DECREF(iterable); @@ -1095,9 +1169,11 @@ TARGET(SEND) { PREDICTED(SEND); static_assert(INLINE_CACHE_ENTRIES_SEND == 1, "incorrect cache size"); - PyObject *v = stack_pointer[-1]; - PyObject *receiver = stack_pointer[-2]; + PyObject *v; + PyObject *receiver; PyObject *retval; + v = stack_pointer[-1]; + receiver = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PySendCache *cache = (_PySendCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1150,8 +1226,10 @@ } TARGET(SEND_GEN) { - PyObject *v = stack_pointer[-1]; - PyObject *receiver = stack_pointer[-2]; + PyObject *v; + PyObject *receiver; + v = stack_pointer[-1]; + receiver = stack_pointer[-2]; DEOPT_IF(tstate->interp->eval_frame, SEND); PyGenObject *gen = (PyGenObject *)receiver; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type && @@ -1170,7 +1248,8 @@ } TARGET(INSTRUMENTED_YIELD_VALUE) { - PyObject *retval = stack_pointer[-1]; + PyObject *retval; + retval = stack_pointer[-1]; assert(frame != &entry_frame); assert(oparg >= 0); /* make the generator identify this as HAS_ARG */ PyGenObject *gen = _PyFrame_GetGenerator(frame); @@ -1191,7 +1270,8 @@ } TARGET(YIELD_VALUE) { - PyObject *retval = stack_pointer[-1]; + PyObject *retval; + retval = stack_pointer[-1]; // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. 
@@ -1211,7 +1291,8 @@ } TARGET(POP_EXCEPT) { - PyObject *exc_value = stack_pointer[-1]; + PyObject *exc_value; + exc_value = stack_pointer[-1]; _PyErr_StackItem *exc_info = tstate->exc_info; Py_XSETREF(exc_info->exc_value, exc_value); STACK_SHRINK(1); @@ -1219,8 +1300,10 @@ } TARGET(RERAISE) { - PyObject *exc = stack_pointer[-1]; - PyObject **values = (stack_pointer - (1 + oparg)); + PyObject *exc; + PyObject **values; + exc = stack_pointer[-1]; + values = stack_pointer - 1 - oparg; assert(oparg >= 0 && oparg <= 2); if (oparg) { PyObject *lasti = values[0]; @@ -1237,12 +1320,16 @@ assert(exc && PyExceptionInstance_Check(exc)); Py_INCREF(exc); _PyErr_SetRaisedException(tstate, exc); + monitor_reraise(tstate, frame, next_instr-1); goto exception_unwind; + STACK_SHRINK(1); } TARGET(END_ASYNC_FOR) { - PyObject *exc = stack_pointer[-1]; - PyObject *awaitable = stack_pointer[-2]; + PyObject *exc; + PyObject *awaitable; + exc = stack_pointer[-1]; + awaitable = stack_pointer[-2]; assert(exc && PyExceptionInstance_Check(exc)); if (PyErr_GivenExceptionMatches(exc, PyExc_StopAsyncIteration)) { Py_DECREF(awaitable); @@ -1251,6 +1338,7 @@ else { Py_INCREF(exc); _PyErr_SetRaisedException(tstate, exc); + monitor_reraise(tstate, frame, next_instr-1); goto exception_unwind; } STACK_SHRINK(2); @@ -1258,11 +1346,14 @@ } TARGET(CLEANUP_THROW) { - PyObject *exc_value = stack_pointer[-1]; - PyObject *last_sent_val = stack_pointer[-2]; - PyObject *sub_iter = stack_pointer[-3]; + PyObject *exc_value; + PyObject *last_sent_val; + PyObject *sub_iter; PyObject *none; PyObject *value; + exc_value = stack_pointer[-1]; + last_sent_val = stack_pointer[-2]; + sub_iter = stack_pointer[-3]; assert(throwflag); assert(exc_value && PyExceptionInstance_Check(exc_value)); if (PyErr_GivenExceptionMatches(exc_value, PyExc_StopIteration)) { @@ -1274,11 +1365,12 @@ } else { _PyErr_SetRaisedException(tstate, Py_NewRef(exc_value)); + monitor_reraise(tstate, frame, next_instr-1); goto exception_unwind; } STACK_SHRINK(1); - stack_pointer[-1] = value; stack_pointer[-2] = none; + stack_pointer[-1] = value; DISPATCH(); } @@ -1304,7 +1396,8 @@ } TARGET(STORE_NAME) { - PyObject *v = stack_pointer[-1]; + PyObject *v; + v = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); PyObject *ns = LOCALS(); int err; @@ -1336,9 +1429,9 @@ err = PyObject_DelItem(ns, name); // Can't use ERROR_IF here. 
if (err != 0) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, - name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, + name); goto error; } DISPATCH(); @@ -1347,7 +1440,8 @@ TARGET(UNPACK_SEQUENCE) { PREDICTED(UNPACK_SEQUENCE); static_assert(INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE == 1, "incorrect cache size"); - PyObject *seq = stack_pointer[-1]; + PyObject *seq; + seq = stack_pointer[-1]; #if ENABLE_SPECIALIZATION _PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1359,7 +1453,7 @@ DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ PyObject **top = stack_pointer + oparg - 1; - int res = unpack_iterable(tstate, seq, oparg, -1, top); + int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top); Py_DECREF(seq); if (res == 0) goto pop_1_error; STACK_SHRINK(1); @@ -1369,8 +1463,10 @@ } TARGET(UNPACK_SEQUENCE_TWO_TUPLE) { - PyObject *seq = stack_pointer[-1]; - PyObject **values = stack_pointer - (1); + PyObject *seq; + PyObject **values; + seq = stack_pointer[-1]; + values = stack_pointer - 1; DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != 2, UNPACK_SEQUENCE); assert(oparg == 2); @@ -1385,8 +1481,10 @@ } TARGET(UNPACK_SEQUENCE_TUPLE) { - PyObject *seq = stack_pointer[-1]; - PyObject **values = stack_pointer - (1); + PyObject *seq; + PyObject **values; + seq = stack_pointer[-1]; + values = stack_pointer - 1; DEOPT_IF(!PyTuple_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyTuple_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); STAT_INC(UNPACK_SEQUENCE, hit); @@ -1402,8 +1500,10 @@ } TARGET(UNPACK_SEQUENCE_LIST) { - PyObject *seq = stack_pointer[-1]; - PyObject **values = stack_pointer - (1); + PyObject *seq; + PyObject **values; + seq = stack_pointer[-1]; + values = stack_pointer - 1; DEOPT_IF(!PyList_CheckExact(seq), UNPACK_SEQUENCE); DEOPT_IF(PyList_GET_SIZE(seq) != oparg, UNPACK_SEQUENCE); STAT_INC(UNPACK_SEQUENCE, hit); @@ -1419,10 +1519,11 @@ } TARGET(UNPACK_EX) { - PyObject *seq = stack_pointer[-1]; + PyObject *seq; + seq = stack_pointer[-1]; int totalargs = 1 + (oparg & 0xFF) + (oparg >> 8); PyObject **top = stack_pointer + totalargs - 1; - int res = unpack_iterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); + int res = _PyEval_UnpackIterable(tstate, seq, oparg & 0xFF, oparg >> 8, top); Py_DECREF(seq); if (res == 0) goto pop_1_error; STACK_GROW((oparg & 0xFF) + (oparg >> 8)); @@ -1432,8 +1533,10 @@ TARGET(STORE_ATTR) { PREDICTED(STORE_ATTR); static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size"); - PyObject *owner = stack_pointer[-1]; - PyObject *v = stack_pointer[-2]; + PyObject *owner; + PyObject *v; + owner = stack_pointer[-1]; + v = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PyAttrCache *cache = (_PyAttrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1456,7 +1559,8 @@ } TARGET(DELETE_ATTR) { - PyObject *owner = stack_pointer[-1]; + PyObject *owner; + owner = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); int err = PyObject_DelAttr(owner, name); Py_DECREF(owner); @@ -1466,7 +1570,8 @@ } TARGET(STORE_GLOBAL) { - PyObject *v = stack_pointer[-1]; + PyObject *v; + v = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); int err = PyDict_SetItem(GLOBALS(), name, v); Py_DECREF(v); @@ -1482,8 +1587,8 @@ // Can't use ERROR_IF here. 
if (err != 0) { if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) { - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); } goto error; } @@ -1491,27 +1596,25 @@ } TARGET(LOAD_LOCALS) { - PyObject *_tmp_1; - { - PyObject *locals; - locals = LOCALS(); - if (locals == NULL) { - _PyErr_SetString(tstate, PyExc_SystemError, - "no locals found"); - if (true) goto error; - } - Py_INCREF(locals); - _tmp_1 = locals; + PyObject *locals; + locals = LOCALS(); + if (locals == NULL) { + _PyErr_SetString(tstate, PyExc_SystemError, + "no locals found"); + if (true) goto error; } + Py_INCREF(locals); STACK_GROW(1); - stack_pointer[-1] = _tmp_1; + stack_pointer[-1] = locals; DISPATCH(); } TARGET(LOAD_NAME) { - PyObject *_tmp_1; + PyObject *locals; + PyObject *mod_or_class_dict; + PyObject *v; + // _LOAD_LOCALS { - PyObject *locals; locals = LOCALS(); if (locals == NULL) { _PyErr_SetString(tstate, PyExc_SystemError, @@ -1519,11 +1622,10 @@ if (true) goto error; } Py_INCREF(locals); - _tmp_1 = locals; } + // _LOAD_FROM_DICT_OR_GLOBALS + mod_or_class_dict = locals; { - PyObject *mod_or_class_dict = _tmp_1; - PyObject *v; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { Py_DECREF(mod_or_class_dict); @@ -1543,62 +1645,58 @@ goto error; } if (v == NULL) { - format_exc_check_arg( + _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); goto error; } } } - _tmp_1 = v; } STACK_GROW(1); - stack_pointer[-1] = _tmp_1; + stack_pointer[-1] = v; DISPATCH(); } TARGET(LOAD_FROM_DICT_OR_GLOBALS) { - PyObject *_tmp_1 = stack_pointer[-1]; - { - PyObject *mod_or_class_dict = _tmp_1; - PyObject *v; - PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); - if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { - Py_DECREF(mod_or_class_dict); + PyObject *mod_or_class_dict; + PyObject *v; + mod_or_class_dict = stack_pointer[-1]; + PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); + if (PyMapping_GetOptionalItem(mod_or_class_dict, name, &v) < 0) { + Py_DECREF(mod_or_class_dict); + goto error; + } + Py_DECREF(mod_or_class_dict); + if (v == NULL) { + v = PyDict_GetItemWithError(GLOBALS(), name); + if (v != NULL) { + Py_INCREF(v); + } + else if (_PyErr_Occurred(tstate)) { goto error; } - Py_DECREF(mod_or_class_dict); - if (v == NULL) { - v = PyDict_GetItemWithError(GLOBALS(), name); - if (v != NULL) { - Py_INCREF(v); - } - else if (_PyErr_Occurred(tstate)) { + else { + if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { goto error; } - else { - if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) { - goto error; - } - if (v == NULL) { - format_exc_check_arg( - tstate, PyExc_NameError, - NAME_ERROR_MSG, name); - goto error; - } + if (v == NULL) { + _PyEval_FormatExcCheckArg( + tstate, PyExc_NameError, + NAME_ERROR_MSG, name); + goto error; } } - _tmp_1 = v; } - stack_pointer[-1] = _tmp_1; + stack_pointer[-1] = v; DISPATCH(); } TARGET(LOAD_GLOBAL) { PREDICTED(LOAD_GLOBAL); static_assert(INLINE_CACHE_ENTRIES_LOAD_GLOBAL == 4, "incorrect cache size"); + PyObject *res; PyObject *null = NULL; - PyObject *v; #if ENABLE_SPECIALIZATION _PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -1614,30 +1712,30 @@ if (PyDict_CheckExact(GLOBALS()) && PyDict_CheckExact(BUILTINS())) { - v = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), - (PyDictObject *)BUILTINS(), - name); - if (v == NULL) { + 
res = _PyDict_LoadGlobal((PyDictObject *)GLOBALS(), + (PyDictObject *)BUILTINS(), + name); + if (res == NULL) { if (!_PyErr_Occurred(tstate)) { /* _PyDict_LoadGlobal() returns NULL without raising * an exception if the key doesn't exist */ - format_exc_check_arg(tstate, PyExc_NameError, - NAME_ERROR_MSG, name); + _PyEval_FormatExcCheckArg(tstate, PyExc_NameError, + NAME_ERROR_MSG, name); } if (true) goto error; } - Py_INCREF(v); + Py_INCREF(res); } else { /* Slow-path if globals or builtins is not a dict */ /* namespace 1: globals */ - if (PyMapping_GetOptionalItem(GLOBALS(), name, &v) < 0) goto error; - if (v == NULL) { + if (PyMapping_GetOptionalItem(GLOBALS(), name, &res) < 0) goto error; + if (res == NULL) { /* namespace 2: builtins */ - if (PyMapping_GetOptionalItem(BUILTINS(), name, &v) < 0) goto error; - if (v == NULL) { - format_exc_check_arg( + if (PyMapping_GetOptionalItem(BUILTINS(), name, &res) < 0) goto error; + if (res == NULL) { + _PyEval_FormatExcCheckArg( tstate, PyExc_NameError, NAME_ERROR_MSG, name); if (true) goto error; @@ -1647,17 +1745,16 @@ null = NULL; STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = v; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = null; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = res; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = null; } next_instr += 4; DISPATCH(); } TARGET(LOAD_GLOBAL_MODULE) { - PyObject *_tmp_1; - PyObject *_tmp_2; - { - } + PyObject *res; + PyObject *null = NULL; + // _GUARD_GLOBALS_VERSION { uint16_t version = read_u16(&next_instr[1].cache); PyDictObject *dict = (PyDictObject *)GLOBALS(); @@ -1665,11 +1762,8 @@ DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); assert(DK_IS_UNICODE(dict->ma_keys)); } + // _LOAD_GLOBAL_MODULE { - } - { - PyObject *null = NULL; - PyObject *res; uint16_t index = read_u16(&next_instr[3].cache); PyDictObject *dict = (PyDictObject *)GLOBALS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys); @@ -1678,22 +1772,19 @@ Py_INCREF(res); STAT_INC(LOAD_GLOBAL, hit); null = NULL; - if (oparg & 1) { _tmp_2 = null; } - _tmp_1 = res; } - next_instr += 4; STACK_GROW(1); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = _tmp_1; - if (oparg & 1) { stack_pointer[-2] = _tmp_2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = res; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = null; } + next_instr += 4; DISPATCH(); } TARGET(LOAD_GLOBAL_BUILTIN) { - PyObject *_tmp_1; - PyObject *_tmp_2; - { - } + PyObject *res; + PyObject *null = NULL; + // _GUARD_GLOBALS_VERSION { uint16_t version = read_u16(&next_instr[1].cache); PyDictObject *dict = (PyDictObject *)GLOBALS(); @@ -1701,6 +1792,7 @@ DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); assert(DK_IS_UNICODE(dict->ma_keys)); } + // _GUARD_BUILTINS_VERSION { uint16_t version = read_u16(&next_instr[2].cache); PyDictObject *dict = (PyDictObject *)BUILTINS(); @@ -1708,9 +1800,8 @@ DEOPT_IF(dict->ma_keys->dk_version != version, LOAD_GLOBAL); assert(DK_IS_UNICODE(dict->ma_keys)); } + // _LOAD_GLOBAL_BUILTINS { - PyObject *null = NULL; - PyObject *res; uint16_t index = read_u16(&next_instr[3].cache); PyDictObject *bdict = (PyDictObject *)BUILTINS(); PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys); @@ -1719,14 +1810,12 @@ Py_INCREF(res); STAT_INC(LOAD_GLOBAL, hit); null = NULL; - if (oparg & 1) { _tmp_2 = null; } - _tmp_1 = res; } - next_instr += 4; STACK_GROW(1); STACK_GROW(((oparg & 1) ? 
1 : 0)); - stack_pointer[-1] = _tmp_1; - if (oparg & 1) { stack_pointer[-2] = _tmp_2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = res; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = null; } + next_instr += 4; DISPATCH(); } @@ -1755,7 +1844,7 @@ // Can't use ERROR_IF here. // Fortunately we don't need its superpower. if (oldobj == NULL) { - format_exc_unbound(tstate, _PyFrame_GetCode(frame), oparg); + _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); goto error; } PyCell_SET(cell, NULL); @@ -1764,8 +1853,9 @@ } TARGET(LOAD_FROM_DICT_OR_DEREF) { - PyObject *class_dict = stack_pointer[-1]; + PyObject *class_dict; PyObject *value; + class_dict = stack_pointer[-1]; PyObject *name; assert(class_dict); assert(oparg >= 0 && oparg < _PyFrame_GetCode(frame)->co_nlocalsplus); @@ -1779,7 +1869,7 @@ PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { - format_exc_unbound(tstate, _PyFrame_GetCode(frame), oparg); + _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); goto error; } Py_INCREF(value); @@ -1793,7 +1883,7 @@ PyObject *cell = GETLOCAL(oparg); value = PyCell_GET(cell); if (value == NULL) { - format_exc_unbound(tstate, _PyFrame_GetCode(frame), oparg); + _PyEval_FormatExcUnbound(tstate, _PyFrame_GetCode(frame), oparg); if (true) goto error; } Py_INCREF(value); @@ -1803,7 +1893,8 @@ } TARGET(STORE_DEREF) { - PyObject *v = stack_pointer[-1]; + PyObject *v; + v = stack_pointer[-1]; PyObject *cell = GETLOCAL(oparg); PyObject *oldobj = PyCell_GET(cell); PyCell_SET(cell, v); @@ -1827,8 +1918,9 @@ } TARGET(BUILD_STRING) { - PyObject **pieces = (stack_pointer - oparg); + PyObject **pieces; PyObject *str; + pieces = stack_pointer - oparg; str = _PyUnicode_JoinArray(&_Py_STR(empty), pieces, oparg); for (int _i = oparg; --_i >= 0;) { Py_DECREF(pieces[_i]); @@ -1841,8 +1933,9 @@ } TARGET(BUILD_TUPLE) { - PyObject **values = (stack_pointer - oparg); + PyObject **values; PyObject *tup; + values = stack_pointer - oparg; tup = _PyTuple_FromArraySteal(values, oparg); if (tup == NULL) { STACK_SHRINK(oparg); goto error; } STACK_SHRINK(oparg); @@ -1852,8 +1945,9 @@ } TARGET(BUILD_LIST) { - PyObject **values = (stack_pointer - oparg); + PyObject **values; PyObject *list; + values = stack_pointer - oparg; list = _PyList_FromArraySteal(values, oparg); if (list == NULL) { STACK_SHRINK(oparg); goto error; } STACK_SHRINK(oparg); @@ -1863,8 +1957,10 @@ } TARGET(LIST_EXTEND) { - PyObject *iterable = stack_pointer[-1]; - PyObject *list = stack_pointer[-(2 + (oparg-1))]; + PyObject *iterable; + PyObject *list; + iterable = stack_pointer[-1]; + list = stack_pointer[-2 - (oparg-1)]; PyObject *none_val = _PyList_Extend((PyListObject *)list, iterable); if (none_val == NULL) { if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError) && @@ -1885,8 +1981,10 @@ } TARGET(SET_UPDATE) { - PyObject *iterable = stack_pointer[-1]; - PyObject *set = stack_pointer[-(2 + (oparg-1))]; + PyObject *iterable; + PyObject *set; + iterable = stack_pointer[-1]; + set = stack_pointer[-2 - (oparg-1)]; int err = _PySet_Update(set, iterable); Py_DECREF(iterable); if (err < 0) goto pop_1_error; @@ -1895,8 +1993,9 @@ } TARGET(BUILD_SET) { - PyObject **values = (stack_pointer - oparg); + PyObject **values; PyObject *set; + values = stack_pointer - oparg; set = PySet_New(NULL); if (set == NULL) goto error; @@ -1918,8 +2017,9 @@ } TARGET(BUILD_MAP) { - PyObject **values = (stack_pointer - oparg*2); + PyObject **values; PyObject *map; + values = stack_pointer - oparg*2; map = _PyDict_FromItems( 
values, 2, values+1, 2, @@ -1979,9 +2079,11 @@ } TARGET(BUILD_CONST_KEY_MAP) { - PyObject *keys = stack_pointer[-1]; - PyObject **values = (stack_pointer - (1 + oparg)); + PyObject *keys; + PyObject **values; PyObject *map; + keys = stack_pointer[-1]; + values = stack_pointer - 1 - oparg; if (!PyTuple_CheckExact(keys) || PyTuple_GET_SIZE(keys) != (Py_ssize_t)oparg) { _PyErr_SetString(tstate, PyExc_SystemError, @@ -2002,8 +2104,10 @@ } TARGET(DICT_UPDATE) { - PyObject *update = stack_pointer[-1]; - PyObject *dict = PEEK(oparg + 1); // update is still on the stack + PyObject *update; + PyObject *dict; + update = stack_pointer[-1]; + dict = stack_pointer[-2 - (oparg - 1)]; if (PyDict_Update(dict, update) < 0) { if (_PyErr_ExceptionMatches(tstate, PyExc_AttributeError)) { _PyErr_Format(tstate, PyExc_TypeError, @@ -2019,11 +2123,14 @@ } TARGET(DICT_MERGE) { - PyObject *update = stack_pointer[-1]; - PyObject *dict = PEEK(oparg + 1); // update is still on the stack - + PyObject *update; + PyObject *dict; + PyObject *callable; + update = stack_pointer[-1]; + dict = stack_pointer[-2 - (oparg - 1)]; + callable = stack_pointer[-5 - (oparg - 1)]; if (_PyDict_MergeEx(dict, update, 2) < 0) { - format_kwargs_error(tstate, PEEK(3 + oparg), update); + _PyEval_FormatKwargsError(tstate, callable, update); Py_DECREF(update); if (true) goto pop_1_error; } @@ -2033,9 +2140,12 @@ } TARGET(MAP_ADD) { - PyObject *value = stack_pointer[-1]; - PyObject *key = stack_pointer[-2]; - PyObject *dict = PEEK(oparg + 2); // key, value are still on the stack + PyObject *value; + PyObject *key; + PyObject *dict; + value = stack_pointer[-1]; + key = stack_pointer[-2]; + dict = stack_pointer[-3 - (oparg - 1)]; assert(PyDict_CheckExact(dict)); /* dict[key] = value */ // Do not DECREF INPUTS because the function steals the references @@ -2050,16 +2160,21 @@ // don't want to specialize instrumented instructions INCREMENT_ADAPTIVE_COUNTER(cache->counter); GO_TO_INSTRUCTION(LOAD_SUPER_ATTR); + STACK_SHRINK(2); + STACK_GROW(((oparg & 1) ? 1 : 0)); } TARGET(LOAD_SUPER_ATTR) { PREDICTED(LOAD_SUPER_ATTR); static_assert(INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR == 1, "incorrect cache size"); - PyObject *self = stack_pointer[-1]; - PyObject *class = stack_pointer[-2]; - PyObject *global_super = stack_pointer[-3]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *self; + PyObject *class; + PyObject *global_super; + PyObject *attr; + PyObject *null = NULL; + self = stack_pointer[-1]; + class = stack_pointer[-2]; + global_super = stack_pointer[-3]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); int load_method = oparg & 1; #if ENABLE_SPECIALIZATION @@ -2105,47 +2220,51 @@ Py_DECREF(class); Py_DECREF(self); if (super == NULL) goto pop_3_error; - res = PyObject_GetAttr(super, name); + attr = PyObject_GetAttr(super, name); Py_DECREF(super); - if (res == NULL) goto pop_3_error; + if (attr == NULL) goto pop_3_error; + null = NULL; STACK_SHRINK(2); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 
1 : 0)] = null; } next_instr += 1; DISPATCH(); } TARGET(LOAD_SUPER_ATTR_ATTR) { - PyObject *self = stack_pointer[-1]; - PyObject *class = stack_pointer[-2]; - PyObject *global_super = stack_pointer[-3]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *self; + PyObject *class; + PyObject *global_super; + PyObject *attr; + self = stack_pointer[-1]; + class = stack_pointer[-2]; + global_super = stack_pointer[-3]; assert(!(oparg & 1)); DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); STAT_INC(LOAD_SUPER_ATTR, hit); PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); - res = _PySuper_Lookup((PyTypeObject *)class, self, name, NULL); + attr = _PySuper_Lookup((PyTypeObject *)class, self, name, NULL); Py_DECREF(global_super); Py_DECREF(class); Py_DECREF(self); - if (res == NULL) goto pop_3_error; + if (attr == NULL) goto pop_3_error; STACK_SHRINK(2); - STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (0 ? 1 : 0)] = attr; next_instr += 1; DISPATCH(); } TARGET(LOAD_SUPER_ATTR_METHOD) { - PyObject *self = stack_pointer[-1]; - PyObject *class = stack_pointer[-2]; - PyObject *global_super = stack_pointer[-3]; - PyObject *res2; - PyObject *res; + PyObject *self; + PyObject *class; + PyObject *global_super; + PyObject *attr; + PyObject *self_or_null; + self = stack_pointer[-1]; + class = stack_pointer[-2]; + global_super = stack_pointer[-3]; assert(oparg & 1); DEOPT_IF(global_super != (PyObject *)&PySuper_Type, LOAD_SUPER_ATTR); DEOPT_IF(!PyType_Check(class), LOAD_SUPER_ATTR); @@ -2153,24 +2272,23 @@ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2); PyTypeObject *cls = (PyTypeObject *)class; int method_found = 0; - res2 = _PySuper_Lookup(cls, self, name, - cls->tp_getattro == PyObject_GenericGetAttr ? &method_found : NULL); + attr = _PySuper_Lookup(cls, self, name, + Py_TYPE(self)->tp_getattro == PyObject_GenericGetAttr ? &method_found : NULL); Py_DECREF(global_super); Py_DECREF(class); - if (res2 == NULL) { + if (attr == NULL) { Py_DECREF(self); if (true) goto pop_3_error; } if (method_found) { - res = self; // transfer ownership + self_or_null = self; // transfer ownership } else { Py_DECREF(self); - res = res2; - res2 = NULL; + self_or_null = NULL; } STACK_SHRINK(1); - stack_pointer[-1] = res; - stack_pointer[-2] = res2; + stack_pointer[-2] = attr; + stack_pointer[-1] = self_or_null; next_instr += 1; DISPATCH(); } @@ -2178,9 +2296,10 @@ TARGET(LOAD_ATTR) { PREDICTED(LOAD_ATTR); static_assert(INLINE_CACHE_ENTRIES_LOAD_ATTR == 9, "incorrect cache size"); - PyObject *owner = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *self_or_null = NULL; + owner = stack_pointer[-1]; #if ENABLE_SPECIALIZATION _PyAttrCache *cache = (_PyAttrCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -2195,16 +2314,15 @@ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1); if (oparg & 1) { /* Designed to work in tandem with CALL, pushes two values. */ - PyObject* meth = NULL; - if (_PyObject_GetMethod(owner, name, &meth)) { + attr = NULL; + if (_PyObject_GetMethod(owner, name, &attr)) { /* We can bypass temporary bound method object. meth is unbound method and obj is self. meth | self | arg1 | ... 
| argN */ - assert(meth != NULL); // No errors on this branch - res2 = meth; - res = owner; // Transfer ownership + assert(attr != NULL); // No errors on this branch + self_or_null = owner; // Transfer ownership } else { /* meth is not an unbound method (but a regular attr, or @@ -2215,71 +2333,67 @@ NULL | meth | arg1 | ... | argN */ Py_DECREF(owner); - if (meth == NULL) goto pop_1_error; - res2 = NULL; - res = meth; + if (attr == NULL) goto pop_1_error; + self_or_null = NULL; } } else { /* Classic, pushes one value. */ - res = PyObject_GetAttr(owner, name); + attr = PyObject_GetAttr(owner, name); Py_DECREF(owner); - if (res == NULL) goto pop_1_error; + if (attr == NULL) goto pop_1_error; } STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = self_or_null; } next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_INSTANCE_VALUE) { - PyObject *_tmp_1; - PyObject *_tmp_2 = stack_pointer[-1]; - { - } + PyObject *owner; + PyObject *attr; + PyObject *null = NULL; + // _GUARD_TYPE_VERSION + owner = stack_pointer[-1]; { - PyObject *owner = _tmp_2; uint32_t type_version = read_u32(&next_instr[1].cache); PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); - _tmp_2 = owner; } + // _CHECK_MANAGED_OBJECT_HAS_VALUES { - PyObject *owner = _tmp_2; assert(Py_TYPE(owner)->tp_dictoffset < 0); assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - _tmp_2 = owner; + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), + LOAD_ATTR); } + // _LOAD_ATTR_INSTANCE_VALUE { - PyObject *owner = _tmp_2; - PyObject *res2 = NULL; - PyObject *res; uint16_t index = read_u16(&next_instr[3].cache); PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - res = _PyDictOrValues_GetValues(dorv)->values[index]; - DEOPT_IF(res == NULL, LOAD_ATTR); + attr = _PyDictOrValues_GetValues(dorv)->values[index]; + DEOPT_IF(attr == NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - res2 = NULL; + Py_INCREF(attr); + null = NULL; Py_DECREF(owner); - if (oparg & 1) { _tmp_2 = res2; } - _tmp_1 = res; } - next_instr += 9; STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = _tmp_1; - if (oparg & 1) { stack_pointer[-2] = _tmp_2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 
1 : 0)] = null; } + next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_MODULE) { - PyObject *owner = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *null = NULL; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR); @@ -2289,23 +2403,24 @@ assert(dict->ma_keys->dk_kind == DICT_KEYS_UNICODE); assert(index < dict->ma_keys->dk_nentries); PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + index; - res = ep->me_value; - DEOPT_IF(res == NULL, LOAD_ATTR); + attr = ep->me_value; + DEOPT_IF(attr == NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - res2 = NULL; + Py_INCREF(attr); + null = NULL; Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = null; } next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_WITH_HINT) { - PyObject *owner = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *null = NULL; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); PyTypeObject *tp = Py_TYPE(owner); @@ -2323,78 +2438,82 @@ if (DK_IS_UNICODE(dict->ma_keys)) { PyDictUnicodeEntry *ep = DK_UNICODE_ENTRIES(dict->ma_keys) + hint; DEOPT_IF(ep->me_key != name, LOAD_ATTR); - res = ep->me_value; + attr = ep->me_value; } else { PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint; DEOPT_IF(ep->me_key != name, LOAD_ATTR); - res = ep->me_value; + attr = ep->me_value; } - DEOPT_IF(res == NULL, LOAD_ATTR); + DEOPT_IF(attr == NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - res2 = NULL; + Py_INCREF(attr); + null = NULL; Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = null; } next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_SLOT) { - PyObject *owner = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *null = NULL; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); char *addr = (char *)owner + index; - res = *(PyObject **)addr; - DEOPT_IF(res == NULL, LOAD_ATTR); + attr = *(PyObject **)addr; + DEOPT_IF(attr == NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); - Py_INCREF(res); - res2 = NULL; + Py_INCREF(attr); + null = NULL; Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 
1 : 0)] = null; } next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_CLASS) { - PyObject *cls = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *null = NULL; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); - DEOPT_IF(!PyType_Check(cls), LOAD_ATTR); - DEOPT_IF(((PyTypeObject *)cls)->tp_version_tag != type_version, + DEOPT_IF(!PyType_Check(owner), LOAD_ATTR); + DEOPT_IF(((PyTypeObject *)owner)->tp_version_tag != type_version, LOAD_ATTR); assert(type_version != 0); STAT_INC(LOAD_ATTR, hit); - res2 = NULL; - res = descr; - assert(res != NULL); - Py_INCREF(res); - Py_DECREF(cls); + null = NULL; + attr = descr; + assert(attr != NULL); + Py_INCREF(attr); + Py_DECREF(owner); STACK_GROW(((oparg & 1) ? 1 : 0)); - stack_pointer[-1] = res; - if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; } + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 1 : 0)] = null; } next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_PROPERTY) { - PyObject *owner = stack_pointer[-1]; + PyObject *owner; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *fget = read_obj(&next_instr[5].cache); + assert((oparg & 1) == 0); DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); @@ -2411,9 +2530,7 @@ Py_INCREF(fget); _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 1); // Manipulate stack directly because we exit with DISPATCH_INLINED(). - SET_TOP(NULL); - int shrink_stack = !(oparg & 1); - STACK_SHRINK(shrink_stack); + STACK_SHRINK(1); new_frame->localsplus[0] = owner; SKIP_OVER(INLINE_CACHE_ENTRIES_LOAD_ATTR); frame->return_offset = 0; @@ -2421,10 +2538,12 @@ } TARGET(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN) { - PyObject *owner = stack_pointer[-1]; + PyObject *owner; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t func_version = read_u32(&next_instr[3].cache); PyObject *getattribute = read_obj(&next_instr[5].cache); + assert((oparg & 1) == 0); DEOPT_IF(tstate->interp->eval_frame, LOAD_ATTR); PyTypeObject *cls = Py_TYPE(owner); DEOPT_IF(cls->tp_version_tag != type_version, LOAD_ATTR); @@ -2442,9 +2561,7 @@ Py_INCREF(f); _PyInterpreterFrame *new_frame = _PyFrame_PushUnchecked(tstate, f, 2); // Manipulate stack directly because we exit with DISPATCH_INLINED(). 
- SET_TOP(NULL); - int shrink_stack = !(oparg & 1); - STACK_SHRINK(shrink_stack); + STACK_SHRINK(1); new_frame->localsplus[0] = owner; new_frame->localsplus[1] = Py_NewRef(name); SKIP_OVER(INLINE_CACHE_ENTRIES_LOAD_ATTR); @@ -2453,8 +2570,10 @@ } TARGET(STORE_ATTR_INSTANCE_VALUE) { - PyObject *owner = stack_pointer[-1]; - PyObject *value = stack_pointer[-2]; + PyObject *owner; + PyObject *value; + owner = stack_pointer[-1]; + value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); PyTypeObject *tp = Py_TYPE(owner); @@ -2480,8 +2599,10 @@ } TARGET(STORE_ATTR_WITH_HINT) { - PyObject *owner = stack_pointer[-1]; - PyObject *value = stack_pointer[-2]; + PyObject *owner; + PyObject *value; + owner = stack_pointer[-1]; + value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t hint = read_u16(&next_instr[3].cache); PyTypeObject *tp = Py_TYPE(owner); @@ -2528,8 +2649,10 @@ } TARGET(STORE_ATTR_SLOT) { - PyObject *owner = stack_pointer[-1]; - PyObject *value = stack_pointer[-2]; + PyObject *owner; + PyObject *value; + owner = stack_pointer[-1]; + value = stack_pointer[-2]; uint32_t type_version = read_u32(&next_instr[1].cache); uint16_t index = read_u16(&next_instr[3].cache); PyTypeObject *tp = Py_TYPE(owner); @@ -2549,9 +2672,11 @@ TARGET(COMPARE_OP) { PREDICTED(COMPARE_OP); static_assert(INLINE_CACHE_ENTRIES_COMPARE_OP == 1, "incorrect cache size"); - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -2580,9 +2705,11 @@ } TARGET(COMPARE_OP_FLOAT) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; DEOPT_IF(!PyFloat_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyFloat_CheckExact(right), COMPARE_OP); STAT_INC(COMPARE_OP, hit); @@ -2601,9 +2728,11 @@ } TARGET(COMPARE_OP_INT) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; DEOPT_IF(!PyLong_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyLong_CheckExact(right), COMPARE_OP); DEOPT_IF(!_PyLong_IsCompact((PyLongObject *)left), COMPARE_OP); @@ -2626,9 +2755,11 @@ } TARGET(COMPARE_OP_STR) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; DEOPT_IF(!PyUnicode_CheckExact(left), COMPARE_OP); DEOPT_IF(!PyUnicode_CheckExact(right), COMPARE_OP); STAT_INC(COMPARE_OP, hit); @@ -2648,9 +2779,11 @@ } TARGET(IS_OP) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *b; + right = stack_pointer[-1]; + left = stack_pointer[-2]; int res = Py_Is(left, right) ^ oparg; Py_DECREF(left); Py_DECREF(right); @@ -2661,9 +2794,11 @@ } TARGET(CONTAINS_OP) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *b; + right = stack_pointer[-1]; + left = stack_pointer[-2]; int res = PySequence_Contains(right, left); Py_DECREF(left); Py_DECREF(right); @@ -2675,11 +2810,13 @@ 
} TARGET(CHECK_EG_MATCH) { - PyObject *match_type = stack_pointer[-1]; - PyObject *exc_value = stack_pointer[-2]; + PyObject *match_type; + PyObject *exc_value; PyObject *rest; PyObject *match; - if (check_except_star_type_valid(tstate, match_type) < 0) { + match_type = stack_pointer[-1]; + exc_value = stack_pointer[-2]; + if (_PyEval_CheckExceptStarTypeValid(tstate, match_type) < 0) { Py_DECREF(exc_value); Py_DECREF(match_type); if (true) goto pop_2_error; @@ -2687,8 +2824,8 @@ match = NULL; rest = NULL; - int res = exception_group_match(exc_value, match_type, - &match, &rest); + int res = _PyEval_ExceptionGroupMatch(exc_value, match_type, + &match, &rest); Py_DECREF(exc_value); Py_DECREF(match_type); if (res < 0) goto pop_2_error; @@ -2699,17 +2836,19 @@ if (!Py_IsNone(match)) { PyErr_SetHandledException(match); } - stack_pointer[-1] = match; stack_pointer[-2] = rest; + stack_pointer[-1] = match; DISPATCH(); } TARGET(CHECK_EXC_MATCH) { - PyObject *right = stack_pointer[-1]; - PyObject *left = stack_pointer[-2]; + PyObject *right; + PyObject *left; PyObject *b; + right = stack_pointer[-1]; + left = stack_pointer[-2]; assert(PyExceptionInstance_Check(left)); - if (check_except_type_valid(tstate, right) < 0) { + if (_PyEval_CheckExceptTypeValid(tstate, right) < 0) { Py_DECREF(right); if (true) goto pop_1_error; } @@ -2722,9 +2861,11 @@ } TARGET(IMPORT_NAME) { - PyObject *fromlist = stack_pointer[-1]; - PyObject *level = stack_pointer[-2]; + PyObject *fromlist; + PyObject *level; PyObject *res; + fromlist = stack_pointer[-1]; + level = stack_pointer[-2]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); res = import_name(tstate, frame, name, fromlist, level); Py_DECREF(level); @@ -2736,8 +2877,9 @@ } TARGET(IMPORT_FROM) { - PyObject *from = stack_pointer[-1]; + PyObject *from; PyObject *res; + from = stack_pointer[-1]; PyObject *name = GETITEM(FRAME_CO_NAMES, oparg); res = import_from(tstate, from, name); if (res == NULL) goto error; @@ -2758,7 +2900,14 @@ JUMPBY(1-oparg); #if ENABLE_SPECIALIZATION here[1].cache += (1 << OPTIMIZER_BITS_IN_COUNTER); - if (here[1].cache > tstate->interp->optimizer_backedge_threshold) { + if (here[1].cache > tstate->interp->optimizer_backedge_threshold && + // Double-check that the opcode isn't instrumented or something: + here->op.code == JUMP_BACKWARD && + // _PyOptimizer_BackEdge is going to change frame->prev_instr, + // which breaks line event calculations: + next_instr->op.code != INSTRUMENTED_LINE + ) + { OBJECT_STAT_INC(optimization_attempts); frame = _PyOptimizer_BackEdge(frame, here, next_instr, stack_pointer); if (frame == NULL) { @@ -2791,7 +2940,8 @@ } TARGET(POP_JUMP_IF_FALSE) { - PyObject *cond = stack_pointer[-1]; + PyObject *cond; + cond = stack_pointer[-1]; assert(PyBool_Check(cond)); JUMPBY(oparg * Py_IsFalse(cond)); STACK_SHRINK(1); @@ -2799,7 +2949,8 @@ } TARGET(POP_JUMP_IF_TRUE) { - PyObject *cond = stack_pointer[-1]; + PyObject *cond; + cond = stack_pointer[-1]; assert(PyBool_Check(cond)); JUMPBY(oparg * Py_IsTrue(cond)); STACK_SHRINK(1); @@ -2807,10 +2958,12 @@ } TARGET(POP_JUMP_IF_NONE) { - PyObject *_tmp_1 = stack_pointer[-1]; + PyObject *value; + PyObject *b; + PyObject *cond; + // IS_NONE + value = stack_pointer[-1]; { - PyObject *value = _tmp_1; - PyObject *b; if (Py_IsNone(value)) { b = Py_True; } @@ -2818,10 +2971,10 @@ b = Py_False; Py_DECREF(value); } - _tmp_1 = b; } + // POP_JUMP_IF_TRUE + cond = b; { - PyObject *cond = _tmp_1; assert(PyBool_Check(cond)); JUMPBY(oparg * Py_IsTrue(cond)); } @@ -2830,10 +2983,12 @@ } 
TARGET(POP_JUMP_IF_NOT_NONE) { - PyObject *_tmp_1 = stack_pointer[-1]; + PyObject *value; + PyObject *b; + PyObject *cond; + // IS_NONE + value = stack_pointer[-1]; { - PyObject *value = _tmp_1; - PyObject *b; if (Py_IsNone(value)) { b = Py_True; } @@ -2841,10 +2996,10 @@ b = Py_False; Py_DECREF(value); } - _tmp_1 = b; } + // POP_JUMP_IF_FALSE + cond = b; { - PyObject *cond = _tmp_1; assert(PyBool_Check(cond)); JUMPBY(oparg * Py_IsFalse(cond)); } @@ -2863,8 +3018,9 @@ } TARGET(GET_LEN) { - PyObject *obj = stack_pointer[-1]; + PyObject *obj; PyObject *len_o; + obj = stack_pointer[-1]; // PUSH(len(TOS)) Py_ssize_t len_i = PyObject_Length(obj); if (len_i < 0) goto error; @@ -2876,14 +3032,17 @@ } TARGET(MATCH_CLASS) { - PyObject *names = stack_pointer[-1]; - PyObject *type = stack_pointer[-2]; - PyObject *subject = stack_pointer[-3]; + PyObject *names; + PyObject *type; + PyObject *subject; PyObject *attrs; + names = stack_pointer[-1]; + type = stack_pointer[-2]; + subject = stack_pointer[-3]; // Pop TOS and TOS1. Set TOS to a tuple of attributes on success, or // None on failure. assert(PyTuple_CheckExact(names)); - attrs = match_class(tstate, subject, type, oparg, names); + attrs = _PyEval_MatchClass(tstate, subject, type, oparg, names); Py_DECREF(subject); Py_DECREF(type); Py_DECREF(names); @@ -2900,8 +3059,9 @@ } TARGET(MATCH_MAPPING) { - PyObject *subject = stack_pointer[-1]; + PyObject *subject; PyObject *res; + subject = stack_pointer[-1]; int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_MAPPING; res = match ? Py_True : Py_False; STACK_GROW(1); @@ -2910,8 +3070,9 @@ } TARGET(MATCH_SEQUENCE) { - PyObject *subject = stack_pointer[-1]; + PyObject *subject; PyObject *res; + subject = stack_pointer[-1]; int match = Py_TYPE(subject)->tp_flags & Py_TPFLAGS_SEQUENCE; res = match ? Py_True : Py_False; STACK_GROW(1); @@ -2920,11 +3081,13 @@ } TARGET(MATCH_KEYS) { - PyObject *keys = stack_pointer[-1]; - PyObject *subject = stack_pointer[-2]; + PyObject *keys; + PyObject *subject; PyObject *values_or_none; + keys = stack_pointer[-1]; + subject = stack_pointer[-2]; // On successful match, PUSH(values). Otherwise, PUSH(None). 
- values_or_none = match_keys(tstate, subject, keys); + values_or_none = _PyEval_MatchKeys(tstate, subject, keys); if (values_or_none == NULL) goto error; STACK_GROW(1); stack_pointer[-1] = values_or_none; @@ -2932,8 +3095,9 @@ } TARGET(GET_ITER) { - PyObject *iterable = stack_pointer[-1]; + PyObject *iterable; PyObject *iter; + iterable = stack_pointer[-1]; /* before: [obj]; after [getiter(obj)] */ iter = PyObject_GetIter(iterable); Py_DECREF(iterable); @@ -2943,8 +3107,9 @@ } TARGET(GET_YIELD_FROM_ITER) { - PyObject *iterable = stack_pointer[-1]; + PyObject *iterable; PyObject *iter; + iterable = stack_pointer[-1]; /* before: [obj]; after [getiter(obj)] */ if (PyCoro_CheckExact(iterable)) { /* `iterable` is a coroutine */ @@ -2976,8 +3141,9 @@ TARGET(FOR_ITER) { PREDICTED(FOR_ITER); static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size"); - PyObject *iter = stack_pointer[-1]; + PyObject *iter; PyObject *next; + iter = stack_pointer[-1]; #if ENABLE_SPECIALIZATION _PyForIterCache *cache = (_PyForIterCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -3045,15 +3211,15 @@ } TARGET(FOR_ITER_LIST) { - PyObject *_tmp_1; - PyObject *_tmp_2 = stack_pointer[-1]; + PyObject *iter; + PyObject *next; + // _ITER_CHECK_LIST + iter = stack_pointer[-1]; { - PyObject *iter = _tmp_2; DEOPT_IF(Py_TYPE(iter) != &PyListIter_Type, FOR_ITER); - _tmp_2 = iter; } + // _ITER_JUMP_LIST { - PyObject *iter = _tmp_2; _PyListIterObject *it = (_PyListIterObject *)iter; assert(Py_TYPE(iter) == &PyListIter_Type); STAT_INC(FOR_ITER, hit); @@ -3070,37 +3236,32 @@ JUMPBY(oparg + 1); DISPATCH(); } - _tmp_2 = iter; } + // _ITER_NEXT_LIST { - PyObject *iter = _tmp_2; - PyObject *next; _PyListIterObject *it = (_PyListIterObject *)iter; assert(Py_TYPE(iter) == &PyListIter_Type); PyListObject *seq = it->it_seq; assert(seq); assert(it->it_index < PyList_GET_SIZE(seq)); next = Py_NewRef(PyList_GET_ITEM(seq, it->it_index++)); - _tmp_2 = iter; - _tmp_1 = next; } - next_instr += 1; STACK_GROW(1); - stack_pointer[-1] = _tmp_1; - stack_pointer[-2] = _tmp_2; + stack_pointer[-1] = next; + next_instr += 1; DISPATCH(); } TARGET(FOR_ITER_TUPLE) { - PyObject *_tmp_1; - PyObject *_tmp_2 = stack_pointer[-1]; + PyObject *iter; + PyObject *next; + // _ITER_CHECK_TUPLE + iter = stack_pointer[-1]; { - PyObject *iter = _tmp_2; DEOPT_IF(Py_TYPE(iter) != &PyTupleIter_Type, FOR_ITER); - _tmp_2 = iter; } + // _ITER_JUMP_TUPLE { - PyObject *iter = _tmp_2; _PyTupleIterObject *it = (_PyTupleIterObject *)iter; assert(Py_TYPE(iter) == &PyTupleIter_Type); STAT_INC(FOR_ITER, hit); @@ -3117,38 +3278,33 @@ JUMPBY(oparg + 1); DISPATCH(); } - _tmp_2 = iter; } + // _ITER_NEXT_TUPLE { - PyObject *iter = _tmp_2; - PyObject *next; _PyTupleIterObject *it = (_PyTupleIterObject *)iter; assert(Py_TYPE(iter) == &PyTupleIter_Type); PyTupleObject *seq = it->it_seq; assert(seq); assert(it->it_index < PyTuple_GET_SIZE(seq)); next = Py_NewRef(PyTuple_GET_ITEM(seq, it->it_index++)); - _tmp_2 = iter; - _tmp_1 = next; } - next_instr += 1; STACK_GROW(1); - stack_pointer[-1] = _tmp_1; - stack_pointer[-2] = _tmp_2; + stack_pointer[-1] = next; + next_instr += 1; DISPATCH(); } TARGET(FOR_ITER_RANGE) { - PyObject *_tmp_1; - PyObject *_tmp_2 = stack_pointer[-1]; + PyObject *iter; + PyObject *next; + // _ITER_CHECK_RANGE + iter = stack_pointer[-1]; { - PyObject *iter = _tmp_2; _PyRangeIterObject *r = (_PyRangeIterObject *)iter; DEOPT_IF(Py_TYPE(r) != &PyRangeIter_Type, FOR_ITER); - _tmp_2 = iter; } + // _ITER_JUMP_RANGE { - PyObject *iter = _tmp_2; 
_PyRangeIterObject *r = (_PyRangeIterObject *)iter; assert(Py_TYPE(r) == &PyRangeIter_Type); STAT_INC(FOR_ITER, hit); @@ -3160,11 +3316,9 @@ JUMPBY(oparg + 1); DISPATCH(); } - _tmp_2 = iter; } + // _ITER_NEXT_RANGE { - PyObject *iter = _tmp_2; - PyObject *next; _PyRangeIterObject *r = (_PyRangeIterObject *)iter; assert(Py_TYPE(r) == &PyRangeIter_Type); assert(r->len > 0); @@ -3173,18 +3327,16 @@ r->len--; next = PyLong_FromLong(value); if (next == NULL) goto error; - _tmp_2 = iter; - _tmp_1 = next; } - next_instr += 1; STACK_GROW(1); - stack_pointer[-1] = _tmp_1; - stack_pointer[-2] = _tmp_2; + stack_pointer[-1] = next; + next_instr += 1; DISPATCH(); } TARGET(FOR_ITER_GEN) { - PyObject *iter = stack_pointer[-1]; + PyObject *iter; + iter = stack_pointer[-1]; DEOPT_IF(tstate->interp->eval_frame, FOR_ITER); PyGenObject *gen = (PyGenObject *)iter; DEOPT_IF(Py_TYPE(gen) != &PyGen_Type, FOR_ITER); @@ -3200,12 +3352,14 @@ assert(next_instr[oparg].op.code == END_FOR || next_instr[oparg].op.code == INSTRUMENTED_END_FOR); DISPATCH_INLINED(gen_frame); + STACK_GROW(1); } TARGET(BEFORE_ASYNC_WITH) { - PyObject *mgr = stack_pointer[-1]; + PyObject *mgr; PyObject *exit; PyObject *res; + mgr = stack_pointer[-1]; PyObject *enter = _PyObject_LookupSpecial(mgr, &_Py_ID(__aenter__)); if (enter == NULL) { if (!_PyErr_Occurred(tstate)) { @@ -3236,15 +3390,16 @@ if (true) goto pop_1_error; } STACK_GROW(1); - stack_pointer[-1] = res; stack_pointer[-2] = exit; + stack_pointer[-1] = res; DISPATCH(); } TARGET(BEFORE_WITH) { - PyObject *mgr = stack_pointer[-1]; + PyObject *mgr; PyObject *exit; PyObject *res; + mgr = stack_pointer[-1]; /* pop the context manager, push its __exit__ and the * value returned from calling its __enter__ */ @@ -3278,16 +3433,19 @@ if (true) goto pop_1_error; } STACK_GROW(1); - stack_pointer[-1] = res; stack_pointer[-2] = exit; + stack_pointer[-1] = res; DISPATCH(); } TARGET(WITH_EXCEPT_START) { - PyObject *val = stack_pointer[-1]; - PyObject *lasti = stack_pointer[-3]; - PyObject *exit_func = stack_pointer[-4]; + PyObject *val; + PyObject *lasti; + PyObject *exit_func; PyObject *res; + val = stack_pointer[-1]; + lasti = stack_pointer[-3]; + exit_func = stack_pointer[-4]; /* At the top of the stack are 4 values: - val: TOP = exc_info() - unused: SECOND = previous exception @@ -3301,7 +3459,12 @@ assert(val && PyExceptionInstance_Check(val)); exc = PyExceptionInstance_Class(val); tb = PyException_GetTraceback(val); - Py_XDECREF(tb); + if (tb == NULL) { + tb = Py_None; + } + else { + Py_DECREF(tb); + } assert(PyLong_Check(lasti)); (void)lasti; // Shut up compiler warning if asserts are off PyObject *stack[4] = {NULL, exc, val, tb}; @@ -3314,8 +3477,9 @@ } TARGET(PUSH_EXC_INFO) { - PyObject *new_exc = stack_pointer[-1]; + PyObject *new_exc; PyObject *prev_exc; + new_exc = stack_pointer[-1]; _PyErr_StackItem *exc_info = tstate->exc_info; if (exc_info->exc_value != NULL) { prev_exc = exc_info->exc_value; @@ -3326,129 +3490,138 @@ assert(PyExceptionInstance_Check(new_exc)); exc_info->exc_value = Py_NewRef(new_exc); STACK_GROW(1); - stack_pointer[-1] = new_exc; stack_pointer[-2] = prev_exc; + stack_pointer[-1] = new_exc; DISPATCH(); } TARGET(LOAD_ATTR_METHOD_WITH_VALUES) { - PyObject *self = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *self; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t keys_version = read_u32(&next_instr[3].cache); PyObject *descr = read_obj(&next_instr[5].cache); 
assert(oparg & 1); /* Cached method object */ - PyTypeObject *self_cls = Py_TYPE(self); + PyTypeObject *owner_cls = Py_TYPE(owner); assert(type_version != 0); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyHeapTypeObject *self_heap_type = (PyHeapTypeObject *)self_cls; - DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(owner_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), + LOAD_ATTR); + PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; + DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); - res2 = Py_NewRef(descr); - assert(_PyType_HasFeature(Py_TYPE(res2), Py_TPFLAGS_METHOD_DESCRIPTOR)); - res = self; + attr = Py_NewRef(descr); + assert(_PyType_HasFeature(Py_TYPE(attr), Py_TPFLAGS_METHOD_DESCRIPTOR)); + self = owner; STACK_GROW(1); - stack_pointer[-1] = res; - stack_pointer[-2] = res2; + stack_pointer[-2] = attr; + stack_pointer[-1] = self; next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_METHOD_NO_DICT) { - PyObject *self = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *self; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); assert(oparg & 1); - PyTypeObject *self_cls = Py_TYPE(self); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_dictoffset == 0); + PyTypeObject *owner_cls = Py_TYPE(owner); + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(owner_cls->tp_dictoffset == 0); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)); - res2 = Py_NewRef(descr); - res = self; + attr = Py_NewRef(descr); + self = owner; STACK_GROW(1); - stack_pointer[-1] = res; - stack_pointer[-2] = res2; + stack_pointer[-2] = attr; + stack_pointer[-1] = self; next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES) { - PyObject *self = stack_pointer[-1]; - PyObject *res; + PyObject *owner; + PyObject *attr; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); uint32_t keys_version = read_u32(&next_instr[3].cache); PyObject *descr = read_obj(&next_instr[5].cache); assert((oparg & 1) == 0); - PyTypeObject *self_cls = Py_TYPE(self); + PyTypeObject *owner_cls = Py_TYPE(owner); assert(type_version != 0); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(self); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), LOAD_ATTR); - PyHeapTypeObject *self_heap_type = (PyHeapTypeObject *)self_cls; - DEOPT_IF(self_heap_type->ht_cached_keys->dk_version != + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(owner_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), + LOAD_ATTR); + 
PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; + DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); - Py_DECREF(self); - res = Py_NewRef(descr); - stack_pointer[-1] = res; + Py_DECREF(owner); + attr = Py_NewRef(descr); + stack_pointer[-1 - (0 ? 1 : 0)] = attr; next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_NONDESCRIPTOR_NO_DICT) { - PyObject *self = stack_pointer[-1]; - PyObject *res; + PyObject *owner; + PyObject *attr; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); assert((oparg & 1) == 0); - PyTypeObject *self_cls = Py_TYPE(self); + PyTypeObject *owner_cls = Py_TYPE(owner); assert(type_version != 0); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(self_cls->tp_dictoffset == 0); + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + assert(owner_cls->tp_dictoffset == 0); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); - Py_DECREF(self); - res = Py_NewRef(descr); - stack_pointer[-1] = res; + Py_DECREF(owner); + attr = Py_NewRef(descr); + stack_pointer[-1 - (0 ? 1 : 0)] = attr; next_instr += 9; DISPATCH(); } TARGET(LOAD_ATTR_METHOD_LAZY_DICT) { - PyObject *self = stack_pointer[-1]; - PyObject *res2 = NULL; - PyObject *res; + PyObject *owner; + PyObject *attr; + PyObject *self; + owner = stack_pointer[-1]; uint32_t type_version = read_u32(&next_instr[1].cache); PyObject *descr = read_obj(&next_instr[5].cache); assert(oparg & 1); - PyTypeObject *self_cls = Py_TYPE(self); - DEOPT_IF(self_cls->tp_version_tag != type_version, LOAD_ATTR); - Py_ssize_t dictoffset = self_cls->tp_dictoffset; + PyTypeObject *owner_cls = Py_TYPE(owner); + DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); + Py_ssize_t dictoffset = owner_cls->tp_dictoffset; assert(dictoffset > 0); - PyObject *dict = *(PyObject **)((char *)self + dictoffset); + PyObject *dict = *(PyObject **)((char *)owner + dictoffset); /* This object has a __dict__, just not yet created */ DEOPT_IF(dict != NULL, LOAD_ATTR); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)); - res2 = Py_NewRef(descr); - res = self; + attr = Py_NewRef(descr); + self = owner; STACK_GROW(1); - stack_pointer[-1] = res; - stack_pointer[-2] = res2; + stack_pointer[-2] = attr; + stack_pointer[-1] = self; next_instr += 9; DISPATCH(); } @@ -3461,9 +3634,9 @@ } TARGET(INSTRUMENTED_CALL) { - int is_meth = PEEK(oparg+2) != NULL; + int is_meth = PEEK(oparg + 1) != NULL; int total_args = oparg + is_meth; - PyObject *function = PEEK(total_args + 1); + PyObject *function = PEEK(oparg + 2); PyObject *arg = total_args == 0 ? 
&_PyInstrumentation_MISSING : PEEK(total_args); int err = _Py_call_instrumentation_2args( @@ -3478,14 +3651,15 @@ TARGET(CALL) { PREDICTED(CALL); static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size"); - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; - int is_meth = method != NULL; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -3499,13 +3673,12 @@ STAT_INC(CALL, deferred); DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ - if (!is_meth && Py_TYPE(callable) == &PyMethod_Type) { - is_meth = 1; // For consistenct; it's dead, though + if (self_or_null == NULL && Py_TYPE(callable) == &PyMethod_Type) { args--; total_args++; PyObject *self = ((PyMethodObject *)callable)->im_self; args[0] = Py_NewRef(self); - method = ((PyMethodObject *)callable)->im_func; + PyObject *method = ((PyMethodObject *)callable)->im_func; args[-1] = Py_NewRef(method); Py_DECREF(callable); callable = method; @@ -3541,7 +3714,7 @@ kwnames); if (opcode == INSTRUMENTED_CALL) { PyObject *arg = total_args == 0 ? - &_PyInstrumentation_MISSING : PEEK(total_args); + &_PyInstrumentation_MISSING : args[0]; if (res == NULL) { _Py_call_instrumentation_exc2( tstate, PY_MONITORING_EVENT_C_RAISE, @@ -3572,31 +3745,36 @@ } TARGET(CALL_BOUND_METHOD_EXACT_ARGS) { - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; - DEOPT_IF(method != NULL, CALL); + PyObject *null; + PyObject *callable; + null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; + DEOPT_IF(null != NULL, CALL); DEOPT_IF(Py_TYPE(callable) != &PyMethod_Type, CALL); STAT_INC(CALL, hit); PyObject *self = ((PyMethodObject *)callable)->im_self; - PEEK(oparg + 1) = Py_NewRef(self); // callable + PEEK(oparg + 1) = Py_NewRef(self); // self_or_null PyObject *meth = ((PyMethodObject *)callable)->im_func; - PEEK(oparg + 2) = Py_NewRef(meth); // method + PEEK(oparg + 2) = Py_NewRef(meth); // callable Py_DECREF(callable); GO_TO_INSTRUCTION(CALL_PY_EXACT_ARGS); + STACK_SHRINK(oparg); + STACK_SHRINK(1); } TARGET(CALL_PY_EXACT_ARGS) { PREDICTED(CALL_PY_EXACT_ARGS); - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; uint32_t func_version = read_u32(&next_instr[1].cache); ASSERT_KWNAMES_IS_NULL(); DEOPT_IF(tstate->interp->eval_frame, CALL); - int is_meth = method != NULL; int argcount = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; argcount++; } @@ -3616,19 +3794,22 @@ SKIP_OVER(INLINE_CACHE_ENTRIES_CALL); frame->return_offset = 0; DISPATCH_INLINED(new_frame); + STACK_SHRINK(oparg); + STACK_SHRINK(1); } TARGET(CALL_PY_WITH_DEFAULTS) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - 
oparg]; + callable = stack_pointer[-2 - oparg]; uint32_t func_version = read_u32(&next_instr[1].cache); ASSERT_KWNAMES_IS_NULL(); DEOPT_IF(tstate->interp->eval_frame, CALL); - int is_meth = method != NULL; int argcount = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; argcount++; } @@ -3658,13 +3839,18 @@ SKIP_OVER(INLINE_CACHE_ENTRIES_CALL); frame->return_offset = 0; DISPATCH_INLINED(new_frame); + STACK_SHRINK(oparg); + STACK_SHRINK(1); } TARGET(CALL_NO_KW_TYPE_1) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3682,10 +3868,13 @@ } TARGET(CALL_NO_KW_STR_1) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3705,10 +3894,13 @@ } TARGET(CALL_NO_KW_TUPLE_1) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); DEOPT_IF(null != NULL, CALL); @@ -3728,9 +3920,12 @@ } TARGET(CALL_NO_KW_ALLOC_AND_ENTER_INIT) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *null = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *null; + PyObject *callable; + args = stack_pointer - oparg; + null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; /* This instruction does the following: * 1. Creates the object (by calling ``object.__new__``) * 2. 
Pushes a shim frame to the frame stack (to cleanup after ``__init__``) @@ -3781,10 +3976,13 @@ * as it will be checked after start_frame */ tstate->py_recursion_remaining--; goto start_frame; + STACK_SHRINK(oparg); + STACK_SHRINK(1); } TARGET(EXIT_INIT_CHECK) { - PyObject *should_be_none = stack_pointer[-1]; + PyObject *should_be_none; + should_be_none = stack_pointer[-1]; assert(STACK_LEVEL() == 2); if (should_be_none != Py_None) { PyErr_Format(PyExc_TypeError, @@ -3797,14 +3995,15 @@ } TARGET(CALL_BUILTIN_CLASS) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; - int is_meth = method != NULL; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -3831,16 +4030,17 @@ } TARGET(CALL_NO_KW_BUILTIN_O) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; /* Builtin METH_O functions */ ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -3871,16 +4071,17 @@ } TARGET(CALL_NO_KW_BUILTIN_FAST) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; /* Builtin METH_FASTCALL functions, without keywords */ ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -3915,15 +4116,16 @@ } TARGET(CALL_BUILTIN_FAST_WITH_KEYWORDS) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; /* Builtin METH_FASTCALL | METH_KEYWORDS functions */ - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } @@ -3959,21 +4161,22 @@ } TARGET(CALL_NO_KW_LEN) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); /* len(o) */ - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } DEOPT_IF(total_args != 1, CALL); - PyInterpreterState *interp = _PyInterpreterState_GET(); + 
PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable != interp->callable_cache.len, CALL); STAT_INC(CALL, hit); PyObject *arg = args[0]; @@ -3995,21 +4198,22 @@ } TARGET(CALL_NO_KW_ISINSTANCE) { - PyObject **args = (stack_pointer - oparg); - PyObject *callable = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); /* isinstance(o, o2) */ - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { - callable = method; + if (self_or_null != NULL) { args--; total_args++; } DEOPT_IF(total_args != 2, CALL); - PyInterpreterState *interp = _PyInterpreterState_GET(); + PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable != interp->callable_cache.isinstance, CALL); STAT_INC(CALL, hit); PyObject *cls = args[1]; @@ -4033,48 +4237,55 @@ } TARGET(CALL_NO_KW_LIST_APPEND) { - PyObject **args = (stack_pointer - oparg); - PyObject *self = stack_pointer[-(1 + oparg)]; - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self; + PyObject *callable; + args = stack_pointer - oparg; + self = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); assert(oparg == 1); - assert(method != NULL); - PyInterpreterState *interp = _PyInterpreterState_GET(); - DEOPT_IF(method != interp->callable_cache.list_append, CALL); + assert(self != NULL); + PyInterpreterState *interp = tstate->interp; + DEOPT_IF(callable != interp->callable_cache.list_append, CALL); DEOPT_IF(!PyList_Check(self), CALL); STAT_INC(CALL, hit); if (_PyList_AppendTakeRef((PyListObject *)self, args[0]) < 0) { goto pop_1_error; // Since arg is DECREF'ed already } Py_DECREF(self); - Py_DECREF(method); + Py_DECREF(callable); STACK_SHRINK(3); // CALL + POP_TOP SKIP_OVER(INLINE_CACHE_ENTRIES_CALL + 1); assert(next_instr[-1].op.code == POP_TOP); DISPATCH(); + STACK_SHRINK(oparg); + STACK_SHRINK(1); } TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_O) { - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; DEOPT_IF(total_args != 2, CALL); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != METH_O, CALL); PyObject *arg = args[1]; PyObject *self = args[0]; - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); STAT_INC(CALL, hit); PyCFunction cfunc = meth->ml_meth; // This is slower but CPython promises to check all non-vectorcall @@ -4098,21 +4309,23 @@ } TARGET(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS) { - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject 
*callable; PyObject *res; - int is_meth = method != NULL; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != (METH_FASTCALL|METH_KEYWORDS), CALL); - PyTypeObject *d_type = callable->d_common.d_type; + PyTypeObject *d_type = method->d_common.d_type; PyObject *self = args[0]; DEOPT_IF(!Py_IS_TYPE(self, d_type), CALL); STAT_INC(CALL, hit); @@ -4138,23 +4351,26 @@ } TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_NOARGS) { - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); assert(oparg == 0 || oparg == 1); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } DEOPT_IF(total_args != 1, CALL); - PyMethodDescrObject *callable = (PyMethodDescrObject *)SECOND(); - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; PyObject *self = args[0]; - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); DEOPT_IF(meth->ml_flags != METH_NOARGS, CALL); STAT_INC(CALL, hit); PyCFunction cfunc = meth->ml_meth; @@ -4178,24 +4394,26 @@ } TARGET(CALL_NO_KW_METHOD_DESCRIPTOR_FAST) { - PyObject **args = (stack_pointer - oparg); - PyObject *method = stack_pointer[-(2 + oparg)]; + PyObject **args; + PyObject *self_or_null; + PyObject *callable; PyObject *res; + args = stack_pointer - oparg; + self_or_null = stack_pointer[-1 - oparg]; + callable = stack_pointer[-2 - oparg]; ASSERT_KWNAMES_IS_NULL(); - int is_meth = method != NULL; int total_args = oparg; - if (is_meth) { + if (self_or_null != NULL) { args--; total_args++; } - PyMethodDescrObject *callable = - (PyMethodDescrObject *)PEEK(total_args + 1); + PyMethodDescrObject *method = (PyMethodDescrObject *)callable; /* Builtin METH_FASTCALL methods, without keywords */ - DEOPT_IF(!Py_IS_TYPE(callable, &PyMethodDescr_Type), CALL); - PyMethodDef *meth = callable->d_method; + DEOPT_IF(!Py_IS_TYPE(method, &PyMethodDescr_Type), CALL); + PyMethodDef *meth = method->d_method; DEOPT_IF(meth->ml_flags != METH_FASTCALL, CALL); PyObject *self = args[0]; - DEOPT_IF(!Py_IS_TYPE(self, callable->d_common.d_type), CALL); + DEOPT_IF(!Py_IS_TYPE(self, method->d_common.d_type), CALL); STAT_INC(CALL, hit); _PyCFunctionFast cfunc = (_PyCFunctionFast)(void(*)(void))meth->ml_meth; @@ -4222,10 +4440,13 @@ TARGET(CALL_FUNCTION_EX) { PREDICTED(CALL_FUNCTION_EX); - PyObject *kwargs = (oparg & 1) ? stack_pointer[-(((oparg & 1) ? 1 : 0))] : NULL; - PyObject *callargs = stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))]; - PyObject *func = stack_pointer[-(2 + ((oparg & 1) ? 
1 : 0))]; + PyObject *kwargs = NULL; + PyObject *callargs; + PyObject *func; PyObject *result; + if (oparg & 1) { kwargs = stack_pointer[-(oparg & 1 ? 1 : 0)]; } + callargs = stack_pointer[-1 - (oparg & 1 ? 1 : 0)]; + func = stack_pointer[-3 - (oparg & 1 ? 1 : 0)]; // DICT_MERGE is called before this opcode if there are kwargs. // It converts all dict subtypes in kwargs into regular dicts. assert(kwargs == NULL || PyDict_CheckExact(kwargs)); @@ -4290,7 +4511,7 @@ Py_DECREF(func); Py_DECREF(callargs); Py_XDECREF(kwargs); - assert(PEEK(3 + (oparg & 1)) == NULL); + assert(PEEK(2 + (oparg & 1)) == NULL); if (result == NULL) { STACK_SHRINK(((oparg & 1) ? 1 : 0)); goto pop_3_error; } STACK_SHRINK(((oparg & 1) ? 1 : 0)); STACK_SHRINK(2); @@ -4300,8 +4521,9 @@ } TARGET(MAKE_FUNCTION) { - PyObject *codeobj = stack_pointer[-1]; + PyObject *codeobj; PyObject *func; + codeobj = stack_pointer[-1]; PyFunctionObject *func_obj = (PyFunctionObject *) PyFunction_New(codeobj, GLOBALS()); @@ -4318,8 +4540,10 @@ } TARGET(SET_FUNCTION_ATTRIBUTE) { - PyObject *func = stack_pointer[-1]; - PyObject *attr = stack_pointer[-2]; + PyObject *func; + PyObject *attr; + func = stack_pointer[-1]; + attr = stack_pointer[-2]; assert(PyFunction_Check(func)); PyFunctionObject *func_obj = (PyFunctionObject *)func; switch(oparg) { @@ -4373,10 +4597,13 @@ } TARGET(BUILD_SLICE) { - PyObject *step = (oparg == 3) ? stack_pointer[-(((oparg == 3) ? 1 : 0))] : NULL; - PyObject *stop = stack_pointer[-(1 + ((oparg == 3) ? 1 : 0))]; - PyObject *start = stack_pointer[-(2 + ((oparg == 3) ? 1 : 0))]; + PyObject *step = NULL; + PyObject *stop; + PyObject *start; PyObject *slice; + if (oparg == 3) { step = stack_pointer[-(oparg == 3 ? 1 : 0)]; } + stop = stack_pointer[-1 - (oparg == 3 ? 1 : 0)]; + start = stack_pointer[-2 - (oparg == 3 ? 1 : 0)]; slice = PySlice_New(start, stop, step); Py_DECREF(start); Py_DECREF(stop); @@ -4389,8 +4616,9 @@ } TARGET(CONVERT_VALUE) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *result; + value = stack_pointer[-1]; convertion_func_ptr conv_fn; assert(oparg >= FVC_STR && oparg <= FVC_ASCII); conv_fn = CONVERSION_FUNCTIONS[oparg]; @@ -4402,8 +4630,9 @@ } TARGET(FORMAT_SIMPLE) { - PyObject *value = stack_pointer[-1]; + PyObject *value; PyObject *res; + value = stack_pointer[-1]; /* If value is a unicode object, then we know the result * of format(value) is value itself. 
*/ if (!PyUnicode_CheckExact(value)) { @@ -4419,9 +4648,11 @@ } TARGET(FORMAT_WITH_SPEC) { - PyObject *fmt_spec = stack_pointer[-1]; - PyObject *value = stack_pointer[-2]; + PyObject *fmt_spec; + PyObject *value; PyObject *res; + fmt_spec = stack_pointer[-1]; + value = stack_pointer[-2]; res = PyObject_Format(value, fmt_spec); Py_DECREF(value); Py_DECREF(fmt_spec); @@ -4432,8 +4663,9 @@ } TARGET(COPY) { - PyObject *bottom = stack_pointer[-(1 + (oparg-1))]; + PyObject *bottom; PyObject *top; + bottom = stack_pointer[-1 - (oparg-1)]; assert(oparg > 0); top = Py_NewRef(bottom); STACK_GROW(1); @@ -4444,9 +4676,11 @@ TARGET(BINARY_OP) { PREDICTED(BINARY_OP); static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); - PyObject *rhs = stack_pointer[-1]; - PyObject *lhs = stack_pointer[-2]; + PyObject *rhs; + PyObject *lhs; PyObject *res; + rhs = stack_pointer[-1]; + lhs = stack_pointer[-2]; #if ENABLE_SPECIALIZATION _PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr; if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) { @@ -4457,10 +4691,10 @@ STAT_INC(BINARY_OP, deferred); DECREMENT_ADAPTIVE_COUNTER(cache->counter); #endif /* ENABLE_SPECIALIZATION */ - assert(0 <= oparg); - assert((unsigned)oparg < Py_ARRAY_LENGTH(binary_ops)); - assert(binary_ops[oparg]); - res = binary_ops[oparg](lhs, rhs); + assert(NB_ADD <= oparg); + assert(oparg <= NB_INPLACE_XOR); + assert(_PyEval_BinaryOps[oparg]); + res = _PyEval_BinaryOps[oparg](lhs, rhs); Py_DECREF(lhs); Py_DECREF(rhs); if (res == NULL) goto pop_2_error; @@ -4471,11 +4705,13 @@ } TARGET(SWAP) { - PyObject *top = stack_pointer[-1]; - PyObject *bottom = stack_pointer[-(2 + (oparg-2))]; + PyObject *top; + PyObject *bottom; + top = stack_pointer[-1]; + bottom = stack_pointer[-2 - (oparg-2)]; assert(oparg >= 2); + stack_pointer[-2 - (oparg-2)] = top; stack_pointer[-1] = bottom; - stack_pointer[-(2 + (oparg-2))] = top; DISPATCH(); } diff --git a/Python/getargs.c b/Python/getargs.c index 45befab4f8bc37..916e46578a454b 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -2,8 +2,9 @@ /* New getargs implementation */ #include "Python.h" -#include "pycore_tuple.h" // _PyTuple_ITEMS() +#include "pycore_dict.h" // _PyDict_HasOnlyStringKeys() #include "pycore_pylifecycle.h" // _PyArg_Fini +#include "pycore_tuple.h" // _PyTuple_ITEMS() #include #include @@ -1417,20 +1418,6 @@ _PyArg_ParseStackAndKeywords_SizeT(PyObject *const *args, Py_ssize_t nargs, PyOb } -PyAPI_FUNC(int) -_PyArg_VaParseTupleAndKeywordsFast(PyObject *args, PyObject *keywords, - struct _PyArg_Parser *parser, va_list va) -{ - int retval; - va_list lva; - - va_copy(lva, va); - - retval = vgetargskeywordsfast(args, keywords, parser, &lva, 0); - va_end(lva); - return retval; -} - static void error_unexpected_keyword_arg(PyObject *kwargs, PyObject *kwnames, PyObject *kwtuple, const char *fname) { diff --git a/Python/hamt.c b/Python/hamt.c index c78b5a7fab94f0..24265edc2c3fd4 100644 --- a/Python/hamt.c +++ b/Python/hamt.c @@ -514,7 +514,7 @@ hamt_node_bitmap_new(Py_ssize_t size) /* Since bitmap nodes are immutable, we can cache the instance for size=0 and reuse it whenever we need an empty bitmap node. 
*/ - return (PyHamtNode *)Py_NewRef(&_Py_SINGLETON(hamt_bitmap_node_empty)); + return (PyHamtNode *)&_Py_SINGLETON(hamt_bitmap_node_empty); } assert(size >= 0); diff --git a/Python/hashtable.c b/Python/hashtable.c index 09501de199b0e6..4e22a1a5509eb5 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -46,6 +46,7 @@ #include "Python.h" #include "pycore_hashtable.h" +#include "pycore_pyhash.h" // _Py_HashPointerRaw() #define HASHTABLE_MIN_SIZE 16 #define HASHTABLE_HIGH 0.50 @@ -128,6 +129,13 @@ _Py_hashtable_size(const _Py_hashtable_t *ht) } +size_t +_Py_hashtable_len(const _Py_hashtable_t *ht) +{ + return ht->nentries; +} + + _Py_hashtable_entry_t * _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) { diff --git a/Python/import.c b/Python/import.c index cf993cbd62a2ef..56b2dc1a4ada2c 100644 --- a/Python/import.c +++ b/Python/import.c @@ -2,10 +2,12 @@ #include "Python.h" +#include "pycore_hashtable.h" // _Py_hashtable_new_full() #include "pycore_import.h" // _PyImport_BootstrapImp() #include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_interp.h" // struct _import_runtime_state #include "pycore_namespace.h" // _PyNamespace_Type +#include "pycore_object.h" // _Py_SetImmortal() #include "pycore_pyerrors.h" // _PyErr_SetString() #include "pycore_pyhash.h" // _Py_KeyedHash() #include "pycore_pylifecycle.h" @@ -210,17 +212,6 @@ PyImport_GetModuleDict(void) return MODULES(interp); } -// This is only kept around for extensions that use _Py_IDENTIFIER. -PyObject * -_PyImport_GetModuleId(_Py_Identifier *nameid) -{ - PyObject *name = _PyUnicode_FromId(nameid); /* borrowed */ - if (name == NULL) { - return NULL; - } - return PyImport_GetModule(name); -} - int _PyImport_SetModule(PyObject *name, PyObject *m) { @@ -610,7 +601,7 @@ _PyImport_ClearModulesByIndex(PyInterpreterState *interp) when an extension is loaded. This includes when it is imported for the first time. - Here's a summary, using importlib._boostrap._load() as a starting point. + Here's a summary, using importlib._bootstrap._load() as a starting point. 1. importlib._bootstrap._load() 2. _load(): acquire import lock @@ -916,35 +907,79 @@ extensions_lock_release(void) dictionary, to avoid loading shared libraries twice. */ +static void * +hashtable_key_from_2_strings(PyObject *str1, PyObject *str2, const char sep) +{ + Py_ssize_t str1_len, str2_len; + const char *str1_data = PyUnicode_AsUTF8AndSize(str1, &str1_len); + const char *str2_data = PyUnicode_AsUTF8AndSize(str2, &str2_len); + if (str1_data == NULL || str2_data == NULL) { + return NULL; + } + /* Make sure sep and the NULL byte won't cause an overflow. */ + assert(SIZE_MAX - str1_len - str2_len > 2); + size_t size = str1_len + 1 + str2_len + 1; + + char *key = PyMem_RawMalloc(size); + if (key == NULL) { + PyErr_NoMemory(); + return NULL; + } + + strncpy(key, str1_data, str1_len); + key[str1_len] = sep; + strncpy(key + str1_len + 1, str2_data, str2_len + 1); + assert(strlen(key) == size - 1); + return key; +} + +static Py_uhash_t +hashtable_hash_str(const void *key) +{ + return _Py_HashBytes(key, strlen((const char *)key)); +} + +static int +hashtable_compare_str(const void *key1, const void *key2) +{ + return strcmp((const char *)key1, (const char *)key2) == 0; +} + static void -_extensions_cache_init(void) +hashtable_destroy_str(void *ptr) { - /* The runtime (i.e. main interpreter) must be initializing, - so we don't need to worry about the lock. 
*/ - _PyThreadState_InitDetached(&EXTENSIONS.main_tstate, - _PyInterpreterState_Main()); + PyMem_RawFree(ptr); } +#define HTSEP ':' + static PyModuleDef * _extensions_cache_get(PyObject *filename, PyObject *name) { PyModuleDef *def = NULL; + void *key = NULL; extensions_lock_acquire(); - PyObject *key = PyTuple_Pack(2, filename, name); - if (key == NULL) { + if (EXTENSIONS.hashtable == NULL) { goto finally; } - PyObject *extensions = EXTENSIONS.dict; - if (extensions == NULL) { + key = hashtable_key_from_2_strings(filename, name, HTSEP); + if (key == NULL) { goto finally; } - def = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); + _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry( + EXTENSIONS.hashtable, key); + if (entry == NULL) { + goto finally; + } + def = (PyModuleDef *)entry->value; finally: - Py_XDECREF(key); extensions_lock_release(); + if (key != NULL) { + PyMem_RawFree(key); + } return def; } @@ -952,124 +987,100 @@ static int _extensions_cache_set(PyObject *filename, PyObject *name, PyModuleDef *def) { int res = -1; - PyThreadState *oldts = NULL; extensions_lock_acquire(); - /* Swap to the main interpreter, if necessary. This matters if - the dict hasn't been created yet or if the item isn't in the - dict yet. In both cases we must ensure the relevant objects - are created using the main interpreter. */ - PyThreadState *main_tstate = &EXTENSIONS.main_tstate; - PyInterpreterState *interp = _PyInterpreterState_GET(); - if (!_Py_IsMainInterpreter(interp)) { - _PyThreadState_BindDetached(main_tstate); - oldts = _PyThreadState_Swap(interp->runtime, main_tstate); - assert(!_Py_IsMainInterpreter(oldts->interp)); - - /* Make sure the name and filename objects are owned - by the main interpreter. */ - name = PyUnicode_InternFromString(PyUnicode_AsUTF8(name)); - assert(name != NULL); - filename = PyUnicode_InternFromString(PyUnicode_AsUTF8(filename)); - assert(filename != NULL); + if (EXTENSIONS.hashtable == NULL) { + _Py_hashtable_allocator_t alloc = {PyMem_RawMalloc, PyMem_RawFree}; + EXTENSIONS.hashtable = _Py_hashtable_new_full( + hashtable_hash_str, + hashtable_compare_str, + hashtable_destroy_str, // key + /* There's no need to decref the def since it's immortal. */ + NULL, // value + &alloc + ); + if (EXTENSIONS.hashtable == NULL) { + PyErr_NoMemory(); + goto finally; + } } - PyObject *key = PyTuple_Pack(2, filename, name); + void *key = hashtable_key_from_2_strings(filename, name, HTSEP); if (key == NULL) { goto finally; } - PyObject *extensions = EXTENSIONS.dict; - if (extensions == NULL) { - extensions = PyDict_New(); - if (extensions == NULL) { + int already_set = 0; + _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry( + EXTENSIONS.hashtable, key); + if (entry == NULL) { + if (_Py_hashtable_set(EXTENSIONS.hashtable, key, def) < 0) { + PyMem_RawFree(key); + PyErr_NoMemory(); goto finally; } - EXTENSIONS.dict = extensions; - } - - PyModuleDef *actual = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); - if (PyErr_Occurred()) { - goto finally; } - else if (actual != NULL) { - /* We expect it to be static, so it must be the same pointer. */ - assert(def == actual); - res = 0; - goto finally; + else { + if (entry->value == NULL) { + entry->value = def; + } + else { + /* We expect it to be static, so it must be the same pointer. */ + assert((PyModuleDef *)entry->value == def); + already_set = 1; + } + PyMem_RawFree(key); } - - /* This might trigger a resize, which is why we must switch - to the main interpreter. 
*/ - res = PyDict_SetItem(extensions, key, (PyObject *)def); - if (res < 0) { - res = -1; - goto finally; + if (!already_set) { + /* We assume that all module defs are statically allocated + and will never be freed. Otherwise, we would incref here. */ + _Py_SetImmortal(def); } res = 0; finally: - Py_XDECREF(key); - if (oldts != NULL) { - _PyThreadState_Swap(interp->runtime, oldts); - _PyThreadState_UnbindDetached(main_tstate); - Py_DECREF(name); - Py_DECREF(filename); - } extensions_lock_release(); return res; } -static int +static void _extensions_cache_delete(PyObject *filename, PyObject *name) { - int res = -1; - PyThreadState *oldts = NULL; + void *key = NULL; extensions_lock_acquire(); - PyObject *key = PyTuple_Pack(2, filename, name); - if (key == NULL) { + if (EXTENSIONS.hashtable == NULL) { + /* It was never added. */ goto finally; } - PyObject *extensions = EXTENSIONS.dict; - if (extensions == NULL) { - res = 0; + key = hashtable_key_from_2_strings(filename, name, HTSEP); + if (key == NULL) { goto finally; } - PyModuleDef *actual = (PyModuleDef *)PyDict_GetItemWithError(extensions, key); - if (PyErr_Occurred()) { + _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry( + EXTENSIONS.hashtable, key); + if (entry == NULL) { + /* It was never added. */ goto finally; } - else if (actual == NULL) { - /* It was already removed or never added. */ - res = 0; + if (entry->value == NULL) { + /* It was already removed. */ goto finally; } - - /* Swap to the main interpreter, if necessary. */ - PyThreadState *main_tstate = &EXTENSIONS.main_tstate; - PyInterpreterState *interp = _PyInterpreterState_GET(); - if (!_Py_IsMainInterpreter(interp)) { - _PyThreadState_BindDetached(main_tstate); - oldts = _PyThreadState_Swap(interp->runtime, main_tstate); - assert(!_Py_IsMainInterpreter(oldts->interp)); - } - - if (PyDict_DelItem(extensions, key) < 0) { - goto finally; - } - res = 0; + /* If we hadn't made the stored defs immortal, we would decref here. + However, this decref would be problematic if the module def were + dynamically allocated, it were the last ref, and this function + were called with an interpreter other than the def's owner. */ + assert(_Py_IsImmortal(entry->value)); + entry->value = NULL; finally: - if (oldts != NULL) { - _PyThreadState_Swap(interp->runtime, oldts); - _PyThreadState_UnbindDetached(main_tstate); - } - Py_XDECREF(key); extensions_lock_release(); - return res; + if (key != NULL) { + PyMem_RawFree(key); + } } static void @@ -1077,11 +1088,12 @@ _extensions_cache_clear_all(void) { /* The runtime (i.e. main interpreter) must be finalizing, so we don't need to worry about the lock. */ - // XXX assert(_Py_IsMainInterpreter(_PyInterpreterState_GET())); - Py_CLEAR(EXTENSIONS.dict); - _PyThreadState_ClearDetached(&EXTENSIONS.main_tstate); + _Py_hashtable_destroy(EXTENSIONS.hashtable); + EXTENSIONS.hashtable = NULL; } +#undef HTSEP + static bool check_multi_interp_extensions(PyInterpreterState *interp) @@ -1226,6 +1238,15 @@ import_find_extension(PyThreadState *tstate, PyObject *name, return NULL; } + /* It may have been successfully imported previously + in an interpreter that allows legacy modules + but is not allowed in the current interpreter. 
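The cache can hold bare PyModuleDef pointers without any reference counting because the defs are made immortal. A minimal sketch of what that buys, assuming the internal pycore_object.h helper used above (the function name is hypothetical):

#include <assert.h>
#include "Python.h"
#include "pycore_object.h"   // _Py_SetImmortal()

static void
demo_pin_def(PyModuleDef *def)
{
    /* Module defs are statically allocated and never freed, so pin the
       refcount; any interpreter may then share the cached pointer. */
    _Py_SetImmortal((PyObject *)def);
    assert(_Py_IsImmortal((PyObject *)def));
    /* From here on, Py_INCREF()/Py_DECREF() on the def are no-ops,
       which is why _extensions_cache_delete() only asserts immortality
       instead of dropping a reference. */
}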
*/ + const char *name_buf = PyUnicode_AsUTF8(name); + assert(name_buf != NULL); + if (_PyImport_CheckSubinterpIncompatibleExtensionAllowed(name_buf) < 0) { + return NULL; + } + PyObject *mod, *mdict; PyObject *modules = MODULES(tstate->interp); @@ -1233,6 +1254,8 @@ import_find_extension(PyThreadState *tstate, PyObject *name, PyObject *m_copy = def->m_base.m_copy; /* Module does not support repeated initialization */ if (m_copy == NULL) { + /* It might be a core module (e.g. sys & builtins), + for which we don't set m_copy. */ m_copy = get_core_module_dict(tstate->interp, name, filename); if (m_copy == NULL) { return NULL; @@ -1302,9 +1325,7 @@ clear_singlephase_extension(PyInterpreterState *interp, } /* Clear the cached module def. */ - if (_extensions_cache_delete(filename, name) < 0) { - return -1; - } + _extensions_cache_delete(filename, name); return 0; } @@ -3053,6 +3074,8 @@ void _PyImport_Fini(void) { /* Destroy the database used by _PyImport_{Fixup,Find}Extension */ + // XXX Should we actually leave them (mostly) intact, since we don't + // ever dlclose() the module files? _extensions_cache_clear_all(); /* Use the same memory allocator as _PyImport_Init(). */ @@ -3090,10 +3113,6 @@ _PyImport_Fini2(void) PyStatus _PyImport_InitCore(PyThreadState *tstate, PyObject *sysmod, int importlib) { - if (_Py_IsMainInterpreter(tstate->interp)) { - _extensions_cache_init(); - } - // XXX Initialize here: interp->modules and interp->import_func. // XXX Initialize here: sys.modules and sys.meta_path. @@ -3715,16 +3734,8 @@ _imp_create_dynamic_impl(PyObject *module, PyObject *spec, PyObject *file) PyThreadState *tstate = _PyThreadState_GET(); mod = import_find_extension(tstate, name, path); - if (mod != NULL) { - const char *name_buf = PyUnicode_AsUTF8(name); - assert(name_buf != NULL); - if (_PyImport_CheckSubinterpIncompatibleExtensionAllowed(name_buf) < 0) { - Py_DECREF(mod); - mod = NULL; - } - goto finally; - } - else if (PyErr_Occurred()) { + if (mod != NULL || _PyErr_Occurred(tstate)) { + assert(mod == NULL || !_PyErr_Occurred(tstate)); goto finally; } diff --git a/Python/instrumentation.c b/Python/instrumentation.c index e29748f0ad9872..6d11649c07fbe4 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -137,7 +137,7 @@ is_instrumented(int opcode) static inline bool monitors_equals(_Py_Monitors a, _Py_Monitors b) { - for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < _PY_MONITORING_UNGROUPED_EVENTS; i++) { if (a.tools[i] != b.tools[i]) { return false; } @@ -150,7 +150,7 @@ static inline _Py_Monitors monitors_sub(_Py_Monitors a, _Py_Monitors b) { _Py_Monitors res; - for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < _PY_MONITORING_UNGROUPED_EVENTS; i++) { res.tools[i] = a.tools[i] & ~b.tools[i]; } return res; @@ -161,7 +161,7 @@ static inline _Py_Monitors monitors_and(_Py_Monitors a, _Py_Monitors b) { _Py_Monitors res; - for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < _PY_MONITORING_UNGROUPED_EVENTS; i++) { res.tools[i] = a.tools[i] & b.tools[i]; } return res; @@ -172,7 +172,7 @@ static inline _Py_Monitors monitors_or(_Py_Monitors a, _Py_Monitors b) { _Py_Monitors res; - for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < _PY_MONITORING_UNGROUPED_EVENTS; i++) { res.tools[i] = a.tools[i] | b.tools[i]; } return res; @@ -181,7 +181,7 @@ monitors_or(_Py_Monitors a, _Py_Monitors b) static inline bool monitors_are_empty(_Py_Monitors m) { - for (int i = 0; i 
< PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < _PY_MONITORING_UNGROUPED_EVENTS; i++) { if (m.tools[i]) { return false; } @@ -192,7 +192,7 @@ monitors_are_empty(_Py_Monitors m) static inline bool multiple_tools(_Py_Monitors *m) { - for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < _PY_MONITORING_UNGROUPED_EVENTS; i++) { if (_Py_popcount32(m->tools[i]) > 1) { return true; } @@ -204,7 +204,7 @@ static inline _PyMonitoringEventSet get_events(_Py_Monitors *m, int tool_id) { _PyMonitoringEventSet result = 0; - for (int e = 0; e < PY_MONITORING_UNGROUPED_EVENTS; e++) { + for (int e = 0; e < _PY_MONITORING_UNGROUPED_EVENTS; e++) { if ((m->tools[e] >> tool_id) & 1) { result |= (1 << e); } @@ -276,6 +276,13 @@ _PyInstruction_GetLength(PyCodeObject *code, int offset) } assert(opcode != 0); assert(!is_instrumented(opcode)); + if (opcode == ENTER_EXECUTOR) { + int exec_index = _PyCode_CODE(code)[offset].op.arg; + _PyExecutorObject *exec = code->co_executors->executors[exec_index]; + opcode = exec->vm_data.opcode; + + } + assert(opcode != ENTER_EXECUTOR); assert(opcode == _PyOpcode_Deopt[opcode]); return 1 + _PyOpcode_Caches[opcode]; } @@ -339,7 +346,7 @@ static void dump_monitors(const char *prefix, _Py_Monitors monitors, FILE*out) { fprintf(out, "%s monitors:\n", prefix); - for (int event = 0; event < PY_MONITORING_UNGROUPED_EVENTS; event++) { + for (int event = 0; event < _PY_MONITORING_UNGROUPED_EVENTS; event++) { fprintf(out, " Event %d: Tools %x\n", event, monitors.tools[event]); } } @@ -695,29 +702,13 @@ instrument_per_instruction(PyCodeObject *code, int i) *opcode_ptr = INSTRUMENTED_INSTRUCTION; } -#ifndef NDEBUG -static bool -instruction_has_event(PyCodeObject *code, int offset) -{ - _Py_CODEUNIT instr = _PyCode_CODE(code)[offset]; - int opcode = instr.op.code; - if (opcode == INSTRUMENTED_LINE) { - opcode = code->_co_monitoring->lines[offset].original_opcode; - } - if (opcode == INSTRUMENTED_INSTRUCTION) { - opcode = code->_co_monitoring->per_instruction_opcodes[offset]; - } - return opcode_has_event(opcode); -} -#endif - static void remove_tools(PyCodeObject * code, int offset, int event, int tools) { assert(event != PY_MONITORING_EVENT_LINE); assert(event != PY_MONITORING_EVENT_INSTRUCTION); - assert(event < PY_MONITORING_INSTRUMENTED_EVENTS); - assert(instruction_has_event(code, offset)); + assert(PY_MONITORING_IS_INSTRUMENTED_EVENT(event)); + assert(opcode_has_event(_Py_GetBaseOpcode(code, offset))); _PyCoMonitoringData *monitoring = code->_co_monitoring; if (monitoring && monitoring->tools) { monitoring->tools[offset] &= ~tools; @@ -772,7 +763,7 @@ add_tools(PyCodeObject * code, int offset, int event, int tools) { assert(event != PY_MONITORING_EVENT_LINE); assert(event != PY_MONITORING_EVENT_INSTRUCTION); - assert(event < PY_MONITORING_INSTRUMENTED_EVENTS); + assert(PY_MONITORING_IS_INSTRUMENTED_EVENT(event)); assert(code->_co_monitoring); if (code->_co_monitoring && code->_co_monitoring->tools @@ -907,12 +898,12 @@ get_tools_for_instruction(PyCodeObject *code, PyInterpreterState *interp, int i, uint8_t tools; assert(event != PY_MONITORING_EVENT_LINE); assert(event != PY_MONITORING_EVENT_INSTRUCTION); - if (event >= PY_MONITORING_UNGROUPED_EVENTS) { + if (event >= _PY_MONITORING_UNGROUPED_EVENTS) { assert(event == PY_MONITORING_EVENT_C_RAISE || event == PY_MONITORING_EVENT_C_RETURN); event = PY_MONITORING_EVENT_CALL; } - if (event < PY_MONITORING_INSTRUMENTED_EVENTS) { + if (PY_MONITORING_IS_INSTRUMENTED_EVENT(event)) { 
CHECK(is_version_up_to_date(code, interp)); CHECK(instrumentation_cross_checks(interp, code)); if (code->_co_monitoring->tools) { @@ -933,6 +924,26 @@ get_tools_for_instruction(PyCodeObject *code, PyInterpreterState *interp, int i, return tools; } +static const char *const event_names [] = { + [PY_MONITORING_EVENT_PY_START] = "PY_START", + [PY_MONITORING_EVENT_PY_RESUME] = "PY_RESUME", + [PY_MONITORING_EVENT_PY_RETURN] = "PY_RETURN", + [PY_MONITORING_EVENT_PY_YIELD] = "PY_YIELD", + [PY_MONITORING_EVENT_CALL] = "CALL", + [PY_MONITORING_EVENT_LINE] = "LINE", + [PY_MONITORING_EVENT_INSTRUCTION] = "INSTRUCTION", + [PY_MONITORING_EVENT_JUMP] = "JUMP", + [PY_MONITORING_EVENT_BRANCH] = "BRANCH", + [PY_MONITORING_EVENT_C_RETURN] = "C_RETURN", + [PY_MONITORING_EVENT_PY_THROW] = "PY_THROW", + [PY_MONITORING_EVENT_RAISE] = "RAISE", + [PY_MONITORING_EVENT_RERAISE] = "RERAISE", + [PY_MONITORING_EVENT_EXCEPTION_HANDLED] = "EXCEPTION_HANDLED", + [PY_MONITORING_EVENT_C_RAISE] = "C_RAISE", + [PY_MONITORING_EVENT_PY_UNWIND] = "PY_UNWIND", + [PY_MONITORING_EVENT_STOP_ITERATION] = "STOP_ITERATION", +}; + static int call_instrumentation_vector( PyThreadState *tstate, int event, @@ -950,7 +961,7 @@ call_instrumentation_vector( /* Offset visible to user should be the offset in bytes, as that is the * convention for APIs involving code offsets. */ int bytes_offset = offset * (int)sizeof(_Py_CODEUNIT); - PyObject *offset_obj = PyLong_FromSsize_t(bytes_offset); + PyObject *offset_obj = PyLong_FromLong(bytes_offset); if (offset_obj == NULL) { return -1; } @@ -977,7 +988,18 @@ call_instrumentation_vector( } else { /* DISABLE */ - remove_tools(code, offset, event, 1 << tool); + if (!PY_MONITORING_IS_INSTRUMENTED_EVENT(event)) { + PyErr_Format(PyExc_ValueError, + "Cannot disable %s events. Callback removed.", + event_names[event]); + /* Clear tool to prevent infinite loop */ + Py_CLEAR(interp->monitoring_callables[tool][event]); + err = -1; + break; + } + else { + remove_tools(code, offset, event, 1 << tool); + } } } Py_DECREF(offset_obj); @@ -1059,16 +1081,6 @@ call_instrumentation_vector_protected( assert(_PyErr_Occurred(tstate)); } -void -_Py_call_instrumentation_exc0( - PyThreadState *tstate, int event, - _PyInterpreterFrame *frame, _Py_CODEUNIT *instr) -{ - assert(_PyErr_Occurred(tstate)); - PyObject *args[3] = { NULL, NULL, NULL }; - call_instrumentation_vector_protected(tstate, event, frame, instr, 2, args); -} - void _Py_call_instrumentation_exc2( PyThreadState *tstate, int event, @@ -1129,14 +1141,46 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, (interp->monitors.tools[PY_MONITORING_EVENT_LINE] | code->_co_monitoring->local_monitors.tools[PY_MONITORING_EVENT_LINE] ); - PyObject *line_obj = PyLong_FromSsize_t(line); + /* Special case sys.settrace to avoid boxing the line number, + * only to immediately unbox it. */ + if (tools & (1 << PY_MONITORING_SYS_TRACE_ID)) { + if (tstate->c_tracefunc != NULL && line >= 0) { + PyFrameObject *frame_obj = _PyFrame_GetFrameObject(frame); + if (frame_obj == NULL) { + return -1; + } + if (frame_obj->f_trace_lines) { + /* Need to set tracing and what_event as if using + * the instrumentation call. */ + int old_what = tstate->what_event; + tstate->what_event = PY_MONITORING_EVENT_LINE; + tstate->tracing++; + /* Call c_tracefunc directly, having set the line number. 
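The fast path here hands line events straight to the legacy c_tracefunc instead of boxing the line number for a Python-level callback. For context, a minimal sketch of the public-API shape of such a trace function; the function names are hypothetical, while PyEval_SetTrace(), Py_tracefunc, PyTrace_LINE and PyFrame_GetLineNumber() are the documented C API:

#include <stdio.h>
#include "Python.h"

static int
demo_tracefunc(PyObject *obj, PyFrameObject *frame, int what, PyObject *arg)
{
    if (what == PyTrace_LINE) {
        /* By the time this runs, f_lineno has been filled in by the
           caller, exactly as the fast path above does before the call. */
        printf("line %d\n", PyFrame_GetLineNumber(frame));
    }
    return 0;   /* non-zero signals an error with an exception set */
}

static void
demo_install_trace(void)
{
    /* The second argument becomes tstate->c_traceobj. */
    PyEval_SetTrace(demo_tracefunc, NULL);
}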
*/ + Py_INCREF(frame_obj); + frame_obj->f_lineno = line; + int err = tstate->c_tracefunc(tstate->c_traceobj, frame_obj, PyTrace_LINE, Py_None); + frame_obj->f_lineno = 0; + tstate->tracing--; + tstate->what_event = old_what; + Py_DECREF(frame_obj); + if (err) { + return -1; + } + } + } + tools &= (255 - (1 << PY_MONITORING_SYS_TRACE_ID)); + } + if (tools == 0) { + goto done; + } + PyObject *line_obj = PyLong_FromLong(line); if (line_obj == NULL) { return -1; } PyObject *args[3] = { NULL, (PyObject *)code, line_obj }; - while (tools) { + do { int tool = most_significant_bit(tools); - assert(tool >= 0 && tool < 8); + assert(tool >= 0 && tool < PY_MONITORING_SYS_PROFILE_ID); assert(tools & (1 << tool)); tools &= ~(1 << tool); int res = call_one_instrument(interp, tstate, &args[1], @@ -1154,7 +1198,7 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, /* DISABLE */ remove_line_tools(code, i, 1 << tool); } - } + } while (tools); Py_DECREF(line_obj); done: assert(original_opcode != 0); @@ -1183,7 +1227,7 @@ _Py_call_instrumentation_instruction(PyThreadState *tstate, _PyInterpreterFrame* code->_co_monitoring->local_monitors.tools[PY_MONITORING_EVENT_INSTRUCTION] ); int bytes_offset = offset * (int)sizeof(_Py_CODEUNIT); - PyObject *offset_obj = PyLong_FromSsize_t(bytes_offset); + PyObject *offset_obj = PyLong_FromLong(bytes_offset); if (offset_obj == NULL) { return -1; } @@ -1220,7 +1264,7 @@ _PyMonitoring_RegisterCallback(int tool_id, int event_id, PyObject *obj) { PyInterpreterState *is = _PyInterpreterState_GET(); assert(0 <= tool_id && tool_id < PY_MONITORING_TOOL_IDS); - assert(0 <= event_id && event_id < PY_MONITORING_EVENTS); + assert(0 <= event_id && event_id < _PY_MONITORING_EVENTS); PyObject *callback = is->monitoring_callables[tool_id][event_id]; is->monitoring_callables[tool_id][event_id] = Py_XNewRef(obj); return callback; @@ -1255,7 +1299,7 @@ initialize_tools(PyCodeObject *code) assert(event > 0); } assert(event >= 0); - assert(event < PY_MONITORING_INSTRUMENTED_EVENTS); + assert(PY_MONITORING_IS_INSTRUMENTED_EVENT(event)); tools[i] = code->_co_monitoring->active_monitors.tools[event]; CHECK(tools[i] != 0); } @@ -1653,7 +1697,7 @@ static void set_events(_Py_Monitors *m, int tool_id, _PyMonitoringEventSet events) { assert(0 <= tool_id && tool_id < PY_MONITORING_TOOL_IDS); - for (int e = 0; e < PY_MONITORING_UNGROUPED_EVENTS; e++) { + for (int e = 0; e < _PY_MONITORING_UNGROUPED_EVENTS; e++) { uint8_t *tools = &m->tools[e]; int val = (events >> e) & 1; *tools &= ~(1 << tool_id); @@ -1678,7 +1722,7 @@ _PyMonitoring_SetEvents(int tool_id, _PyMonitoringEventSet events) { assert(0 <= tool_id && tool_id < PY_MONITORING_TOOL_IDS); PyInterpreterState *interp = _PyInterpreterState_GET(); - assert(events < (1 << PY_MONITORING_UNGROUPED_EVENTS)); + assert(events < (1 << _PY_MONITORING_UNGROUPED_EVENTS)); if (check_tool(interp, tool_id)) { return -1; } @@ -1696,7 +1740,7 @@ _PyMonitoring_SetLocalEvents(PyCodeObject *code, int tool_id, _PyMonitoringEvent { assert(0 <= tool_id && tool_id < PY_MONITORING_TOOL_IDS); PyInterpreterState *interp = _PyInterpreterState_GET(); - assert(events < (1 << PY_MONITORING_UNGROUPED_EVENTS)); + assert(events < (1 << _PY_MONITORING_UNGROUPED_EVENTS)); if (check_tool(interp, tool_id)) { return -1; } @@ -1835,10 +1879,13 @@ monitoring_register_callback_impl(PyObject *module, int tool_id, int event, return NULL; } int event_id = _Py_bit_length(event)-1; - if (event_id < 0 || event_id >= PY_MONITORING_EVENTS) { + if (event_id < 0 || 
event_id >= _PY_MONITORING_EVENTS) { PyErr_Format(PyExc_ValueError, "invalid event %d", event); return NULL; } + if (PySys_Audit("sys.monitoring.register_callback", "O", func) < 0) { + return NULL; + } if (func == Py_None) { func = NULL; } @@ -1885,7 +1932,7 @@ monitoring_set_events_impl(PyObject *module, int tool_id, int event_set) if (check_valid_tool(tool_id)) { return NULL; } - if (event_set < 0 || event_set >= (1 << PY_MONITORING_EVENTS)) { + if (event_set < 0 || event_set >= (1 << _PY_MONITORING_EVENTS)) { PyErr_Format(PyExc_ValueError, "invalid event set 0x%x", event_set); return NULL; } @@ -1927,7 +1974,7 @@ monitoring_get_local_events_impl(PyObject *module, int tool_id, _PyMonitoringEventSet event_set = 0; _PyCoMonitoringData *data = ((PyCodeObject *)code)->_co_monitoring; if (data != NULL) { - for (int e = 0; e < PY_MONITORING_UNGROUPED_EVENTS; e++) { + for (int e = 0; e < _PY_MONITORING_UNGROUPED_EVENTS; e++) { if ((data->local_monitors.tools[e] >> tool_id) & 1) { event_set |= (1 << e); } @@ -1961,7 +2008,7 @@ monitoring_set_local_events_impl(PyObject *module, int tool_id, if (check_valid_tool(tool_id)) { return NULL; } - if (event_set < 0 || event_set >= (1 << PY_MONITORING_EVENTS)) { + if (event_set < 0 || event_set >= (1 << _PY_MONITORING_EVENTS)) { PyErr_Format(PyExc_ValueError, "invalid event set 0x%x", event_set); return NULL; } @@ -2010,25 +2057,6 @@ add_power2_constant(PyObject *obj, const char *name, int i) return err; } -static const char *const event_names [] = { - [PY_MONITORING_EVENT_PY_START] = "PY_START", - [PY_MONITORING_EVENT_PY_RESUME] = "PY_RESUME", - [PY_MONITORING_EVENT_PY_RETURN] = "PY_RETURN", - [PY_MONITORING_EVENT_PY_YIELD] = "PY_YIELD", - [PY_MONITORING_EVENT_CALL] = "CALL", - [PY_MONITORING_EVENT_LINE] = "LINE", - [PY_MONITORING_EVENT_INSTRUCTION] = "INSTRUCTION", - [PY_MONITORING_EVENT_JUMP] = "JUMP", - [PY_MONITORING_EVENT_BRANCH] = "BRANCH", - [PY_MONITORING_EVENT_C_RETURN] = "C_RETURN", - [PY_MONITORING_EVENT_PY_THROW] = "PY_THROW", - [PY_MONITORING_EVENT_RAISE] = "RAISE", - [PY_MONITORING_EVENT_EXCEPTION_HANDLED] = "EXCEPTION_HANDLED", - [PY_MONITORING_EVENT_C_RAISE] = "C_RAISE", - [PY_MONITORING_EVENT_PY_UNWIND] = "PY_UNWIND", - [PY_MONITORING_EVENT_STOP_ITERATION] = "STOP_ITERATION", -}; - /*[clinic input] monitoring._all_events [clinic start generated code]*/ @@ -2042,7 +2070,7 @@ monitoring__all_events_impl(PyObject *module) if (res == NULL) { return NULL; } - for (int e = 0; e < PY_MONITORING_UNGROUPED_EVENTS; e++) { + for (int e = 0; e < _PY_MONITORING_UNGROUPED_EVENTS; e++) { uint8_t tools = interp->monitors.tools[e]; if (tools == 0) { continue; @@ -2101,7 +2129,7 @@ PyObject *_Py_CreateMonitoringObject(void) if (err) { goto error; } - for (int i = 0; i < PY_MONITORING_EVENTS; i++) { + for (int i = 0; i < _PY_MONITORING_EVENTS; i++) { if (add_power2_constant(events, event_names[i], i)) { goto error; } diff --git a/Python/intrinsics.c b/Python/intrinsics.c index 037b74ca820fab..61a8e75872d2e2 100644 --- a/Python/intrinsics.c +++ b/Python/intrinsics.c @@ -14,7 +14,7 @@ /******** Unary functions ********/ static PyObject * -no_intrinsic(PyThreadState* tstate, PyObject *unused) +no_intrinsic1(PyThreadState* tstate, PyObject *unused) { _PyErr_SetString(tstate, PyExc_SystemError, "invalid intrinsic function"); return NULL; @@ -203,25 +203,35 @@ make_typevar(PyThreadState* Py_UNUSED(ignored), PyObject *v) return _Py_make_typevar(v, NULL, NULL); } -const instrinsic_func1 + +#define INTRINSIC_FUNC_ENTRY(N, F) \ + [N] = {F, #N}, + +const 
intrinsic_func1_info _PyIntrinsics_UnaryFunctions[] = { - [0] = no_intrinsic, - [INTRINSIC_PRINT] = print_expr, - [INTRINSIC_IMPORT_STAR] = import_star, - [INTRINSIC_STOPITERATION_ERROR] = stopiteration_error, - [INTRINSIC_ASYNC_GEN_WRAP] = _PyAsyncGenValueWrapperNew, - [INTRINSIC_UNARY_POSITIVE] = unary_pos, - [INTRINSIC_LIST_TO_TUPLE] = list_to_tuple, - [INTRINSIC_TYPEVAR] = make_typevar, - [INTRINSIC_PARAMSPEC] = _Py_make_paramspec, - [INTRINSIC_TYPEVARTUPLE] = _Py_make_typevartuple, - [INTRINSIC_SUBSCRIPT_GENERIC] = _Py_subscript_generic, - [INTRINSIC_TYPEALIAS] = _Py_make_typealias, + INTRINSIC_FUNC_ENTRY(INTRINSIC_1_INVALID, no_intrinsic1) + INTRINSIC_FUNC_ENTRY(INTRINSIC_PRINT, print_expr) + INTRINSIC_FUNC_ENTRY(INTRINSIC_IMPORT_STAR, import_star) + INTRINSIC_FUNC_ENTRY(INTRINSIC_STOPITERATION_ERROR, stopiteration_error) + INTRINSIC_FUNC_ENTRY(INTRINSIC_ASYNC_GEN_WRAP, _PyAsyncGenValueWrapperNew) + INTRINSIC_FUNC_ENTRY(INTRINSIC_UNARY_POSITIVE, unary_pos) + INTRINSIC_FUNC_ENTRY(INTRINSIC_LIST_TO_TUPLE, list_to_tuple) + INTRINSIC_FUNC_ENTRY(INTRINSIC_TYPEVAR, make_typevar) + INTRINSIC_FUNC_ENTRY(INTRINSIC_PARAMSPEC, _Py_make_paramspec) + INTRINSIC_FUNC_ENTRY(INTRINSIC_TYPEVARTUPLE, _Py_make_typevartuple) + INTRINSIC_FUNC_ENTRY(INTRINSIC_SUBSCRIPT_GENERIC, _Py_subscript_generic) + INTRINSIC_FUNC_ENTRY(INTRINSIC_TYPEALIAS, _Py_make_typealias) }; /******** Binary functions ********/ +static PyObject * +no_intrinsic2(PyThreadState* tstate, PyObject *unused1, PyObject *unused2) +{ + _PyErr_SetString(tstate, PyExc_SystemError, "invalid intrinsic function"); + return NULL; +} static PyObject * prep_reraise_star(PyThreadState* unused, PyObject *orig, PyObject *excs) @@ -246,10 +256,31 @@ make_typevar_with_constraints(PyThreadState* Py_UNUSED(ignored), PyObject *name, return _Py_make_typevar(name, NULL, evaluate_constraints); } -const instrinsic_func2 +const intrinsic_func2_info _PyIntrinsics_BinaryFunctions[] = { - [INTRINSIC_PREP_RERAISE_STAR] = prep_reraise_star, - [INTRINSIC_TYPEVAR_WITH_BOUND] = make_typevar_with_bound, - [INTRINSIC_TYPEVAR_WITH_CONSTRAINTS] = make_typevar_with_constraints, - [INTRINSIC_SET_FUNCTION_TYPE_PARAMS] = _Py_set_function_type_params, + INTRINSIC_FUNC_ENTRY(INTRINSIC_2_INVALID, no_intrinsic2) + INTRINSIC_FUNC_ENTRY(INTRINSIC_PREP_RERAISE_STAR, prep_reraise_star) + INTRINSIC_FUNC_ENTRY(INTRINSIC_TYPEVAR_WITH_BOUND, make_typevar_with_bound) + INTRINSIC_FUNC_ENTRY(INTRINSIC_TYPEVAR_WITH_CONSTRAINTS, make_typevar_with_constraints) + INTRINSIC_FUNC_ENTRY(INTRINSIC_SET_FUNCTION_TYPE_PARAMS, _Py_set_function_type_params) }; + +#undef INTRINSIC_FUNC_ENTRY + +PyObject* +_PyUnstable_GetUnaryIntrinsicName(int index) +{ + if (index < 0 || index > MAX_INTRINSIC_1) { + return NULL; + } + return PyUnicode_FromString(_PyIntrinsics_UnaryFunctions[index].name); +} + +PyObject* +_PyUnstable_GetBinaryIntrinsicName(int index) +{ + if (index < 0 || index > MAX_INTRINSIC_2) { + return NULL; + } + return PyUnicode_FromString(_PyIntrinsics_BinaryFunctions[index].name); +} diff --git a/Python/legacy_tracing.c b/Python/legacy_tracing.c index 9cc48fc9493a05..7774d10b10172b 100644 --- a/Python/legacy_tracing.c +++ b/Python/legacy_tracing.c @@ -64,6 +64,16 @@ sys_profile_func3( return call_profile_func(self, args[2]); } +static PyObject * +sys_profile_unwind( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 3); + return call_profile_func(self, Py_None); +} + static PyObject * 
sys_profile_call_or_return( _PyLegacyEventHandler *self, PyObject *const *args, @@ -152,6 +162,16 @@ sys_trace_func2( return call_trace_func(self, Py_None); } +static PyObject * +sys_trace_func3( + _PyLegacyEventHandler *self, PyObject *const *args, + size_t nargsf, PyObject *kwnames +) { + assert(kwnames == NULL); + assert(PyVectorcall_NARGS(nargsf) == 3); + return call_trace_func(self, Py_None); +} + static PyObject * sys_trace_return( _PyLegacyEventHandler *self, PyObject *const *args, @@ -362,7 +382,7 @@ _PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) return -1; } if (set_callbacks(PY_MONITORING_SYS_PROFILE_ID, - (vectorcallfunc)sys_profile_func2, PyTrace_RETURN, + (vectorcallfunc)sys_profile_unwind, PyTrace_RETURN, PY_MONITORING_EVENT_PY_UNWIND, -1)) { return -1; } @@ -425,7 +445,7 @@ _PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) return -1; } if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, - (vectorcallfunc)sys_trace_func2, PyTrace_CALL, + (vectorcallfunc)sys_trace_func3, PyTrace_CALL, PY_MONITORING_EVENT_PY_THROW, -1)) { return -1; } @@ -450,7 +470,7 @@ _PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) return -1; } if (set_callbacks(PY_MONITORING_SYS_TRACE_ID, - (vectorcallfunc)sys_trace_func2, PyTrace_RETURN, + (vectorcallfunc)sys_trace_func3, PyTrace_RETURN, PY_MONITORING_EVENT_PY_UNWIND, -1)) { return -1; } diff --git a/Python/makeopcodetargets.py b/Python/makeopcodetargets.py deleted file mode 100755 index 5843079b729936..00000000000000 --- a/Python/makeopcodetargets.py +++ /dev/null @@ -1,56 +0,0 @@ -#! /usr/bin/env python -"""Generate C code for the jump table of the threaded code interpreter -(for compilers supporting computed gotos or "labels-as-values", such as gcc). -""" - -import os -import sys - - -# 2023-04-27(warsaw): Pre-Python 3.12, this would catch ImportErrors and try to -# import imp, and then use imp.load_module(). The imp module was removed in -# Python 3.12 (and long deprecated before that), and it's unclear under what -# conditions this import will now fail, so the fallback was simply removed. -from importlib.machinery import SourceFileLoader - -def find_module(modname): - """Finds and returns a module in the local dist/checkout. - """ - modpath = os.path.join( - os.path.dirname(os.path.dirname(__file__)), "Lib", modname + ".py") - return SourceFileLoader(modname, modpath).load_module() - - -def write_contents(f): - """Write C code contents to the target file object. 
- """ - opcode = find_module('opcode') - _opcode_metadata = find_module('_opcode_metadata') - targets = ['_unknown_opcode'] * 256 - for opname, op in opcode.opmap.items(): - if not opcode.is_pseudo(op): - targets[op] = "TARGET_%s" % opname - next_op = 1 - for opname in _opcode_metadata._specialized_instructions: - while targets[next_op] != '_unknown_opcode': - next_op += 1 - targets[next_op] = "TARGET_%s" % opname - f.write("static void *opcode_targets[256] = {\n") - f.write(",\n".join([" &&%s" % s for s in targets])) - f.write("\n};\n") - - -def main(): - if len(sys.argv) >= 3: - sys.exit("Too many arguments") - if len(sys.argv) == 2: - target = sys.argv[1] - else: - target = "Python/opcode_targets.h" - with open(target, "w") as f: - write_contents(f) - print("Jump table written into %s" % target) - - -if __name__ == "__main__": - main() diff --git a/Python/marshal.c b/Python/marshal.c index 517220a4463cf3..8940582c7f5328 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -9,8 +9,9 @@ #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_code.h" // _PyCode_New() -#include "pycore_long.h" // _PyLong_DigitCount #include "pycore_hashtable.h" // _Py_hashtable_t +#include "pycore_long.h" // _PyLong_DigitCount +#include "pycore_setobject.h" // _PySet_NextEntry() #include "marshal.h" // Py_MARSHAL_VERSION /*[clinic input] diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index d84d253c912a28..210c37b37225bb 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -42,12 +42,12 @@ static void *opcode_targets[256] = { &&TARGET_FORMAT_SIMPLE, &&TARGET_FORMAT_WITH_SPEC, &&TARGET_BINARY_SUBSCR_LIST_INT, + &&TARGET_BINARY_SUBSCR_STR_INT, &&TARGET_BINARY_SUBSCR_TUPLE_INT, &&TARGET_STORE_SUBSCR_DICT, &&TARGET_STORE_SUBSCR_LIST_INT, &&TARGET_SEND_GEN, &&TARGET_UNPACK_SEQUENCE_TWO_TUPLE, - &&TARGET_UNPACK_SEQUENCE_TUPLE, &&TARGET_WITH_EXCEPT_START, &&TARGET_GET_AITER, &&TARGET_GET_ANEXT, @@ -55,39 +55,39 @@ static void *opcode_targets[256] = { &&TARGET_BEFORE_WITH, &&TARGET_END_ASYNC_FOR, &&TARGET_CLEANUP_THROW, + &&TARGET_UNPACK_SEQUENCE_TUPLE, &&TARGET_UNPACK_SEQUENCE_LIST, &&TARGET_STORE_ATTR_INSTANCE_VALUE, &&TARGET_STORE_ATTR_SLOT, - &&TARGET_STORE_ATTR_WITH_HINT, &&TARGET_STORE_SUBSCR, &&TARGET_DELETE_SUBSCR, + &&TARGET_STORE_ATTR_WITH_HINT, &&TARGET_LOAD_GLOBAL_MODULE, &&TARGET_LOAD_GLOBAL_BUILTIN, &&TARGET_LOAD_SUPER_ATTR_ATTR, &&TARGET_LOAD_SUPER_ATTR_METHOD, &&TARGET_LOAD_ATTR_INSTANCE_VALUE, - &&TARGET_LOAD_ATTR_MODULE, &&TARGET_GET_ITER, &&TARGET_GET_YIELD_FROM_ITER, - &&TARGET_LOAD_ATTR_WITH_HINT, + &&TARGET_LOAD_ATTR_MODULE, &&TARGET_LOAD_BUILD_CLASS, + &&TARGET_LOAD_ATTR_WITH_HINT, &&TARGET_LOAD_ATTR_SLOT, - &&TARGET_LOAD_ATTR_CLASS, &&TARGET_LOAD_ASSERTION_ERROR, &&TARGET_RETURN_GENERATOR, + &&TARGET_LOAD_ATTR_CLASS, &&TARGET_LOAD_ATTR_PROPERTY, &&TARGET_LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN, &&TARGET_LOAD_ATTR_METHOD_WITH_VALUES, &&TARGET_LOAD_ATTR_METHOD_NO_DICT, &&TARGET_LOAD_ATTR_METHOD_LAZY_DICT, &&TARGET_LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, - &&TARGET_LOAD_ATTR_NONDESCRIPTOR_NO_DICT, &&TARGET_RETURN_VALUE, - &&TARGET_COMPARE_OP_FLOAT, + &&TARGET_LOAD_ATTR_NONDESCRIPTOR_NO_DICT, &&TARGET_SETUP_ANNOTATIONS, - &&TARGET_COMPARE_OP_INT, + &&TARGET_COMPARE_OP_FLOAT, &&TARGET_LOAD_LOCALS, - &&TARGET_COMPARE_OP_STR, + &&TARGET_COMPARE_OP_INT, &&TARGET_POP_EXCEPT, &&TARGET_STORE_NAME, &&TARGET_DELETE_NAME, @@ -110,9 +110,9 @@ static void *opcode_targets[256] = { &&TARGET_IMPORT_NAME, &&TARGET_IMPORT_FROM, &&TARGET_JUMP_FORWARD, + 
&&TARGET_COMPARE_OP_STR, &&TARGET_FOR_ITER_LIST, &&TARGET_FOR_ITER_TUPLE, - &&TARGET_FOR_ITER_RANGE, &&TARGET_POP_JUMP_IF_FALSE, &&TARGET_POP_JUMP_IF_TRUE, &&TARGET_LOAD_GLOBAL, @@ -131,11 +131,11 @@ static void *opcode_targets[256] = { &&TARGET_POP_JUMP_IF_NONE, &&TARGET_RAISE_VARARGS, &&TARGET_GET_AWAITABLE, - &&TARGET_FOR_ITER_GEN, + &&TARGET_FOR_ITER_RANGE, &&TARGET_BUILD_SLICE, &&TARGET_JUMP_BACKWARD_NO_INTERRUPT, &&TARGET_MAKE_CELL, - &&TARGET_CALL_BOUND_METHOD_EXACT_ARGS, + &&TARGET_FOR_ITER_GEN, &&TARGET_LOAD_DEREF, &&TARGET_STORE_DEREF, &&TARGET_DELETE_DEREF, @@ -147,26 +147,26 @@ static void *opcode_targets[256] = { &&TARGET_LIST_APPEND, &&TARGET_SET_ADD, &&TARGET_MAP_ADD, - &&TARGET_CALL_PY_EXACT_ARGS, + &&TARGET_CALL_BOUND_METHOD_EXACT_ARGS, &&TARGET_COPY_FREE_VARS, &&TARGET_YIELD_VALUE, &&TARGET_RESUME, &&TARGET_MATCH_CLASS, + &&TARGET_CALL_PY_EXACT_ARGS, &&TARGET_CALL_PY_WITH_DEFAULTS, &&TARGET_CALL_NO_KW_TYPE_1, - &&TARGET_CALL_NO_KW_STR_1, &&TARGET_BUILD_CONST_KEY_MAP, &&TARGET_BUILD_STRING, &&TARGET_CONVERT_VALUE, + &&TARGET_CALL_NO_KW_STR_1, &&TARGET_CALL_NO_KW_TUPLE_1, &&TARGET_CALL_BUILTIN_CLASS, - &&TARGET_CALL_NO_KW_BUILTIN_O, &&TARGET_LIST_EXTEND, &&TARGET_SET_UPDATE, &&TARGET_DICT_MERGE, &&TARGET_DICT_UPDATE, + &&TARGET_CALL_NO_KW_BUILTIN_O, &&TARGET_CALL_NO_KW_BUILTIN_FAST, - &&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS, &&TARGET_LOAD_FAST_LOAD_FAST, &&TARGET_STORE_FAST_LOAD_FAST, &&TARGET_STORE_FAST_STORE_FAST, @@ -177,6 +177,7 @@ static void *opcode_targets[256] = { &&TARGET_LOAD_FROM_DICT_OR_GLOBALS, &&TARGET_LOAD_FROM_DICT_OR_DEREF, &&TARGET_SET_FUNCTION_ATTRIBUTE, + &&TARGET_CALL_BUILTIN_FAST_WITH_KEYWORDS, &&TARGET_CALL_NO_KW_LEN, &&TARGET_CALL_NO_KW_ISINSTANCE, &&TARGET_CALL_NO_KW_LIST_APPEND, @@ -228,7 +229,6 @@ static void *opcode_targets[256] = { &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&_unknown_opcode, &&TARGET_ENTER_EXECUTOR, &&_unknown_opcode, &&_unknown_opcode, diff --git a/Python/optimizer.c b/Python/optimizer.c index 3d385a1506cba3..6c730aa14b9a47 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -155,6 +155,7 @@ PyUnstable_SetOptimizer(_PyOptimizerObject *optimizer) _PyInterpreterFrame * _PyOptimizer_BackEdge(_PyInterpreterFrame *frame, _Py_CODEUNIT *src, _Py_CODEUNIT *dest, PyObject **stack_pointer) { + assert(src->op.code == JUMP_BACKWARD); PyCodeObject *code = (PyCodeObject *)frame->f_executable; assert(PyCode_Check(code)); PyInterpreterState *interp = _PyInterpreterState_GET(); @@ -319,13 +320,7 @@ uop_name(int index) { static Py_ssize_t uop_len(_PyUOpExecutorObject *self) { - int count = 0; - for (; count < _Py_UOP_MAX_TRACE_LENGTH; count++) { - if (self->trace[count].opcode == 0) { - break; - } - } - return count; + return Py_SIZE(self); } static PyObject * @@ -367,8 +362,8 @@ PySequenceMethods uop_as_sequence = { static PyTypeObject UOpExecutor_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) .tp_name = "uop_executor", - .tp_basicsize = sizeof(_PyUOpExecutorObject), - .tp_itemsize = 0, + .tp_basicsize = sizeof(_PyUOpExecutorObject) - sizeof(_PyUOpInstruction), + .tp_itemsize = sizeof(_PyUOpInstruction), .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, .tp_dealloc = (destructor)uop_dealloc, .tp_as_sequence = &uop_as_sequence, @@ -396,7 +391,7 @@ translate_bytecode_to_trace( #ifdef Py_DEBUG #define DPRINTF(level, ...) \ - if (lltrace >= (level)) { fprintf(stderr, __VA_ARGS__); } + if (lltrace >= (level)) { printf(__VA_ARGS__); } #else #define DPRINTF(level, ...) 
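The executor type now stores its trace inline as a variable-sized object, so its length is simply Py_SIZE() and the old zero-opcode sentinel scan is gone. A minimal sketch of that general pattern with a hypothetical type: tp_basicsize covers the header minus one element, tp_itemsize is the per-element size, and PyObject_NewVar() records the count in ob_size (tp_dealloc and PyType_Ready() are omitted for brevity).

#include <assert.h>
#include "Python.h"

typedef struct {
    PyObject_VAR_HEAD
    int items[1];                 /* really Py_SIZE(self) elements */
} DemoVarObject;

static PyTypeObject DemoVar_Type = {
    PyVarObject_HEAD_INIT(NULL, 0)
    .tp_name = "demo_var",
    .tp_basicsize = sizeof(DemoVarObject) - sizeof(int),
    .tp_itemsize = sizeof(int),
    .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION,
};

static PyObject *
demo_var_new(Py_ssize_t n)
{
    /* Allocates tp_basicsize + n * tp_itemsize and sets ob_size to n. */
    DemoVarObject *obj = PyObject_NewVar(DemoVarObject, &DemoVar_Type, n);
    if (obj == NULL) {
        return NULL;
    }
    for (Py_ssize_t i = 0; i < n; i++) {
        obj->items[i] = (int)i;
    }
    assert(Py_SIZE(obj) == n);    /* no sentinel needed to recover the length */
    return (PyObject *)obj;
}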
#endif @@ -578,7 +573,8 @@ translate_bytecode_to_trace( for (int i = 0; i < nuops; i++) { oparg = orig_oparg; uint64_t operand = 0; - int offset = expansion->uops[i].offset; + // Add one to account for the actual opcode/oparg pair: + int offset = expansion->uops[i].offset + 1; switch (expansion->uops[i].size) { case OPARG_FULL: if (extras && OPCODE_HAS_JUMP(opcode)) { @@ -697,15 +693,12 @@ uop_optimize( return trace_length; } OBJECT_STAT_INC(optimization_traces_created); - _PyUOpExecutorObject *executor = PyObject_New(_PyUOpExecutorObject, &UOpExecutor_Type); + _PyUOpExecutorObject *executor = PyObject_NewVar(_PyUOpExecutorObject, &UOpExecutor_Type, trace_length); if (executor == NULL) { return -1; } executor->base.execute = _PyUopExecute; memcpy(executor->trace, trace, trace_length * sizeof(_PyUOpInstruction)); - if (trace_length < _Py_UOP_MAX_TRACE_LENGTH) { - executor->trace[trace_length].opcode = 0; // Sentinel - } *exec_ptr = (_PyExecutorObject *)executor; return 1; } diff --git a/Python/pyhash.c b/Python/pyhash.c index d5ac9f83be61cc..b2bdab5099d86a 100644 --- a/Python/pyhash.c +++ b/Python/pyhash.c @@ -4,6 +4,7 @@ All the utility functions (_Py_Hash*()) return "-1" to signify an error. */ #include "Python.h" +#include "pycore_pyhash.h" // _Py_HashSecret_t #ifdef __APPLE__ # include diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index cf8b4379c1467f..0de3abf9407899 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -24,6 +24,7 @@ #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_runtime.h" // _Py_ID() #include "pycore_runtime_init.h" // _PyRuntimeState_INIT +#include "pycore_setobject.h" // _PySet_NextEntry() #include "pycore_sliceobject.h" // _PySlice_Fini() #include "pycore_sysmodule.h" // _PySys_ClearAuditHooks() #include "pycore_traceback.h" // _Py_DumpTracebackThreads() @@ -1192,7 +1193,11 @@ init_interp_main(PyThreadState *tstate) } if (enabled) { PyObject *opt = PyUnstable_Optimizer_NewUOpOptimizer(); + if (opt == NULL) { + return _PyStatus_ERR("can't initialize optimizer"); + } PyUnstable_SetOptimizer((_PyOptimizerObject *)opt); + Py_DECREF(opt); } } @@ -1916,11 +1921,11 @@ Py_FinalizeEx(void) } if (dump_refs) { - _Py_PrintReferences(stderr); + _Py_PrintReferences(tstate->interp, stderr); } if (dump_refs_fp != NULL) { - _Py_PrintReferences(dump_refs_fp); + _Py_PrintReferences(tstate->interp, dump_refs_fp); } #endif /* Py_TRACE_REFS */ @@ -1956,11 +1961,11 @@ Py_FinalizeEx(void) */ if (dump_refs) { - _Py_PrintReferenceAddresses(stderr); + _Py_PrintReferenceAddresses(tstate->interp, stderr); } if (dump_refs_fp != NULL) { - _Py_PrintReferenceAddresses(dump_refs_fp); + _Py_PrintReferenceAddresses(tstate->interp, dump_refs_fp); fclose(dump_refs_fp); } #endif /* Py_TRACE_REFS */ @@ -2070,6 +2075,8 @@ new_interpreter(PyThreadState **tstate_p, const PyInterpreterConfig *config) } has_gil = 1; + /* No objects have been created yet. 
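The init fix above also settles the ownership convention for optimizers: PyUnstable_SetOptimizer() keeps its own reference, so the creating code drops the one returned by the constructor. A minimal sketch of that pattern, assuming the unstable optimizer API used in init_interp_main() is reachable via Python.h (the function name is hypothetical):

#include "Python.h"

static int
demo_enable_uops(void)
{
    PyObject *opt = PyUnstable_Optimizer_NewUOpOptimizer();
    if (opt == NULL) {
        return -1;                 /* allocation failure must be checked */
    }
    PyUnstable_SetOptimizer((_PyOptimizerObject *)opt);
    Py_DECREF(opt);                /* the interpreter holds its own reference now */
    return 0;
}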
*/ + status = pycore_interp_init(tstate); if (_PyStatus_EXCEPTION(status)) { goto error; diff --git a/Python/pystate.c b/Python/pystate.c index a9b404bd5c93e3..3a05cb0fa7988d 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -674,16 +674,17 @@ init_interpreter(PyInterpreterState *interp, _obmalloc_pools_INIT(interp->obmalloc.pools); memcpy(&interp->obmalloc.pools.used, temp, sizeof(temp)); } + _PyObject_InitState(interp); _PyEval_InitState(interp, pending_lock); _PyGC_InitState(&interp->gc); PyConfig_InitPythonConfig(&interp->config); _PyType_InitCache(interp); - for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < _PY_MONITORING_UNGROUPED_EVENTS; i++) { interp->monitors.tools[i] = 0; } for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) { - for (int e = 0; e < PY_MONITORING_EVENTS; e++) { + for (int e = 0; e < _PY_MONITORING_EVENTS; e++) { interp->monitoring_callables[t][e] = NULL; } @@ -841,11 +842,11 @@ interpreter_clear(PyInterpreterState *interp, PyThreadState *tstate) Py_CLEAR(interp->audit_hooks); - for (int i = 0; i < PY_MONITORING_UNGROUPED_EVENTS; i++) { + for (int i = 0; i < _PY_MONITORING_UNGROUPED_EVENTS; i++) { interp->monitors.tools[i] = 0; } for (int t = 0; t < PY_MONITORING_TOOL_IDS; t++) { - for (int e = 0; e < PY_MONITORING_EVENTS; e++) { + for (int e = 0; e < _PY_MONITORING_EVENTS; e++) { Py_CLEAR(interp->monitoring_callables[t][e]); } } @@ -1133,7 +1134,7 @@ _PyInterpreterState_RequireIDRef(PyInterpreterState *interp, int required) } PyObject * -_PyInterpreterState_GetMainModule(PyInterpreterState *interp) +PyUnstable_InterpreterState_GetMainModule(PyInterpreterState *interp) { PyObject *modules = _PyImport_GetModules(interp); if (modules == NULL) { @@ -1640,75 +1641,6 @@ _PyThreadState_DeleteExcept(PyThreadState *tstate) } -//------------------------- -// "detached" thread states -//------------------------- - -void -_PyThreadState_InitDetached(PyThreadState *tstate, PyInterpreterState *interp) -{ - _PyRuntimeState *runtime = interp->runtime; - - HEAD_LOCK(runtime); - interp->threads.next_unique_id += 1; - uint64_t id = interp->threads.next_unique_id; - HEAD_UNLOCK(runtime); - - init_threadstate(tstate, interp, id); - // We do not call add_threadstate(). -} - -void -_PyThreadState_ClearDetached(PyThreadState *tstate) -{ - assert(!tstate->_status.bound); - assert(!tstate->_status.bound_gilstate); - assert(tstate->datastack_chunk == NULL); - assert(tstate->thread_id == 0); - assert(tstate->native_thread_id == 0); - assert(tstate->next == NULL); - assert(tstate->prev == NULL); - - PyThreadState_Clear(tstate); - clear_datastack(tstate); -} - -void -_PyThreadState_BindDetached(PyThreadState *tstate) -{ - assert(!_Py_IsMainInterpreter( - current_fast_get(tstate->interp->runtime)->interp)); - assert(_Py_IsMainInterpreter(tstate->interp)); - bind_tstate(tstate); - /* Unlike _PyThreadState_Bind(), we do not modify gilstate TSS. */ -} - -void -_PyThreadState_UnbindDetached(PyThreadState *tstate) -{ - assert(!_Py_IsMainInterpreter( - current_fast_get(tstate->interp->runtime)->interp)); - assert(_Py_IsMainInterpreter(tstate->interp)); - assert(tstate_is_alive(tstate)); - assert(!tstate->_status.active); - assert(gilstate_tss_get(tstate->interp->runtime) != tstate); - - unbind_tstate(tstate); - - /* This thread state may be bound/unbound repeatedly, - so we must erase evidence that it was ever bound (or unbound). 
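The renamed PyUnstable_InterpreterState_GetMainModule() is part of the unstable C API. A minimal usage sketch with hypothetical names, assuming it returns a new reference to the __main__ module of the given interpreter:

#include "Python.h"

static int
demo_print_main_file(void)
{
    PyInterpreterState *interp = PyInterpreterState_Get();
    PyObject *main_mod = PyUnstable_InterpreterState_GetMainModule(interp);
    if (main_mod == NULL) {
        return -1;
    }
    PyObject *file = PyObject_GetAttrString(main_mod, "__file__");
    Py_DECREF(main_mod);
    if (file == NULL) {
        PyErr_Clear();             /* __main__ may have no __file__ */
        return 0;
    }
    PyObject_Print(file, stdout, 0);
    Py_DECREF(file);
    return 0;
}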
*/ - tstate->_status.bound = 0; - tstate->_status.unbound = 0; - - /* We must fully unlink the thread state from any OS thread, - to allow it to be bound more than once. */ - tstate->thread_id = 0; -#ifdef PY_HAVE_THREAD_NATIVE_ID - tstate->native_thread_id = 0; -#endif -} - - //---------- // accessors //---------- diff --git a/Python/specialize.c b/Python/specialize.c index dcf4be712db20d..2d514c0dc476d3 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -1,5 +1,6 @@ #include "Python.h" #include "pycore_code.h" +#include "pycore_descrobject.h" // _PyMethodWrapper_Type #include "pycore_dict.h" #include "pycore_function.h" // _PyFunction_GetVersionForCurrentState() #include "pycore_global_strings.h" // _Py_ID() @@ -7,10 +8,9 @@ #include "pycore_moduleobject.h" #include "pycore_object.h" #include "pycore_opcode.h" // _PyOpcode_Caches -#include "structmember.h" // struct PyMemberDef, T_OFFSET_EX -#include "pycore_descrobject.h" #include "pycore_pylifecycle.h" // _PyOS_URandomNonblock() + #include // rand() /* For guidance on adding or extending families of instructions see @@ -18,7 +18,8 @@ */ #ifdef Py_STATS -PyStats _py_stats_struct = { 0 }; +GCStats _py_gc_stats[NUM_GENERATIONS] = { 0 }; +PyStats _py_stats_struct = { .gc_stats = &_py_gc_stats[0] }; PyStats *_py_stats = NULL; #define ADD_STAT_TO_DICT(res, field) \ @@ -191,6 +192,7 @@ print_object_stats(FILE *out, ObjectStats *stats) fprintf(out, "Object materialize dict (new key): %" PRIu64 "\n", stats->dict_materialized_new_key); fprintf(out, "Object materialize dict (too big): %" PRIu64 "\n", stats->dict_materialized_too_big); fprintf(out, "Object materialize dict (str subclass): %" PRIu64 "\n", stats->dict_materialized_str_subclass); + fprintf(out, "Object dematerialize dict: %" PRIu64 "\n", stats->dict_dematerialized); fprintf(out, "Object method cache hits: %" PRIu64 "\n", stats->type_cache_hits); fprintf(out, "Object method cache misses: %" PRIu64 "\n", stats->type_cache_misses); fprintf(out, "Object method cache collisions: %" PRIu64 "\n", stats->type_cache_collisions); @@ -202,17 +204,32 @@ print_object_stats(FILE *out, ObjectStats *stats) fprintf(out, "Optimization uops executed: %" PRIu64 "\n", stats->optimization_uops_executed); } +static void +print_gc_stats(FILE *out, GCStats *stats) +{ + for (int i = 0; i < NUM_GENERATIONS; i++) { + fprintf(out, "GC[%d] collections: %" PRIu64 "\n", i, stats[i].collections); + fprintf(out, "GC[%d] object visits: %" PRIu64 "\n", i, stats[i].object_visits); + fprintf(out, "GC[%d] objects collected: %" PRIu64 "\n", i, stats[i].objects_collected); + } +} + static void print_stats(FILE *out, PyStats *stats) { print_spec_stats(out, stats->opcode_stats); print_call_stats(out, &stats->call_stats); print_object_stats(out, &stats->object_stats); + print_gc_stats(out, stats->gc_stats); } void _Py_StatsClear(void) { + for (int i = 0; i < NUM_GENERATIONS; i++) { + _py_gc_stats[i] = (GCStats) { 0 }; + } _py_stats_struct = (PyStats) { 0 }; + _py_stats_struct.gc_stats = _py_gc_stats; } void @@ -347,7 +364,6 @@ _PyCode_Quicken(PyCodeObject *code) #define SPEC_FAIL_SUBSCR_ARRAY_SLICE 10 #define SPEC_FAIL_SUBSCR_LIST_SLICE 11 #define SPEC_FAIL_SUBSCR_TUPLE_SLICE 12 -#define SPEC_FAIL_SUBSCR_STRING_INT 13 #define SPEC_FAIL_SUBSCR_STRING_SLICE 14 #define SPEC_FAIL_SUBSCR_BUFFER_INT 15 #define SPEC_FAIL_SUBSCR_BUFFER_SLICE 16 @@ -621,7 +637,7 @@ analyze_descriptor(PyTypeObject *type, PyObject *name, PyObject **descr, int sto if (desc_cls == &PyMemberDescr_Type) { PyMemberDescrObject *member = 
(PyMemberDescrObject *)descriptor; struct PyMemberDef *dmem = member->d_member; - if (dmem->type == T_OBJECT_EX) { + if (dmem->type == Py_T_OBJECT_EX) { return OBJECT_SLOT; } return OTHER_SLOT; @@ -670,8 +686,10 @@ specialize_dict_access( return 0; } _PyAttrCache *cache = (_PyAttrCache *)(instr + 1); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - if (_PyDictOrValues_IsValues(dorv)) { + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + if (_PyDictOrValues_IsValues(*dorv) || + _PyObject_MakeInstanceAttributesFromDict(owner, dorv)) + { // Virtual dictionary PyDictKeysObject *keys = ((PyHeapTypeObject *)type)->ht_cached_keys; assert(PyUnicode_CheckExact(name)); @@ -689,12 +707,16 @@ specialize_dict_access( instr->op.code = values_op; } else { - PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(dorv); + PyDictObject *dict = (PyDictObject *)_PyDictOrValues_GetDict(*dorv); if (dict == NULL || !PyDict_CheckExact(dict)) { SPECIALIZATION_FAIL(base_op, SPEC_FAIL_NO_DICT); return 0; } // We found an instance with a __dict__. + if (dict->ma_values) { + SPECIALIZATION_FAIL(base_op, SPEC_FAIL_ATTR_NON_STRING_OR_SPLIT); + return 0; + } Py_ssize_t index = _PyDict_LookupIndex(dict, name); if (index != (uint16_t)index) { @@ -778,6 +800,10 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) if (!function_check_args(fget, 1, LOAD_ATTR)) { goto fail; } + if (instr->op.arg & 1) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD); + goto fail; + } uint32_t version = function_get_version(fget, LOAD_ATTR); if (version == 0) { goto fail; @@ -803,7 +829,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_EXPECTED_ERROR); goto fail; } - if (dmem->flags & PY_AUDIT_READ) { + if (dmem->flags & Py_AUDIT_READ) { SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_AUDITED_SLOT); goto fail; } @@ -811,7 +837,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_OUT_OF_RANGE); goto fail; } - assert(dmem->type == T_OBJECT_EX); + assert(dmem->type == Py_T_OBJECT_EX); assert(offset > 0); cache->index = (uint16_t)offset; write_u32(cache->version, type->tp_version_tag); @@ -844,6 +870,10 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) if (!function_check_args(descr, 2, LOAD_ATTR)) { goto fail; } + if (instr->op.arg & 1) { + SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_METHOD); + goto fail; + } uint32_t version = function_get_version(descr, LOAD_ATTR); if (version == 0) { goto fail; @@ -939,7 +969,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_EXPECTED_ERROR); goto fail; } - if (dmem->flags & READONLY) { + if (dmem->flags & Py_READONLY) { SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_ATTR_READ_ONLY); goto fail; } @@ -947,7 +977,7 @@ _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name) SPECIALIZATION_FAIL(STORE_ATTR, SPEC_FAIL_OUT_OF_RANGE); goto fail; } - assert(dmem->type == T_OBJECT_EX); + assert(dmem->type == Py_T_OBJECT_EX); assert(offset > 0); cache->index = (uint16_t)offset; write_u32(cache->version, type->tp_version_tag); @@ -1077,9 +1107,11 @@ PyObject *descr, DescriptorClassification kind, bool is_method) assert(descr != NULL); assert((is_method && kind == METHOD) || (!is_method && kind == NON_DESCRIPTOR)); if (owner_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT) { - PyDictOrValues dorv = 
*_PyObject_DictOrValuesPointer(owner); + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); PyDictKeysObject *keys = ((PyHeapTypeObject *)owner_cls)->ht_cached_keys; - if (!_PyDictOrValues_IsValues(dorv)) { + if (!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv)) + { SPECIALIZATION_FAIL(LOAD_ATTR, SPEC_FAIL_ATTR_HAS_MANAGED_DICT); return 0; } @@ -1244,16 +1276,7 @@ _Py_Specialize_LoadGlobal( static int binary_subscr_fail_kind(PyTypeObject *container_type, PyObject *sub) { - if (container_type == &PyUnicode_Type) { - if (PyLong_CheckExact(sub)) { - return SPEC_FAIL_SUBSCR_STRING_INT; - } - if (PySlice_Check(sub)) { - return SPEC_FAIL_SUBSCR_STRING_SLICE; - } - return SPEC_FAIL_OTHER; - } - else if (strcmp(container_type->tp_name, "array.array") == 0) { + if (strcmp(container_type->tp_name, "array.array") == 0) { if (PyLong_CheckExact(sub)) { return SPEC_FAIL_SUBSCR_ARRAY_INT; } @@ -1360,6 +1383,19 @@ _Py_Specialize_BinarySubscr( PySlice_Check(sub) ? SPEC_FAIL_SUBSCR_TUPLE_SLICE : SPEC_FAIL_OTHER); goto fail; } + if (container_type == &PyUnicode_Type) { + if (PyLong_CheckExact(sub)) { + if (_PyLong_IsNonNegativeCompact((PyLongObject *)sub)) { + instr->op.code = BINARY_SUBSCR_STR_INT; + goto success; + } + SPECIALIZATION_FAIL(BINARY_SUBSCR, SPEC_FAIL_OUT_OF_RANGE); + goto fail; + } + SPECIALIZATION_FAIL(BINARY_SUBSCR, + PySlice_Check(sub) ? SPEC_FAIL_SUBSCR_STRING_SLICE : SPEC_FAIL_OTHER); + goto fail; + } if (container_type == &PyDict_Type) { instr->op.code = BINARY_SUBSCR_DICT; goto success; diff --git a/Python/structmember.c b/Python/structmember.c index 19a75224a0f32e..7a5a6a49d23116 100644 --- a/Python/structmember.c +++ b/Python/structmember.c @@ -2,7 +2,7 @@ /* Map C struct members to Python object attributes */ #include "Python.h" -#include "structmember.h" // PyMemberDef + PyObject * PyMember_GetOne(const char *obj_addr, PyMemberDef *l) @@ -17,62 +17,62 @@ PyMember_GetOne(const char *obj_addr, PyMemberDef *l) const char* addr = obj_addr + l->offset; switch (l->type) { - case T_BOOL: + case Py_T_BOOL: v = PyBool_FromLong(*(char*)addr); break; - case T_BYTE: + case Py_T_BYTE: v = PyLong_FromLong(*(char*)addr); break; - case T_UBYTE: + case Py_T_UBYTE: v = PyLong_FromUnsignedLong(*(unsigned char*)addr); break; - case T_SHORT: + case Py_T_SHORT: v = PyLong_FromLong(*(short*)addr); break; - case T_USHORT: + case Py_T_USHORT: v = PyLong_FromUnsignedLong(*(unsigned short*)addr); break; - case T_INT: + case Py_T_INT: v = PyLong_FromLong(*(int*)addr); break; - case T_UINT: + case Py_T_UINT: v = PyLong_FromUnsignedLong(*(unsigned int*)addr); break; - case T_LONG: + case Py_T_LONG: v = PyLong_FromLong(*(long*)addr); break; - case T_ULONG: + case Py_T_ULONG: v = PyLong_FromUnsignedLong(*(unsigned long*)addr); break; - case T_PYSSIZET: + case Py_T_PYSSIZET: v = PyLong_FromSsize_t(*(Py_ssize_t*)addr); break; - case T_FLOAT: + case Py_T_FLOAT: v = PyFloat_FromDouble((double)*(float*)addr); break; - case T_DOUBLE: + case Py_T_DOUBLE: v = PyFloat_FromDouble(*(double*)addr); break; - case T_STRING: + case Py_T_STRING: if (*(char**)addr == NULL) { v = Py_NewRef(Py_None); } else v = PyUnicode_FromString(*(char**)addr); break; - case T_STRING_INPLACE: + case Py_T_STRING_INPLACE: v = PyUnicode_FromString((char*)addr); break; - case T_CHAR: + case Py_T_CHAR: v = PyUnicode_FromStringAndSize((char*)addr, 1); break; - case T_OBJECT: + case _Py_T_OBJECT: v = *(PyObject **)addr; if (v == NULL) v = Py_None; Py_INCREF(v); break; - case T_OBJECT_EX: + case 
Py_T_OBJECT_EX: v = *(PyObject **)addr; if (v == NULL) { PyObject *obj = (PyObject *)obj_addr; @@ -83,13 +83,13 @@ PyMember_GetOne(const char *obj_addr, PyMemberDef *l) } Py_XINCREF(v); break; - case T_LONGLONG: + case Py_T_LONGLONG: v = PyLong_FromLongLong(*(long long *)addr); break; - case T_ULONGLONG: + case Py_T_ULONGLONG: v = PyLong_FromUnsignedLongLong(*(unsigned long long *)addr); break; - case T_NONE: + case _Py_T_NONE: v = Py_NewRef(Py_None); break; default: @@ -118,27 +118,27 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) addr += l->offset; - if ((l->flags & READONLY)) + if ((l->flags & Py_READONLY)) { PyErr_SetString(PyExc_AttributeError, "readonly attribute"); return -1; } if (v == NULL) { - if (l->type == T_OBJECT_EX) { + if (l->type == Py_T_OBJECT_EX) { /* Check if the attribute is set. */ if (*(PyObject **)addr == NULL) { PyErr_SetString(PyExc_AttributeError, l->name); return -1; } } - else if (l->type != T_OBJECT) { + else if (l->type != _Py_T_OBJECT) { PyErr_SetString(PyExc_TypeError, "can't delete numeric/char attribute"); return -1; } } switch (l->type) { - case T_BOOL:{ + case Py_T_BOOL:{ if (!PyBool_Check(v)) { PyErr_SetString(PyExc_TypeError, "attribute value type must be bool"); @@ -150,7 +150,7 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) *(char*)addr = (char) 0; break; } - case T_BYTE:{ + case Py_T_BYTE:{ long long_val = PyLong_AsLong(v); if ((long_val == -1) && PyErr_Occurred()) return -1; @@ -161,7 +161,7 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) WARN("Truncation of value to char"); break; } - case T_UBYTE:{ + case Py_T_UBYTE:{ long long_val = PyLong_AsLong(v); if ((long_val == -1) && PyErr_Occurred()) return -1; @@ -170,7 +170,7 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) WARN("Truncation of value to unsigned char"); break; } - case T_SHORT:{ + case Py_T_SHORT:{ long long_val = PyLong_AsLong(v); if ((long_val == -1) && PyErr_Occurred()) return -1; @@ -179,7 +179,7 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) WARN("Truncation of value to short"); break; } - case T_USHORT:{ + case Py_T_USHORT:{ long long_val = PyLong_AsLong(v); if ((long_val == -1) && PyErr_Occurred()) return -1; @@ -188,7 +188,7 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) WARN("Truncation of value to unsigned short"); break; } - case T_INT:{ + case Py_T_INT:{ long long_val = PyLong_AsLong(v); if ((long_val == -1) && PyErr_Occurred()) return -1; @@ -197,7 +197,7 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) WARN("Truncation of value to int"); break; } - case T_UINT:{ + case Py_T_UINT:{ unsigned long ulong_val = PyLong_AsUnsignedLong(v); if ((ulong_val == (unsigned long)-1) && PyErr_Occurred()) { /* XXX: For compatibility, accept negative int values @@ -215,13 +215,13 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) WARN("Truncation of value to unsigned int"); break; } - case T_LONG:{ + case Py_T_LONG:{ *(long*)addr = PyLong_AsLong(v); if ((*(long*)addr == -1) && PyErr_Occurred()) return -1; break; } - case T_ULONG:{ + case Py_T_ULONG:{ *(unsigned long*)addr = PyLong_AsUnsignedLong(v); if ((*(unsigned long*)addr == (unsigned long)-1) && PyErr_Occurred()) { @@ -236,32 +236,32 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) } break; } - case T_PYSSIZET:{ + case Py_T_PYSSIZET:{ *(Py_ssize_t*)addr = PyLong_AsSsize_t(v); if ((*(Py_ssize_t*)addr == (Py_ssize_t)-1) && PyErr_Occurred()) return -1; break; } - case T_FLOAT:{ + case Py_T_FLOAT:{ double double_val = 
PyFloat_AsDouble(v); if ((double_val == -1) && PyErr_Occurred()) return -1; *(float*)addr = (float)double_val; break; } - case T_DOUBLE: + case Py_T_DOUBLE: *(double*)addr = PyFloat_AsDouble(v); if ((*(double*)addr == -1) && PyErr_Occurred()) return -1; break; - case T_OBJECT: - case T_OBJECT_EX: + case _Py_T_OBJECT: + case Py_T_OBJECT_EX: oldv = *(PyObject **)addr; *(PyObject **)addr = Py_XNewRef(v); Py_XDECREF(oldv); break; - case T_CHAR: { + case Py_T_CHAR: { const char *string; Py_ssize_t len; @@ -273,18 +273,18 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v) *(char*)addr = string[0]; break; } - case T_STRING: - case T_STRING_INPLACE: + case Py_T_STRING: + case Py_T_STRING_INPLACE: PyErr_SetString(PyExc_TypeError, "readonly attribute"); return -1; - case T_LONGLONG:{ + case Py_T_LONGLONG:{ long long value; *(long long*)addr = value = PyLong_AsLongLong(v); if ((value == -1) && PyErr_Occurred()) return -1; break; } - case T_ULONGLONG:{ + case Py_T_ULONGLONG:{ unsigned long long value; /* ??? PyLong_AsLongLong accepts an int, but PyLong_AsUnsignedLongLong doesn't ??? */ diff --git a/Python/symtable.c b/Python/symtable.c index e2c00d17480dd1..e9adbd5d29b1f9 100644 --- a/Python/symtable.c +++ b/Python/symtable.c @@ -3,7 +3,7 @@ #include "pycore_parser.h" // _PyParser_ASTFromString() #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_symtable.h" // PySTEntryObject -#include "structmember.h" // PyMemberDef + /* error strings used for warnings */ #define GLOBAL_PARAM \ @@ -171,14 +171,14 @@ ste_dealloc(PySTEntryObject *ste) #define OFF(x) offsetof(PySTEntryObject, x) static PyMemberDef ste_memberlist[] = { - {"id", T_OBJECT, OFF(ste_id), READONLY}, - {"name", T_OBJECT, OFF(ste_name), READONLY}, - {"symbols", T_OBJECT, OFF(ste_symbols), READONLY}, - {"varnames", T_OBJECT, OFF(ste_varnames), READONLY}, - {"children", T_OBJECT, OFF(ste_children), READONLY}, - {"nested", T_INT, OFF(ste_nested), READONLY}, - {"type", T_INT, OFF(ste_type), READONLY}, - {"lineno", T_INT, OFF(ste_lineno), READONLY}, + {"id", _Py_T_OBJECT, OFF(ste_id), Py_READONLY}, + {"name", _Py_T_OBJECT, OFF(ste_name), Py_READONLY}, + {"symbols", _Py_T_OBJECT, OFF(ste_symbols), Py_READONLY}, + {"varnames", _Py_T_OBJECT, OFF(ste_varnames), Py_READONLY}, + {"children", _Py_T_OBJECT, OFF(ste_children), Py_READONLY}, + {"nested", Py_T_INT, OFF(ste_nested), Py_READONLY}, + {"type", Py_T_INT, OFF(ste_type), Py_READONLY}, + {"lineno", Py_T_INT, OFF(ste_lineno), Py_READONLY}, {NULL} }; @@ -282,17 +282,10 @@ symtable_new(void) return NULL; } -/* When compiling the use of C stack is probably going to be a lot - lighter than when executing Python code but still can overflow - and causing a Python crash if not checked (e.g. eval("()"*300000)). - Using the current recursion limit for the compiler seems too - restrictive (it caused at least one test to fail) so a factor is - used to allow deeper recursion when compiling an expression. - - Using a scaling factor means this should automatically adjust when +/* Using a scaling factor means this should automatically adjust when the recursion limit is adjusted for small or large C stack allocations. 
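These conversions drop the last structmember.h includes: member tables now use the names exported by Python.h itself. A minimal sketch of a table in the new spelling (the struct and field names are hypothetical):

#include <stddef.h>    // offsetof()
#include "Python.h"    // PyMemberDef, Py_T_*, Py_READONLY

typedef struct {
    PyObject_HEAD
    PyObject *label;   /* exposed read-only, like the ste_* members above */
    int depth;
} DemoEntryObject;

static PyMemberDef demo_members[] = {
    {"label", Py_T_OBJECT_EX, offsetof(DemoEntryObject, label), Py_READONLY},
    {"depth", Py_T_INT,       offsetof(DemoEntryObject, depth), Py_READONLY},
    {NULL}             /* sentinel */
};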
*/ -#define COMPILER_STACK_FRAME_SCALE 3 +#define COMPILER_STACK_FRAME_SCALE 2 struct symtable * _PySymtable_Build(mod_ty mod, PyObject *filename, PyFutureFeatures *future) @@ -389,7 +382,7 @@ _PySymtable_Free(struct symtable *st) } PySTEntryObject * -PySymtable_Lookup(struct symtable *st, void *key) +_PySymtable_Lookup(struct symtable *st, void *key) { PyObject *k, *v; diff --git a/Python/sysmodule.c b/Python/sysmodule.c index fea3f61ee01762..be026d95ba7e77 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -17,12 +17,13 @@ Data members: #include "Python.h" #include "pycore_call.h" // _PyObject_CallNoArgs() #include "pycore_ceval.h" // _PyEval_SetAsyncGenFinalizer() +#include "pycore_dict.h" // _PyDict_GetItemWithError() #include "pycore_frame.h" // _PyInterpreterFrame #include "pycore_initconfig.h" // _PyStatus_EXCEPTION() #include "pycore_long.h" // _PY_LONG_MAX_STR_DIGITS_THRESHOLD #include "pycore_modsupport.h" // _PyModule_CreateInitialized() #include "pycore_namespace.h" // _PyNamespace_New() -#include "pycore_object.h" // _PyObject_IS_GC() +#include "pycore_object.h" // _PyObject_IS_GC(), _PyObject_DebugTypeStats() #include "pycore_pathconfig.h" // _PyPathConfig_ComputeSysPath0() #include "pycore_pyerrors.h" // _PyErr_GetRaisedException() #include "pycore_pylifecycle.h" // _PyErr_WriteUnraisableDefaultHook() diff --git a/Python/traceback.c b/Python/traceback.c index c5787b5ea4678c..ca524b1b9af78b 100644 --- a/Python/traceback.c +++ b/Python/traceback.c @@ -17,7 +17,7 @@ #include "../Parser/pegen.h" // _PyPegen_byte_offset_to_character_offset() #include "frameobject.h" // PyFrame_New() -#include "structmember.h" // PyMemberDef + #include "osdefs.h" // SEP #ifdef HAVE_FCNTL_H # include @@ -148,9 +148,9 @@ static PyMethodDef tb_methods[] = { }; static PyMemberDef tb_memberlist[] = { - {"tb_frame", T_OBJECT, OFF(tb_frame), READONLY|PY_AUDIT_READ}, - {"tb_lasti", T_INT, OFF(tb_lasti), READONLY}, - {"tb_lineno", T_INT, OFF(tb_lineno), READONLY}, + {"tb_frame", _Py_T_OBJECT, OFF(tb_frame), Py_READONLY|Py_AUDIT_READ}, + {"tb_lasti", Py_T_INT, OFF(tb_lasti), Py_READONLY}, + {"tb_lineno", Py_T_INT, OFF(tb_lineno), Py_READONLY}, {NULL} /* Sentinel */ }; diff --git a/README.rst b/README.rst index 4ab26565a13e03..208bf8cec444a3 100644 --- a/README.rst +++ b/README.rst @@ -211,30 +211,6 @@ primary version, you would execute ``make install`` in your 3.13 build directory and ``make altinstall`` in the others. -Issue Tracker and Mailing List ------------------------------- - -Bug reports are welcome! You can use Github to `report bugs -`_, and/or `submit pull requests -`_. - -You can also follow development discussion on the `python-dev mailing list -`_. - - -Proposals for enhancement -------------------------- - -If you have a proposal to change Python, you may want to send an email to the -`comp.lang.python`_ or `python-ideas`_ mailing lists for initial feedback. A -Python Enhancement Proposal (PEP) may be submitted if your idea gains ground. -All current PEPs, as well as guidelines for submitting a new PEP, are listed at -`peps.python.org `_. - -.. _python-ideas: https://mail.python.org/mailman/listinfo/python-ideas/ -.. 
_comp.lang.python: https://mail.python.org/mailman/listinfo/python-list - - Release Schedule ---------------- diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py index b084d3e457f782..a11fe6a62811ab 100644 --- a/Tools/build/deepfreeze.py +++ b/Tools/build/deepfreeze.py @@ -208,6 +208,7 @@ def generate_unicode(self, name: str, s: str) -> str: self.write(".kind = 1,") self.write(".compact = 1,") self.write(".ascii = 1,") + self.write(".statically_allocated = 1,") self.write(f"._data = {make_string_literal(s.encode('ascii'))},") return f"& {name}._ascii.ob_base" else: @@ -220,6 +221,7 @@ def generate_unicode(self, name: str, s: str) -> str: self.write(f".kind = {kind},") self.write(".compact = 1,") self.write(".ascii = 0,") + self.write(".statically_allocated = 1,") utf8 = s.encode('utf-8') self.write(f'.utf8 = {make_string_literal(utf8)},') self.write(f'.utf8_length = {len(utf8)},') diff --git a/Tools/build/generate_opcode_h.py b/Tools/build/generate_opcode_h.py index 5b0560e6b21a99..67f4a2c2d5d76f 100644 --- a/Tools/build/generate_opcode_h.py +++ b/Tools/build/generate_opcode_h.py @@ -6,7 +6,7 @@ SCRIPT_NAME = "Tools/build/generate_opcode_h.py" PYTHON_OPCODE = "Lib/opcode.py" -header = f""" +opcode_h_header = f""" // Auto-generated by {SCRIPT_NAME} from {PYTHON_OPCODE} #ifndef Py_OPCODE_H @@ -15,11 +15,11 @@ extern "C" {{ #endif +#include "opcode_ids.h" -/* Instruction opcodes for compiled code */ """.lstrip() -footer = """ +opcode_h_footer = """ #ifdef __cplusplus } @@ -27,6 +27,27 @@ #endif /* !Py_OPCODE_H */ """ +opcode_ids_h_header = f""" +// Auto-generated by {SCRIPT_NAME} from {PYTHON_OPCODE} + +#ifndef Py_OPCODE_IDS_H +#define Py_OPCODE_IDS_H +#ifdef __cplusplus +extern "C" {{ +#endif + + +/* Instruction opcodes for compiled code */ +""".lstrip() + +opcode_ids_h_footer = """ + +#ifdef __cplusplus +} +#endif +#endif /* !Py_OPCODE_IDS_H */ +""" + internal_header = f""" // Auto-generated by {SCRIPT_NAME} from {PYTHON_OPCODE} @@ -50,18 +71,6 @@ #endif // !Py_INTERNAL_OPCODE_H """ -intrinsic_header = f""" -// Auto-generated by {SCRIPT_NAME} from {PYTHON_OPCODE} - -""".lstrip() - -intrinsic_footer = """ -typedef PyObject *(*instrinsic_func1)(PyThreadState* tstate, PyObject *value); -typedef PyObject *(*instrinsic_func2)(PyThreadState* tstate, PyObject *value1, PyObject *value2); -extern const instrinsic_func1 _PyIntrinsics_UnaryFunctions[]; -extern const instrinsic_func2 _PyIntrinsics_BinaryFunctions[]; -""" - DEFINE = "#define {:<38} {:>3}\n" UINT32_MASK = (1<<32)-1 @@ -75,20 +84,17 @@ def get_python_module_dict(filename): def main(opcode_py, _opcode_metadata_py='Lib/_opcode_metadata.py', - outfile='Include/opcode.h', - internaloutfile='Include/internal/pycore_opcode.h', - intrinsicoutfile='Include/internal/pycore_intrinsics.h'): + opcode_ids_h='Include/opcode_ids.h', + opcode_h='Include/opcode.h', + opcode_targets_h='Python/opcode_targets.h', + internal_opcode_h='Include/internal/pycore_opcode.h'): _opcode_metadata = get_python_module_dict(_opcode_metadata_py) opcode = get_python_module_dict(opcode_py) opmap = opcode['opmap'] opname = opcode['opname'] - is_pseudo = opcode['is_pseudo'] - ENABLE_SPECIALIZATION = opcode["ENABLE_SPECIALIZATION"] - MIN_PSEUDO_OPCODE = opcode["MIN_PSEUDO_OPCODE"] - MAX_PSEUDO_OPCODE = opcode["MAX_PSEUDO_OPCODE"] MIN_INSTRUMENTED_OPCODE = opcode["MIN_INSTRUMENTED_OPCODE"] NUM_OPCODES = len(opname) @@ -107,42 +113,47 @@ def main(opcode_py, opname_including_specialized[next_op] = name used[next_op] = True - with open(outfile, 'w') as fobj, 
open(internaloutfile, 'w') as iobj, open( - intrinsicoutfile, "w") as nobj: - fobj.write(header) - iobj.write(internal_header) - nobj.write(intrinsic_header) + with open(opcode_ids_h, 'w') as fobj: + fobj.write(opcode_ids_h_header) for name in opname: if name in opmap: op = opmap[name] - if op == MIN_PSEUDO_OPCODE: - fobj.write(DEFINE.format("MIN_PSEUDO_OPCODE", MIN_PSEUDO_OPCODE)) if op == MIN_INSTRUMENTED_OPCODE: fobj.write(DEFINE.format("MIN_INSTRUMENTED_OPCODE", MIN_INSTRUMENTED_OPCODE)) fobj.write(DEFINE.format(name, op)) - if op == MAX_PSEUDO_OPCODE: - fobj.write(DEFINE.format("MAX_PSEUDO_OPCODE", MAX_PSEUDO_OPCODE)) - for name, op in specialized_opmap.items(): fobj.write(DEFINE.format(name, op)) + fobj.write(opcode_ids_h_footer) + + with open(opcode_h, 'w') as fobj: + fobj.write(opcode_h_header) + + fobj.write("\n") + for i, (op, _) in enumerate(opcode["_nb_ops"]): + fobj.write(DEFINE.format(op, i)) + + fobj.write(opcode_h_footer) + + with open(internal_opcode_h, 'w') as iobj: + iobj.write(internal_header) + iobj.write("\nextern const uint8_t _PyOpcode_Caches[256];\n") iobj.write("\nextern const uint8_t _PyOpcode_Deopt[256];\n") iobj.write("\n#ifdef NEED_OPCODE_TABLES\n") iobj.write("\nconst uint8_t _PyOpcode_Caches[256] = {\n") - for i, entries in enumerate(opcode["_inline_cache_entries"]): - if entries: - iobj.write(f" [{opname[i]}] = {entries},\n") + for name, entries in opcode["_inline_cache_entries"].items(): + iobj.write(f" [{name}] = {entries},\n") iobj.write("};\n") deoptcodes = {} for basic, op in opmap.items(): - if not is_pseudo(op): + if op < 256: deoptcodes[basic] = basic for basic, family in _opcode_metadata["_specializations"].items(): for specialized in family: @@ -153,30 +164,6 @@ def main(opcode_py, iobj.write("};\n") iobj.write("#endif // NEED_OPCODE_TABLES\n") - fobj.write("\n") - for i, (op, _) in enumerate(opcode["_nb_ops"]): - fobj.write(DEFINE.format(op, i)) - - nobj.write("/* Unary Functions: */") - nobj.write("\n") - for i, op in enumerate(opcode["_intrinsic_1_descs"]): - nobj.write(DEFINE.format(op, i)) - nobj.write("\n") - nobj.write(DEFINE.format("MAX_INTRINSIC_1", i)) - - nobj.write("\n\n") - nobj.write("/* Binary Functions: */\n") - for i, op in enumerate(opcode["_intrinsic_2_descs"]): - nobj.write(DEFINE.format(op, i)) - nobj.write("\n") - nobj.write(DEFINE.format("MAX_INTRINSIC_2", i)) - - nobj.write(intrinsic_footer) - - fobj.write("\n") - fobj.write("/* Defined in Lib/opcode.py */\n") - fobj.write(f"#define ENABLE_SPECIALIZATION {int(ENABLE_SPECIALIZATION)}") - iobj.write("\n") iobj.write(f"\nextern const char *const _PyOpcode_OpName[{NUM_OPCODES}];\n") iobj.write("\n#ifdef NEED_OPCODE_TABLES\n") @@ -195,12 +182,21 @@ def main(opcode_py, iobj.write(f" case {i}: \\\n") iobj.write(" ;\n") - fobj.write(footer) iobj.write(internal_footer) + with open(opcode_targets_h, "w") as f: + targets = ["_unknown_opcode"] * 256 + for op, name in enumerate(opname_including_specialized): + if op < 256 and not name.startswith("<"): + targets[op] = f"TARGET_{name}" + + f.write("static void *opcode_targets[256] = {\n") + f.write(",\n".join([f" &&{s}" for s in targets])) + f.write("\n};\n") - print(f"{outfile} regenerated from {opcode_py}") + print(f"{opcode_h} regenerated from {opcode_py}") if __name__ == '__main__': - main(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], sys.argv[5]) + main(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4], + sys.argv[5], sys.argv[6]) diff --git a/Tools/build/umarshal.py b/Tools/build/umarshal.py index 
f61570cbaff751..e05d93cf23c921 100644 --- a/Tools/build/umarshal.py +++ b/Tools/build/umarshal.py @@ -125,10 +125,10 @@ def r_long64(self) -> int: x |= buf[1] << 8 x |= buf[2] << 16 x |= buf[3] << 24 - x |= buf[1] << 32 - x |= buf[1] << 40 - x |= buf[1] << 48 - x |= buf[1] << 56 + x |= buf[4] << 32 + x |= buf[5] << 40 + x |= buf[6] << 48 + x |= buf[7] << 56 x |= -(x & (1<<63)) # Sign-extend return x diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index 90bbc8928b428a..28bd2a4430d14e 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -54,7 +54,7 @@ Objects/genobject.c - _PyAsyncGenASend_Type - Objects/genobject.c - _PyAsyncGenAThrow_Type - Objects/genobject.c - _PyAsyncGenWrappedValue_Type - Objects/genobject.c - _PyCoroWrapper_Type - -Objects/interpreteridobject.c - _PyInterpreterID_Type - +Objects/interpreteridobject.c - PyInterpreterID_Type - Objects/iterobject.c - PyCallIter_Type - Objects/iterobject.c - PySeqIter_Type - Objects/listobject.c - PyListIter_Type - @@ -322,6 +322,8 @@ Modules/_testcapi/vectorcall.c - MethodDescriptorBase_Type - Modules/_testcapi/vectorcall.c - MethodDescriptorDerived_Type - Modules/_testcapi/vectorcall.c - MethodDescriptorNopGet_Type - Modules/_testcapi/vectorcall.c - MethodDescriptor2_Type - +Modules/_testclinic.c - DeprStarInit - +Modules/_testclinic.c - DeprStarNew - ################################## @@ -421,11 +423,8 @@ Modules/_datetimemodule.c - us_per_hour - Modules/_datetimemodule.c - us_per_day - Modules/_datetimemodule.c - us_per_week - Modules/_datetimemodule.c - seconds_per_day - -Modules/_decimal/_decimal.c - global_state - ## state -Modules/_asynciomodule.c - fi_freelist - -Modules/_asynciomodule.c - fi_freelist_len - Modules/_ctypes/_ctypes.c - _ctypes_ptrtype_cache - Modules/_ctypes/_ctypes.c - global_state - Modules/_ctypes/ctypes.h - global_state - diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index 73eec6631d9512..c64d391bae13bd 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -205,14 +205,15 @@ Modules/_datetimemodule.c - max_fold_seconds - Modules/_datetimemodule.c datetime_isoformat specs - Modules/_datetimemodule.c parse_hh_mm_ss_ff correction - Modules/_datetimemodule.c time_isoformat specs - -Modules/_decimal/_decimal.c - cond_map - +Modules/_decimal/_decimal.c - cond_map_template - Modules/_decimal/_decimal.c - dec_signal_string - Modules/_decimal/_decimal.c - dflt_ctx - Modules/_decimal/_decimal.c - int_constants - Modules/_decimal/_decimal.c - invalid_rounding_err - Modules/_decimal/_decimal.c - invalid_signals_err - -Modules/_decimal/_decimal.c - signal_map - +Modules/_decimal/_decimal.c - signal_map_template - Modules/_decimal/_decimal.c - ssize_constants - +Modules/_decimal/_decimal.c - INVALID_SIGNALDICT_ERROR_MSG - Modules/_elementtree.c - ExpatMemoryHandler - Modules/_hashopenssl.c - py_hashes - Modules/_hacl/Hacl_Hash_SHA1.c - _h0 - @@ -326,7 +327,7 @@ Parser/parser.c - reserved_keywords - Parser/parser.c - soft_keywords - Parser/tokenizer.c - type_comment_prefix - Python/ast_opt.c fold_unaryop ops - -Python/ceval.c - binary_ops - +Python/ceval.c - _PyEval_BinaryOps - Python/ceval.c - _Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS - Python/codecs.c - Py_hexdigits - Python/codecs.c - ucnhash_capi - @@ -418,6 +419,7 @@ Modules/_testbuffer.c staticarray_init kwlist - Modules/_testcapi/buffer.c - testBufType - 
Modules/_testcapi/code.c get_code_extra_index key - Modules/_testcapi/datetime.c - test_run_counter - +Modules/_testcapi/docstring.c - DocStringNoSignatureTest - Modules/_testcapi/exceptions.c - PyRecursingInfinitelyError_Type - Modules/_testcapi/heaptype.c - _testcapimodule - Modules/_testcapi/mem.c - FmData - @@ -428,6 +430,7 @@ Modules/_testcapi/watchers.c - g_dict_watch_events - Modules/_testcapi/watchers.c - g_dict_watchers_installed - Modules/_testcapi/watchers.c - g_type_modified_events - Modules/_testcapi/watchers.c - g_type_watchers_installed - +Modules/_testcapi/watchers.c - code_watcher_ids - Modules/_testcapi/watchers.c - num_code_object_created_events - Modules/_testcapi/watchers.c - num_code_object_destroyed_events - Modules/_testcapi/watchers.c - pyfunc_watchers - diff --git a/Tools/cases_generator/README.md b/Tools/cases_generator/README.md index fc9331656fe787..ed802e44f31ad5 100644 --- a/Tools/cases_generator/README.md +++ b/Tools/cases_generator/README.md @@ -7,10 +7,14 @@ What's currently here: - `lexer.py`: lexer for C, originally written by Mark Shannon - `plexer.py`: OO interface on top of lexer.py; main class: `PLexer` -- `parser.py`: Parser for instruction definition DSL; main class `Parser` +- `parsing.py`: Parser for instruction definition DSL; main class `Parser` - `generate_cases.py`: driver script to read `Python/bytecodes.c` and write `Python/generated_cases.c.h` (and several other files) -- `test_generator.py`: tests, require manual running using `pytest` +- `analysis.py`: `Analyzer` class used to read the input files +- `flags.py`: abstractions related to metadata flags for instructions +- `formatting.py`: `Formatter` class used to write the output files +- `instructions.py`: classes to analyze and write instructions +- `stacking.py`: code to handle generalized stack effects Note that there is some dummy C code at the top and bottom of `Python/bytecodes.c` diff --git a/Tools/cases_generator/analysis.py b/Tools/cases_generator/analysis.py new file mode 100644 index 00000000000000..2db1cd01c19ae5 --- /dev/null +++ b/Tools/cases_generator/analysis.py @@ -0,0 +1,411 @@ +import re +import sys +import typing + +from flags import InstructionFlags, variable_used +from formatting import prettify_filename, UNUSED +from instructions import ( + ActiveCacheEffect, + Component, + Instruction, + InstructionOrCacheEffect, + MacroInstruction, + MacroParts, + OverriddenInstructionPlaceHolder, + PseudoInstruction, +) +import parsing +from parsing import StackEffect + +BEGIN_MARKER = "// BEGIN BYTECODES //" +END_MARKER = "// END BYTECODES //" + +RESERVED_WORDS = { + "co_consts": "Use FRAME_CO_CONSTS.", + "co_names": "Use FRAME_CO_NAMES.", +} + +RE_PREDICTED = r"^\s*(?:GO_TO_INSTRUCTION\(|DEOPT_IF\(.*?,\s*)(\w+)\);\s*(?://.*)?$" + + +class Analyzer: + """Parse input, analyze it, and write to output.""" + + input_filenames: list[str] + errors: int = 0 + warnings: int = 0 + + def __init__(self, input_filenames: list[str]): + self.input_filenames = input_filenames + + def message(self, msg: str, node: parsing.Node) -> None: + lineno = 0 + filename = "" + if context := node.context: + filename = context.owner.filename + # Use line number of first non-comment in the node + for token in context.owner.tokens[context.begin : context.end]: + lineno = token.line + if token.kind != "COMMENT": + break + print(f"{filename}:{lineno}: {msg}", file=sys.stderr) + + def error(self, msg: str, node: parsing.Node) -> None: + self.message("error: " + msg, node) + self.errors += 1 + + def 
warning(self, msg: str, node: parsing.Node) -> None: + self.message("warning: " + msg, node) + self.warnings += 1 + + def note(self, msg: str, node: parsing.Node) -> None: + self.message("note: " + msg, node) + + everything: list[ + parsing.InstDef + | parsing.Macro + | parsing.Pseudo + | OverriddenInstructionPlaceHolder + ] + instrs: dict[str, Instruction] # Includes ops + macros: dict[str, parsing.Macro] + macro_instrs: dict[str, MacroInstruction] + families: dict[str, parsing.Family] + pseudos: dict[str, parsing.Pseudo] + pseudo_instrs: dict[str, PseudoInstruction] + + def parse(self) -> None: + """Parse the source text. + + We only want the parser to see the stuff between the + begin and end markers. + """ + + self.everything = [] + self.instrs = {} + self.macros = {} + self.families = {} + self.pseudos = {} + + instrs_idx: dict[str, int] = dict() + + for filename in self.input_filenames: + self.parse_file(filename, instrs_idx) + + files = " + ".join(self.input_filenames) + n_instrs = 0 + n_ops = 0 + for instr in self.instrs.values(): + if instr.kind == "op": + n_ops += 1 + else: + n_instrs += 1 + print( + f"Read {n_instrs} instructions, {n_ops} ops, " + f"{len(self.macros)} macros, {len(self.pseudos)} pseudos, " + f"and {len(self.families)} families from {files}", + file=sys.stderr, + ) + + def parse_file(self, filename: str, instrs_idx: dict[str, int]) -> None: + with open(filename) as file: + src = file.read() + + psr = parsing.Parser(src, filename=prettify_filename(filename)) + + # Skip until begin marker + while tkn := psr.next(raw=True): + if tkn.text == BEGIN_MARKER: + break + else: + raise psr.make_syntax_error( + f"Couldn't find {BEGIN_MARKER!r} in {psr.filename}" + ) + start = psr.getpos() + + # Find end marker, then delete everything after it + while tkn := psr.next(raw=True): + if tkn.text == END_MARKER: + break + del psr.tokens[psr.getpos() - 1 :] + + # Parse from start + psr.setpos(start) + thing: parsing.Node | None + thing_first_token = psr.peek() + while thing := psr.definition(): + thing = typing.cast( + parsing.InstDef | parsing.Macro | parsing.Pseudo | parsing.Family, thing + ) + if ws := [w for w in RESERVED_WORDS if variable_used(thing, w)]: + self.error( + f"'{ws[0]}' is a reserved word. {RESERVED_WORDS[ws[0]]}", thing + ) + + match thing: + case parsing.InstDef(name=name): + if name in self.instrs: + if not thing.override: + raise psr.make_syntax_error( + f"Duplicate definition of '{name}' @ {thing.context} " + f"previous definition @ {self.instrs[name].inst.context}", + thing_first_token, + ) + self.everything[ + instrs_idx[name] + ] = OverriddenInstructionPlaceHolder(name=name) + if name not in self.instrs and thing.override: + raise psr.make_syntax_error( + f"Definition of '{name}' @ {thing.context} is supposed to be " + "an override but no previous definition exists.", + thing_first_token, + ) + self.instrs[name] = Instruction(thing) + instrs_idx[name] = len(self.everything) + self.everything.append(thing) + case parsing.Macro(name): + self.macros[name] = thing + self.everything.append(thing) + case parsing.Family(name): + self.families[name] = thing + case parsing.Pseudo(name): + self.pseudos[name] = thing + self.everything.append(thing) + case _: + typing.assert_never(thing) + if not psr.eof(): + raise psr.make_syntax_error(f"Extra stuff at the end of {filename}") + + def analyze(self) -> None: + """Analyze the inputs. + + Raises SystemExit if there is an error. 
+ """ + self.analyze_macros_and_pseudos() + self.find_predictions() + self.map_families() + self.check_families() + + def find_predictions(self) -> None: + """Find the instructions that need PREDICTED() labels.""" + for instr in self.instrs.values(): + targets: set[str] = set() + for line in instr.block_text: + if m := re.match(RE_PREDICTED, line): + targets.add(m.group(1)) + for target in targets: + if target_instr := self.instrs.get(target): + target_instr.predicted = True + elif target_macro := self.macro_instrs.get(target): + target_macro.predicted = True + else: + self.error( + f"Unknown instruction {target!r} predicted in {instr.name!r}", + instr.inst, # TODO: Use better location + ) + + def map_families(self) -> None: + """Link instruction names back to their family, if they have one.""" + for family in self.families.values(): + for member in [family.name] + family.members: + if member_instr := self.instrs.get(member): + if ( + member_instr.family is not family + and member_instr.family is not None + ): + self.error( + f"Instruction {member} is a member of multiple families " + f"({member_instr.family.name}, {family.name}).", + family, + ) + else: + member_instr.family = family + elif not self.macro_instrs.get(member): + self.error( + f"Unknown instruction {member!r} referenced in family {family.name!r}", + family, + ) + + def check_families(self) -> None: + """Check each family: + + - Must have at least 2 members (including head) + - Head and all members must be known instructions + - Head and all members must have the same cache, input and output effects + """ + for family in self.families.values(): + if family.name not in self.macro_instrs and family.name not in self.instrs: + self.error( + f"Family {family.name!r} has unknown instruction {family.name!r}", + family, + ) + members = [ + member + for member in family.members + if member in self.instrs or member in self.macro_instrs + ] + if members != family.members: + unknown = set(family.members) - set(members) + self.error( + f"Family {family.name!r} has unknown members: {unknown}", family + ) + expected_effects = self.effect_counts(family.name) + for member in members: + member_effects = self.effect_counts(member) + if member_effects != expected_effects: + self.error( + f"Family {family.name!r} has inconsistent " + f"(cache, input, output) effects:\n" + f" {family.name} = {expected_effects}; " + f"{member} = {member_effects}", + family, + ) + + def effect_counts(self, name: str) -> tuple[int, int, int]: + if instr := self.instrs.get(name): + cache = instr.cache_offset + input = len(instr.input_effects) + output = len(instr.output_effects) + elif mac := self.macro_instrs.get(name): + cache = mac.cache_offset + input, output = 0, 0 + for part in mac.parts: + if isinstance(part, Component): + # A component may pop what the previous component pushed, + # so we offset the input/output counts by that. 
+ delta_i = len(part.instr.input_effects) + delta_o = len(part.instr.output_effects) + offset = min(delta_i, output) + input += delta_i - offset + output += delta_o - offset + else: + assert False, f"Unknown instruction {name!r}" + return cache, input, output + + def analyze_macros_and_pseudos(self) -> None: + """Analyze each macro and pseudo instruction.""" + self.macro_instrs = {} + self.pseudo_instrs = {} + for name, macro in self.macros.items(): + self.macro_instrs[name] = mac = self.analyze_macro(macro) + self.check_macro_consistency(mac) + for name, pseudo in self.pseudos.items(): + self.pseudo_instrs[name] = self.analyze_pseudo(pseudo) + + # TODO: Merge with similar code in stacking.py, write_components() + def check_macro_consistency(self, mac: MacroInstruction) -> None: + def get_var_names(instr: Instruction) -> dict[str, StackEffect]: + vars: dict[str, StackEffect] = {} + for eff in instr.input_effects + instr.output_effects: + if eff.name == UNUSED: + continue + if eff.name in vars: + if vars[eff.name] != eff: + self.error( + f"Instruction {instr.name!r} has " + f"inconsistent type/cond/size for variable " + f"{eff.name!r}: {vars[eff.name]} vs {eff}", + instr.inst, + ) + else: + vars[eff.name] = eff + return vars + + all_vars: dict[str, StackEffect] = {} + # print("Checking", mac.name) + prevop: Instruction | None = None + for part in mac.parts: + if not isinstance(part, Component): + continue + vars = get_var_names(part.instr) + # print(" //", part.instr.name, "//", vars) + for name, eff in vars.items(): + if name in all_vars: + if all_vars[name] != eff: + self.error( + f"Macro {mac.name!r} has " + f"inconsistent type/cond/size for variable " + f"{name!r}: " + f"{all_vars[name]} vs {eff} in {part.instr.name!r}", + mac.macro, + ) + else: + all_vars[name] = eff + if prevop is not None: + pushes = list(prevop.output_effects) + pops = list(reversed(part.instr.input_effects)) + copies: list[tuple[StackEffect, StackEffect]] = [] + while pushes and pops and pushes[-1] == pops[0]: + src, dst = pushes.pop(), pops.pop(0) + if src.name == dst.name or dst.name == UNUSED: + continue + copies.append((src, dst)) + reads = set(copy[0].name for copy in copies) + writes = set(copy[1].name for copy in copies) + if reads & writes: + self.error( + f"Macro {mac.name!r} has conflicting copies " + f"(source of one copy is destination of another): " + f"{reads & writes}", + mac.macro, + ) + prevop = part.instr + + def analyze_macro(self, macro: parsing.Macro) -> MacroInstruction: + components = self.check_macro_components(macro) + parts: MacroParts = [] + flags = InstructionFlags.newEmpty() + offset = 0 + for component in components: + match component: + case parsing.CacheEffect() as ceffect: + parts.append(ceffect) + offset += ceffect.size + case Instruction() as instr: + part, offset = self.analyze_instruction(instr, offset) + parts.append(part) + flags.add(instr.instr_flags) + case _: + typing.assert_never(component) + format = "IB" + if offset: + format += "C" + "0" * (offset - 1) + return MacroInstruction(macro.name, format, flags, macro, parts, offset) + + def analyze_pseudo(self, pseudo: parsing.Pseudo) -> PseudoInstruction: + targets = [self.instrs[target] for target in pseudo.targets] + assert targets + # Make sure the targets have the same fmt + fmts = list(set([t.instr_fmt for t in targets])) + assert len(fmts) == 1 + assert len(list(set([t.instr_flags.bitmap() for t in targets]))) == 1 + return PseudoInstruction(pseudo.name, targets, fmts[0], targets[0].instr_flags) + + def 
analyze_instruction( + self, instr: Instruction, offset: int + ) -> tuple[Component, int]: + active_effects: list[ActiveCacheEffect] = [] + for ceffect in instr.cache_effects: + if ceffect.name != UNUSED: + active_effects.append(ActiveCacheEffect(ceffect, offset)) + offset += ceffect.size + return ( + Component(instr, active_effects), + offset, + ) + + def check_macro_components( + self, macro: parsing.Macro + ) -> list[InstructionOrCacheEffect]: + components: list[InstructionOrCacheEffect] = [] + for uop in macro.uops: + match uop: + case parsing.OpName(name): + if name not in self.instrs: + self.error(f"Unknown instruction {name!r}", macro) + components.append(self.instrs[name]) + case parsing.CacheEffect(): + components.append(uop) + case _: + typing.assert_never(uop) + return components diff --git a/Tools/cases_generator/flags.py b/Tools/cases_generator/flags.py new file mode 100644 index 00000000000000..f7ebdeb0d65677 --- /dev/null +++ b/Tools/cases_generator/flags.py @@ -0,0 +1,102 @@ +import dataclasses + +from formatting import Formatter +import lexer as lx +import parsing + + +@dataclasses.dataclass +class InstructionFlags: + """Construct and manipulate instruction flags""" + + HAS_ARG_FLAG: bool + HAS_CONST_FLAG: bool + HAS_NAME_FLAG: bool + HAS_JUMP_FLAG: bool + HAS_FREE_FLAG: bool + HAS_LOCAL_FLAG: bool + + def __post_init__(self): + self.bitmask = {name: (1 << i) for i, name in enumerate(self.names())} + + @staticmethod + def fromInstruction(instr: parsing.Node): + + has_free = ( + variable_used(instr, "PyCell_New") + or variable_used(instr, "PyCell_GET") + or variable_used(instr, "PyCell_SET") + ) + + return InstructionFlags( + HAS_ARG_FLAG=variable_used(instr, "oparg"), + HAS_CONST_FLAG=variable_used(instr, "FRAME_CO_CONSTS"), + HAS_NAME_FLAG=variable_used(instr, "FRAME_CO_NAMES"), + HAS_JUMP_FLAG=variable_used(instr, "JUMPBY"), + HAS_FREE_FLAG=has_free, + HAS_LOCAL_FLAG=( + variable_used(instr, "GETLOCAL") or variable_used(instr, "SETLOCAL") + ) + and not has_free, + ) + + @staticmethod + def newEmpty(): + return InstructionFlags(False, False, False, False, False, False) + + def add(self, other: "InstructionFlags") -> None: + for name, value in dataclasses.asdict(other).items(): + if value: + setattr(self, name, value) + + def names(self, value=None) -> list[str]: + if value is None: + return list(dataclasses.asdict(self).keys()) + return [n for n, v in dataclasses.asdict(self).items() if v == value] + + def bitmap(self) -> int: + flags = 0 + for name in self.names(): + if getattr(self, name): + flags |= self.bitmask[name] + return flags + + @classmethod + def emit_macros(cls, out: Formatter): + flags = cls.newEmpty() + for name, value in flags.bitmask.items(): + out.emit(f"#define {name} ({value})") + + for name, value in flags.bitmask.items(): + out.emit( + f"#define OPCODE_{name[:-len('_FLAG')]}(OP) " + f"(_PyOpcode_opcode_metadata[OP].flags & ({name}))" + ) + + +def variable_used(node: parsing.Node, name: str) -> bool: + """Determine whether a variable with a given name is used in a node.""" + return any( + token.kind == "IDENTIFIER" and token.text == name for token in node.tokens + ) + + +def variable_used_unspecialized(node: parsing.Node, name: str) -> bool: + """Like variable_used(), but skips #if ENABLE_SPECIALIZATION blocks.""" + tokens: list[lx.Token] = [] + skipping = False + for i, token in enumerate(node.tokens): + if token.kind == "MACRO": + text = "".join(token.text.split()) + # TODO: Handle nested #if + if text == "#if": + if ( + i + 1 < len(node.tokens) + 
and node.tokens[i + 1].text == "ENABLE_SPECIALIZATION" + ): + skipping = True + elif text in ("#else", "#endif"): + skipping = False + if not skipping: + tokens.append(token) + return any(token.kind == "IDENTIFIER" and token.text == name for token in tokens) diff --git a/Tools/cases_generator/formatting.py b/Tools/cases_generator/formatting.py new file mode 100644 index 00000000000000..5894751bd9635c --- /dev/null +++ b/Tools/cases_generator/formatting.py @@ -0,0 +1,205 @@ +import contextlib +import re +import typing + +from parsing import StackEffect, Family + +UNUSED = "unused" + + +class Formatter: + """Wraps an output stream with the ability to indent etc.""" + + stream: typing.TextIO + prefix: str + emit_line_directives: bool = False + lineno: int # Next line number, 1-based + filename: str # Slightly improved stream.filename + nominal_lineno: int + nominal_filename: str + + def __init__( + self, + stream: typing.TextIO, + indent: int, + emit_line_directives: bool = False, + comment: str = "//", + ) -> None: + self.stream = stream + self.prefix = " " * indent + self.emit_line_directives = emit_line_directives + self.comment = comment + self.lineno = 1 + self.filename = prettify_filename(self.stream.name) + self.nominal_lineno = 1 + self.nominal_filename = self.filename + + def write_raw(self, s: str) -> None: + self.stream.write(s) + newlines = s.count("\n") + self.lineno += newlines + self.nominal_lineno += newlines + + def emit(self, arg: str) -> None: + if arg: + self.write_raw(f"{self.prefix}{arg}\n") + else: + self.write_raw("\n") + + def set_lineno(self, lineno: int, filename: str) -> None: + if self.emit_line_directives: + if lineno != self.nominal_lineno or filename != self.nominal_filename: + self.emit(f'#line {lineno} "{filename}"') + self.nominal_lineno = lineno + self.nominal_filename = filename + + def reset_lineno(self) -> None: + if self.lineno != self.nominal_lineno or self.filename != self.nominal_filename: + self.set_lineno(self.lineno + 1, self.filename) + + @contextlib.contextmanager + def indent(self): + self.prefix += " " + yield + self.prefix = self.prefix[:-4] + + @contextlib.contextmanager + def block(self, head: str, tail: str = ""): + if head: + self.emit(head + " {") + else: + self.emit("{") + with self.indent(): + yield + self.emit("}" + tail) + + def stack_adjust( + self, + input_effects: list[StackEffect], + output_effects: list[StackEffect], + ): + shrink, isym = list_effect_size(input_effects) + grow, osym = list_effect_size(output_effects) + diff = grow - shrink + if isym and isym != osym: + self.emit(f"STACK_SHRINK({isym});") + if diff < 0: + self.emit(f"STACK_SHRINK({-diff});") + if diff > 0: + self.emit(f"STACK_GROW({diff});") + if osym and osym != isym: + self.emit(f"STACK_GROW({osym});") + + def declare(self, dst: StackEffect, src: StackEffect | None): + if dst.name == UNUSED or dst.cond == "0": + return + typ = f"{dst.type}" if dst.type else "PyObject *" + if src: + cast = self.cast(dst, src) + initexpr = f"{cast}{src.name}" + if src.cond and src.cond != "1": + initexpr = f"{parenthesize_cond(src.cond)} ? 
{initexpr} : NULL" + init = f" = {initexpr}" + elif dst.cond and dst.cond != "1": + init = " = NULL" + else: + init = "" + sepa = "" if typ.endswith("*") else " " + self.emit(f"{typ}{sepa}{dst.name}{init};") + + def assign(self, dst: StackEffect, src: StackEffect): + if src.name == UNUSED or dst.name == UNUSED: + return + cast = self.cast(dst, src) + if re.match(r"^REG\(oparg(\d+)\)$", dst.name): + self.emit(f"Py_XSETREF({dst.name}, {cast}{src.name});") + else: + stmt = f"{dst.name} = {cast}{src.name};" + if src.cond and src.cond != "1": + if src.cond == "0": + # It will not be executed + return + stmt = f"if ({src.cond}) {{ {stmt} }}" + self.emit(stmt) + + def cast(self, dst: StackEffect, src: StackEffect) -> str: + return f"({dst.type or 'PyObject *'})" if src.type != dst.type else "" + + def static_assert_family_size( + self, name: str, family: Family | None, cache_offset: int + ) -> None: + """Emit a static_assert for the size of a family, if known. + + This will fail at compile time if the cache size computed from + the instruction definition does not match the size of the struct + used by specialize.c. + """ + if family and name == family.name: + cache_size = family.size + if cache_size: + self.emit( + f"static_assert({cache_size} == {cache_offset}, " + f'"incorrect cache size");' + ) + + +def prettify_filename(filename: str) -> str: + # Make filename more user-friendly and less platform-specific, + # it is only used for error reporting at this point. + filename = filename.replace("\\", "/") + if filename.startswith("./"): + filename = filename[2:] + if filename.endswith(".new"): + filename = filename[:-4] + return filename + + +def list_effect_size(effects: list[StackEffect]) -> tuple[int, str]: + numeric = 0 + symbolic: list[str] = [] + for effect in effects: + diff, sym = effect_size(effect) + numeric += diff + if sym: + symbolic.append(maybe_parenthesize(sym)) + return numeric, " + ".join(symbolic) + + +def effect_size(effect: StackEffect) -> tuple[int, str]: + """Return the 'size' impact of a stack effect. + + Returns a tuple (numeric, symbolic) where: + + - numeric is an int giving the statically analyzable size of the effect + - symbolic is a string representing a variable effect (e.g. 'oparg*2') + + At most one of these will be non-zero / non-empty. + """ + if effect.size: + assert not effect.cond, "Array effects cannot have a condition" + return 0, effect.size + elif effect.cond: + if effect.cond in ("0", "1"): + return int(effect.cond), "" + return 0, f"{maybe_parenthesize(effect.cond)} ? 1 : 0" + else: + return 1, "" + + +def maybe_parenthesize(sym: str) -> str: + """Add parentheses around a string if it contains an operator. + + An exception is made for '*' which is common and harmless + in the context where the symbolic size is used. + """ + if re.match(r"^[\s\w*]+$", sym): + return sym + else: + return f"({sym})" + + +def parenthesize_cond(cond: str) -> str: + """Parenthesize a condition, but only if it contains ?: itself.""" + if "?" in cond: + cond = f"({cond})" + return cond diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py index 33eff548a18809..d35a16a80e8d00 100644 --- a/Tools/cases_generator/generate_cases.py +++ b/Tools/cases_generator/generate_cases.py @@ -1,21 +1,32 @@ """Generate the main interpreter switch. - Reads the instruction definitions from bytecodes.c. Writes the cases to generated_cases.c.h, which is #included in ceval.c. 
""" import argparse -import contextlib -import dataclasses import os import posixpath -import re import sys import typing -import lexer as lx -import parser -from parser import StackEffect +import stacking # Early import to avoid circular import +from analysis import Analyzer +from formatting import Formatter, list_effect_size +from flags import InstructionFlags, variable_used +from instructions import ( + AnyInstruction, + Component, + Instruction, + MacroInstruction, + MacroParts, + PseudoInstruction, + StackEffect, + OverriddenInstructionPlaceHolder, + TIER_TWO, +) +import parsing +from parsing import StackEffect + HERE = os.path.dirname(__file__) ROOT = os.path.join(HERE, "../..") @@ -32,13 +43,6 @@ DEFAULT_EXECUTOR_OUTPUT = os.path.relpath( os.path.join(ROOT, "Python/executor_cases.c.h") ) -BEGIN_MARKER = "// BEGIN BYTECODES //" -END_MARKER = "// END BYTECODES //" -RE_PREDICTED = ( - r"^\s*(?:GO_TO_INSTRUCTION\(|DEOPT_IF\(.*?,\s*)(\w+)\);\s*(?://.*)?$" -) -UNUSED = "unused" -BITS_PER_CODE_UNIT = 16 # Constants used instead of size for macro expansions. # Note: 1, 2, 4 must match actual cache entry sizes. @@ -51,10 +55,7 @@ "OPARG_BOTTOM": 6, } -RESERVED_WORDS = { - "co_consts" : "Use FRAME_CO_CONSTS.", - "co_names": "Use FRAME_CO_NAMES.", -} +INSTR_FMT_PREFIX = "INSTR_FMT_" arg_parser = argparse.ArgumentParser( description="Generate the code for the interpreter switch.", @@ -64,10 +65,18 @@ "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT ) arg_parser.add_argument( - "-m", "--metadata", type=str, help="Generated C metadata", default=DEFAULT_METADATA_OUTPUT + "-m", + "--metadata", + type=str, + help="Generated C metadata", + default=DEFAULT_METADATA_OUTPUT, ) arg_parser.add_argument( - "-p", "--pymetadata", type=str, help="Generated Python metadata", default=DEFAULT_PYMETADATA_OUTPUT + "-p", + "--pymetadata", + type=str, + help="Generated Python metadata", + default=DEFAULT_PYMETADATA_OUTPUT, ) arg_parser.add_argument( "-l", "--emit-line-directives", help="Emit #line directives", action="store_true" @@ -84,966 +93,9 @@ ) -def effect_size(effect: StackEffect) -> tuple[int, str]: - """Return the 'size' impact of a stack effect. - - Returns a tuple (numeric, symbolic) where: - - - numeric is an int giving the statically analyzable size of the effect - - symbolic is a string representing a variable effect (e.g. 'oparg*2') - - At most one of these will be non-zero / non-empty. - """ - if effect.size: - assert not effect.cond, "Array effects cannot have a condition" - return 0, effect.size - elif effect.cond: - if effect.cond in ("0", "1"): - return int(effect.cond), "" - return 0, f"{maybe_parenthesize(effect.cond)} ? 1 : 0" - else: - return 1, "" - - -def maybe_parenthesize(sym: str) -> str: - """Add parentheses around a string if it contains an operator. - - An exception is made for '*' which is common and harmless - in the context where the symbolic size is used. 
- """ - if re.match(r"^[\s\w*]+$", sym): - return sym - else: - return f"({sym})" - - -def list_effect_size(effects: list[StackEffect]) -> tuple[int, str]: - numeric = 0 - symbolic: list[str] = [] - for effect in effects: - diff, sym = effect_size(effect) - numeric += diff - if sym: - symbolic.append(maybe_parenthesize(sym)) - return numeric, " + ".join(symbolic) - - -def string_effect_size(arg: tuple[int, str]) -> str: - numeric, symbolic = arg - if numeric and symbolic: - return f"{numeric} + {symbolic}" - elif symbolic: - return symbolic - else: - return str(numeric) - - -class Formatter: - """Wraps an output stream with the ability to indent etc.""" - - stream: typing.TextIO - prefix: str - emit_line_directives: bool = False - lineno: int # Next line number, 1-based - filename: str # Slightly improved stream.filename - nominal_lineno: int - nominal_filename: str - - def __init__( - self, stream: typing.TextIO, indent: int, - emit_line_directives: bool = False, comment: str = "//", - ) -> None: - self.stream = stream - self.prefix = " " * indent - self.emit_line_directives = emit_line_directives - self.comment = comment - self.lineno = 1 - self.filename = prettify_filename(self.stream.name) - self.nominal_lineno = 1 - self.nominal_filename = self.filename - - def write_raw(self, s: str) -> None: - self.stream.write(s) - newlines = s.count("\n") - self.lineno += newlines - self.nominal_lineno += newlines - - def emit(self, arg: str) -> None: - if arg: - self.write_raw(f"{self.prefix}{arg}\n") - else: - self.write_raw("\n") - - def set_lineno(self, lineno: int, filename: str) -> None: - if self.emit_line_directives: - if lineno != self.nominal_lineno or filename != self.nominal_filename: - self.emit(f'#line {lineno} "{filename}"') - self.nominal_lineno = lineno - self.nominal_filename = filename - - def reset_lineno(self) -> None: - if self.lineno != self.nominal_lineno or self.filename != self.nominal_filename: - self.set_lineno(self.lineno + 1, self.filename) - - @contextlib.contextmanager - def indent(self): - self.prefix += " " - yield - self.prefix = self.prefix[:-4] - - @contextlib.contextmanager - def block(self, head: str, tail: str = ""): - if head: - self.emit(head + " {") - else: - self.emit("{") - with self.indent(): - yield - self.emit("}" + tail) - - def stack_adjust( - self, - input_effects: list[StackEffect], - output_effects: list[StackEffect], - ): - shrink, isym = list_effect_size(input_effects) - grow, osym = list_effect_size(output_effects) - diff = grow - shrink - if isym and isym != osym: - self.emit(f"STACK_SHRINK({isym});") - if diff < 0: - self.emit(f"STACK_SHRINK({-diff});") - if diff > 0: - self.emit(f"STACK_GROW({diff});") - if osym and osym != isym: - self.emit(f"STACK_GROW({osym});") - - def declare(self, dst: StackEffect, src: StackEffect | None): - if dst.name == UNUSED or dst.cond == "0": - return - typ = f"{dst.type}" if dst.type else "PyObject *" - if src: - cast = self.cast(dst, src) - init = f" = {cast}{src.name}" - elif dst.cond: - init = " = NULL" - else: - init = "" - sepa = "" if typ.endswith("*") else " " - self.emit(f"{typ}{sepa}{dst.name}{init};") - - def assign(self, dst: StackEffect, src: StackEffect): - if src.name == UNUSED: - return - if src.size: - # Don't write sized arrays -- it's up to the user code. 
- return - cast = self.cast(dst, src) - if re.match(r"^REG\(oparg(\d+)\)$", dst.name): - self.emit(f"Py_XSETREF({dst.name}, {cast}{src.name});") - else: - stmt = f"{dst.name} = {cast}{src.name};" - if src.cond and src.cond != "1": - if src.cond == "0": - # It will not be executed - return - stmt = f"if ({src.cond}) {{ {stmt} }}" - self.emit(stmt) - - def cast(self, dst: StackEffect, src: StackEffect) -> str: - return f"({dst.type or 'PyObject *'})" if src.type != dst.type else "" - -@dataclasses.dataclass -class InstructionFlags: - """Construct and manipulate instruction flags""" - - HAS_ARG_FLAG: bool - HAS_CONST_FLAG: bool - HAS_NAME_FLAG: bool - HAS_JUMP_FLAG: bool - HAS_FREE_FLAG: bool - HAS_LOCAL_FLAG: bool - - def __post_init__(self): - self.bitmask = { - name : (1 << i) for i, name in enumerate(self.names()) - } - - @staticmethod - def fromInstruction(instr: "AnyInstruction"): - - has_free = (variable_used(instr, "PyCell_New") or - variable_used(instr, "PyCell_GET") or - variable_used(instr, "PyCell_SET")) - - return InstructionFlags( - HAS_ARG_FLAG=variable_used(instr, "oparg"), - HAS_CONST_FLAG=variable_used(instr, "FRAME_CO_CONSTS"), - HAS_NAME_FLAG=variable_used(instr, "FRAME_CO_NAMES"), - HAS_JUMP_FLAG=variable_used(instr, "JUMPBY"), - HAS_FREE_FLAG=has_free, - HAS_LOCAL_FLAG=(variable_used(instr, "GETLOCAL") or - variable_used(instr, "SETLOCAL")) and - not has_free, - ) - - @staticmethod - def newEmpty(): - return InstructionFlags(False, False, False, False, False, False) - - def add(self, other: "InstructionFlags") -> None: - for name, value in dataclasses.asdict(other).items(): - if value: - setattr(self, name, value) - - def names(self, value=None): - if value is None: - return dataclasses.asdict(self).keys() - return [n for n, v in dataclasses.asdict(self).items() if v == value] - - def bitmap(self) -> int: - flags = 0 - for name in self.names(): - if getattr(self, name): - flags |= self.bitmask[name] - return flags - - @classmethod - def emit_macros(cls, out: Formatter): - flags = cls.newEmpty() - for name, value in flags.bitmask.items(): - out.emit(f"#define {name} ({value})"); - - for name, value in flags.bitmask.items(): - out.emit( - f"#define OPCODE_{name[:-len('_FLAG')]}(OP) " - f"(_PyOpcode_opcode_metadata[OP].flags & ({name}))") - - -@dataclasses.dataclass -class ActiveCacheEffect: - """Wraps a CacheEffect that is actually used, in context.""" - effect: parser.CacheEffect - offset: int - - -FORBIDDEN_NAMES_IN_UOPS = ( - "resume_with_error", - "kwnames", - "next_instr", - "oparg1", # Proxy for super-instructions like LOAD_FAST_LOAD_FAST - "JUMPBY", - "DISPATCH", - "INSTRUMENTED_JUMP", - "throwflag", - "exception_unwind", - "import_from", - "import_name", - "_PyObject_CallNoArgs", # Proxy for BEFORE_WITH -) - - -# Interpreter tiers -TIER_ONE = 1 # Specializing adaptive interpreter (PEP 659) -TIER_TWO = 2 # Experimental tracing interpreter -Tiers: typing.TypeAlias = typing.Literal[1, 2] - - -@dataclasses.dataclass -class Instruction: - """An instruction with additional data and code.""" - - # Parts of the underlying instruction definition - inst: parser.InstDef - kind: typing.Literal["inst", "op"] - name: str - block: parser.Block - block_text: list[str] # Block.text, less curlies, less PREDICT() calls - block_line: int # First line of block in original code - - # Computed by constructor - always_exits: bool - cache_offset: int - cache_effects: list[parser.CacheEffect] - input_effects: list[StackEffect] - output_effects: list[StackEffect] - unmoved_names: 
frozenset[str] - instr_fmt: str - instr_flags: InstructionFlags - active_caches: list[ActiveCacheEffect] - - # Set later - family: parser.Family | None = None - predicted: bool = False - - def __init__(self, inst: parser.InstDef): - self.inst = inst - self.kind = inst.kind - self.name = inst.name - self.block = inst.block - self.block_text, self.check_eval_breaker, self.block_line = \ - extract_block_text(self.block) - self.always_exits = always_exits(self.block_text) - self.cache_effects = [ - effect for effect in inst.inputs if isinstance(effect, parser.CacheEffect) - ] - self.cache_offset = sum(c.size for c in self.cache_effects) - self.input_effects = [ - effect for effect in inst.inputs if isinstance(effect, StackEffect) - ] - self.output_effects = inst.outputs # For consistency/completeness - unmoved_names: set[str] = set() - for ieffect, oeffect in zip(self.input_effects, self.output_effects): - if ieffect.name == oeffect.name: - unmoved_names.add(ieffect.name) - else: - break - self.unmoved_names = frozenset(unmoved_names) - - self.instr_flags = InstructionFlags.fromInstruction(inst) - - self.active_caches = [] - offset = 0 - for effect in self.cache_effects: - if effect.name != UNUSED: - self.active_caches.append(ActiveCacheEffect(effect, offset)) - offset += effect.size - - if self.instr_flags.HAS_ARG_FLAG: - fmt = "IB" - else: - fmt = "IX" - if offset: - fmt += "C" + "0"*(offset-1) - self.instr_fmt = fmt - - def is_viable_uop(self) -> bool: - """Whether this instruction is viable as a uop.""" - dprint: typing.Callable[..., None] = lambda *args, **kwargs: None - # if self.name.startswith("CALL"): - # dprint = print - - if self.name == "EXIT_TRACE": - return True # This has 'return frame' but it's okay - if self.always_exits: - dprint(f"Skipping {self.name} because it always exits") - return False - if len(self.active_caches) > 1: - # print(f"Skipping {self.name} because it has >1 cache entries") - return False - res = True - for forbidden in FORBIDDEN_NAMES_IN_UOPS: - # NOTE: To disallow unspecialized uops, use - # if variable_used(self.inst, forbidden): - if variable_used_unspecialized(self.inst, forbidden): - dprint(f"Skipping {self.name} because it uses {forbidden}") - res = False - return res - - def write(self, out: Formatter, tier: Tiers = TIER_ONE) -> None: - """Write one instruction, sans prologue and epilogue.""" - # Write a static assertion that a family's cache size is correct - if family := self.family: - if self.name == family.name: - if cache_size := family.size: - out.emit( - f"static_assert({cache_size} == " - f'{self.cache_offset}, "incorrect cache size");' - ) - - # Write input stack effect variable declarations and initializations - ieffects = list(reversed(self.input_effects)) - for i, ieffect in enumerate(ieffects): - isize = string_effect_size( - list_effect_size([ieff for ieff in ieffects[: i + 1]]) - ) - if ieffect.size: - src = StackEffect(f"(stack_pointer - {maybe_parenthesize(isize)})", "PyObject **") - elif ieffect.cond: - src = StackEffect(f"({ieffect.cond}) ? 
stack_pointer[-{maybe_parenthesize(isize)}] : NULL", "") - else: - src = StackEffect(f"stack_pointer[-{maybe_parenthesize(isize)}]", "") - out.declare(ieffect, src) - - # Write output stack effect variable declarations - isize = string_effect_size(list_effect_size(self.input_effects)) - input_names = {ieffect.name for ieffect in self.input_effects} - for i, oeffect in enumerate(self.output_effects): - if oeffect.name not in input_names: - if oeffect.size: - osize = string_effect_size( - list_effect_size([oeff for oeff in self.output_effects[:i]]) - ) - offset = "stack_pointer" - if isize != osize: - if isize != "0": - offset += f" - ({isize})" - if osize != "0": - offset += f" + {osize}" - src = StackEffect(offset, "PyObject **") - out.declare(oeffect, src) - else: - out.declare(oeffect, None) - - # out.emit(f"next_instr += OPSIZE({self.inst.name}) - 1;") - - self.write_body(out, 0, self.active_caches, tier=tier) - - # Skip the rest if the block always exits - if self.always_exits: - return - - # Write net stack growth/shrinkage - out.stack_adjust( - [ieff for ieff in self.input_effects], - [oeff for oeff in self.output_effects], - ) - - # Write output stack effect assignments - oeffects = list(reversed(self.output_effects)) - for i, oeffect in enumerate(oeffects): - if oeffect.name in self.unmoved_names: - continue - osize = string_effect_size( - list_effect_size([oeff for oeff in oeffects[: i + 1]]) - ) - if oeffect.size: - dst = StackEffect(f"stack_pointer - {maybe_parenthesize(osize)}", "PyObject **") - else: - dst = StackEffect(f"stack_pointer[-{maybe_parenthesize(osize)}]", "") - out.assign(dst, oeffect) - - # Write cache effect - if tier == TIER_ONE and self.cache_offset: - out.emit(f"next_instr += {self.cache_offset};") - - def write_body( - self, - out: Formatter, - dedent: int, - active_caches: list[ActiveCacheEffect], - tier: Tiers = TIER_ONE, - ) -> None: - """Write the instruction body.""" - # Write cache effect variable declarations and initializations - for active in active_caches: - ceffect = active.effect - bits = ceffect.size * BITS_PER_CODE_UNIT - if bits == 64: - # NOTE: We assume that 64-bit data in the cache - # is always an object pointer. - # If this becomes false, we need a way to specify - # syntactically what type the cache data is. - typ = "PyObject *" - func = "read_obj" - else: - typ = f"uint{bits}_t " - func = f"read_u{bits}" - if tier == TIER_ONE: - out.emit( - f"{typ}{ceffect.name} = {func}(&next_instr[{active.offset}].cache);" - ) - else: - out.emit(f"{typ}{ceffect.name} = ({typ.strip()})operand;") - - # Write the body, substituting a goto for ERROR_IF() and other stuff - assert dedent <= 0 - extra = " " * -dedent - names_to_skip = self.unmoved_names | frozenset({UNUSED, "null"}) - offset = 0 - context = self.block.context - assert context is not None and context.owner is not None - filename = context.owner.filename - for line in self.block_text: - out.set_lineno(self.block_line + offset, filename) - offset += 1 - if m := re.match(r"(\s*)ERROR_IF\((.+), (\w+)\);\s*(?://.*)?$", line): - space, cond, label = m.groups() - space = extra + space - # ERROR_IF() must pop the inputs from the stack. - # The code block is responsible for DECREF()ing them. - # NOTE: If the label doesn't exist, just add it to ceval.c. - - # Don't pop common input/output effects at the bottom! - # These aren't DECREF'ed so they can stay. 
- ieffs = list(self.input_effects) - oeffs = list(self.output_effects) - while ieffs and oeffs and ieffs[0] == oeffs[0]: - ieffs.pop(0) - oeffs.pop(0) - ninputs, symbolic = list_effect_size(ieffs) - if ninputs: - label = f"pop_{ninputs}_{label}" - if symbolic: - out.write_raw( - f"{space}if ({cond}) {{ STACK_SHRINK({symbolic}); goto {label}; }}\n" - ) - else: - out.write_raw(f"{space}if ({cond}) goto {label};\n") - elif m := re.match(r"(\s*)DECREF_INPUTS\(\);\s*(?://.*)?$", line): - out.reset_lineno() - space = extra + m.group(1) - for ieff in self.input_effects: - if ieff.name in names_to_skip: - continue - if ieff.size: - out.write_raw( - f"{space}for (int _i = {ieff.size}; --_i >= 0;) {{\n" - ) - out.write_raw(f"{space} Py_DECREF({ieff.name}[_i]);\n") - out.write_raw(f"{space}}}\n") - else: - decref = "XDECREF" if ieff.cond else "DECREF" - out.write_raw(f"{space}Py_{decref}({ieff.name});\n") - else: - out.write_raw(extra + line) - out.reset_lineno() - - -InstructionOrCacheEffect = Instruction | parser.CacheEffect -StackEffectMapping = list[tuple[StackEffect, StackEffect]] - - -@dataclasses.dataclass -class Component: - instr: Instruction - input_mapping: StackEffectMapping - output_mapping: StackEffectMapping - active_caches: list[ActiveCacheEffect] - - def write_body(self, out: Formatter) -> None: - with out.block(""): - input_names = {ieffect.name for _, ieffect in self.input_mapping} - for var, ieffect in self.input_mapping: - out.declare(ieffect, var) - for _, oeffect in self.output_mapping: - if oeffect.name not in input_names: - out.declare(oeffect, None) - - self.instr.write_body(out, -4, self.active_caches) - - for var, oeffect in self.output_mapping: - out.assign(var, oeffect) - - -MacroParts = list[Component | parser.CacheEffect] - - -@dataclasses.dataclass -class MacroInstruction: - """A macro instruction.""" - - name: str - stack: list[StackEffect] - initial_sp: int - final_sp: int - instr_fmt: str - instr_flags: InstructionFlags - macro: parser.Macro - parts: MacroParts - cache_offset: int - predicted: bool = False - - -@dataclasses.dataclass -class PseudoInstruction: - """A pseudo instruction.""" - - name: str - targets: list[Instruction] - instr_fmt: str - instr_flags: InstructionFlags - - -@dataclasses.dataclass -class OverriddenInstructionPlaceHolder: - name: str - - -AnyInstruction = Instruction | MacroInstruction | PseudoInstruction -INSTR_FMT_PREFIX = "INSTR_FMT_" - - -class Analyzer: - """Parse input, analyze it, and write to output.""" - - input_filenames: list[str] - output_filename: str - metadata_filename: str - pymetadata_filename: str - executor_filename: str - errors: int = 0 - emit_line_directives: bool = False - - def __init__( - self, - input_filenames: list[str], - output_filename: str, - metadata_filename: str, - pymetadata_filename: str, - executor_filename: str, - ): - """Read the input file.""" - self.input_filenames = input_filenames - self.output_filename = output_filename - self.metadata_filename = metadata_filename - self.pymetadata_filename = pymetadata_filename - self.executor_filename = executor_filename - - def error(self, msg: str, node: parser.Node) -> None: - lineno = 0 - filename = "" - if context := node.context: - filename = context.owner.filename - # Use line number of first non-comment in the node - for token in context.owner.tokens[context.begin : context.end]: - lineno = token.line - if token.kind != "COMMENT": - break - print(f"{filename}:{lineno}: {msg}", file=sys.stderr) - self.errors += 1 - - everything: list[ - parser.InstDef | 
parser.Macro | parser.Pseudo | OverriddenInstructionPlaceHolder - ] - instrs: dict[str, Instruction] # Includes ops - macros: dict[str, parser.Macro] - macro_instrs: dict[str, MacroInstruction] - families: dict[str, parser.Family] - pseudos: dict[str, parser.Pseudo] - pseudo_instrs: dict[str, PseudoInstruction] - - def parse(self) -> None: - """Parse the source text. - - We only want the parser to see the stuff between the - begin and end markers. - """ - - self.everything = [] - self.instrs = {} - self.macros = {} - self.families = {} - self.pseudos = {} - - instrs_idx: dict[str, int] = dict() - - for filename in self.input_filenames: - self.parse_file(filename, instrs_idx) - - files = " + ".join(self.input_filenames) - print( - f"Read {len(self.instrs)} instructions/ops, " - f"{len(self.macros)} macros, {len(self.pseudos)} pseudos, " - f"and {len(self.families)} families from {files}", - file=sys.stderr, - ) - - def parse_file(self, filename: str, instrs_idx: dict[str, int]) -> None: - with open(filename) as file: - src = file.read() - - - psr = parser.Parser(src, filename=prettify_filename(filename)) - - # Skip until begin marker - while tkn := psr.next(raw=True): - if tkn.text == BEGIN_MARKER: - break - else: - raise psr.make_syntax_error( - f"Couldn't find {BEGIN_MARKER!r} in {psr.filename}" - ) - start = psr.getpos() - - # Find end marker, then delete everything after it - while tkn := psr.next(raw=True): - if tkn.text == END_MARKER: - break - del psr.tokens[psr.getpos() - 1 :] - - # Parse from start - psr.setpos(start) - thing: parser.InstDef | parser.Macro | parser.Pseudo | parser.Family | None - thing_first_token = psr.peek() - while thing := psr.definition(): - if ws := [w for w in RESERVED_WORDS if variable_used(thing, w)]: - self.error(f"'{ws[0]}' is a reserved word. {RESERVED_WORDS[ws[0]]}", thing) - - match thing: - case parser.InstDef(name=name): - if name in self.instrs: - if not thing.override: - raise psr.make_syntax_error( - f"Duplicate definition of '{name}' @ {thing.context} " - f"previous definition @ {self.instrs[name].inst.context}", - thing_first_token, - ) - self.everything[instrs_idx[name]] = OverriddenInstructionPlaceHolder(name=name) - if name not in self.instrs and thing.override: - raise psr.make_syntax_error( - f"Definition of '{name}' @ {thing.context} is supposed to be " - "an override but no previous definition exists.", - thing_first_token, - ) - self.instrs[name] = Instruction(thing) - instrs_idx[name] = len(self.everything) - self.everything.append(thing) - case parser.Macro(name): - self.macros[name] = thing - self.everything.append(thing) - case parser.Family(name): - self.families[name] = thing - case parser.Pseudo(name): - self.pseudos[name] = thing - self.everything.append(thing) - case _: - typing.assert_never(thing) - if not psr.eof(): - raise psr.make_syntax_error(f"Extra stuff at the end of {filename}") - - def analyze(self) -> None: - """Analyze the inputs. - - Raises SystemExit if there is an error. 
- """ - self.analyze_macros_and_pseudos() - self.find_predictions() - self.map_families() - self.check_families() - - def find_predictions(self) -> None: - """Find the instructions that need PREDICTED() labels.""" - for instr in self.instrs.values(): - targets: set[str] = set() - for line in instr.block_text: - if m := re.match(RE_PREDICTED, line): - targets.add(m.group(1)) - for target in targets: - if target_instr := self.instrs.get(target): - target_instr.predicted = True - elif target_macro := self.macro_instrs.get(target): - target_macro.predicted = True - else: - self.error( - f"Unknown instruction {target!r} predicted in {instr.name!r}", - instr.inst, # TODO: Use better location - ) - - def map_families(self) -> None: - """Link instruction names back to their family, if they have one.""" - for family in self.families.values(): - for member in [family.name] + family.members: - if member_instr := self.instrs.get(member): - if member_instr.family not in (family, None): - self.error( - f"Instruction {member} is a member of multiple families " - f"({member_instr.family.name}, {family.name}).", - family, - ) - else: - member_instr.family = family - elif not self.macro_instrs.get(member): - self.error( - f"Unknown instruction {member!r} referenced in family {family.name!r}", - family, - ) - - def check_families(self) -> None: - """Check each family: - - - Must have at least 2 members (including head) - - Head and all members must be known instructions - - Head and all members must have the same cache, input and output effects - """ - for family in self.families.values(): - if family.name not in self.macro_instrs and family.name not in self.instrs: - self.error( - f"Family {family.name!r} has unknown instruction {family.name!r}", - family, - ) - members = [ - member - for member in family.members - if member in self.instrs or member in self.macro_instrs - ] - if members != family.members: - unknown = set(family.members) - set(members) - self.error( - f"Family {family.name!r} has unknown members: {unknown}", family - ) - expected_effects = self.effect_counts(family.name) - for member in members: - member_effects = self.effect_counts(member) - if member_effects != expected_effects: - self.error( - f"Family {family.name!r} has inconsistent " - f"(cache, input, output) effects:\n" - f" {family.name} = {expected_effects}; " - f"{member} = {member_effects}", - family, - ) - - def effect_counts(self, name: str) -> tuple[int, int, int]: - if instr := self.instrs.get(name): - cache = instr.cache_offset - input = len(instr.input_effects) - output = len(instr.output_effects) - elif mac := self.macro_instrs.get(name): - cache = mac.cache_offset - input, output = 0, 0 - for part in mac.parts: - if isinstance(part, Component): - # A component may pop what the previous component pushed, - # so we offset the input/output counts by that. 
- delta_i = len(part.instr.input_effects) - delta_o = len(part.instr.output_effects) - offset = min(delta_i, output) - input += delta_i - offset - output += delta_o - offset - else: - assert False, f"Unknown instruction {name!r}" - return cache, input, output - - def analyze_macros_and_pseudos(self) -> None: - """Analyze each macro and pseudo instruction.""" - self.macro_instrs = {} - self.pseudo_instrs = {} - for name, macro in self.macros.items(): - self.macro_instrs[name] = self.analyze_macro(macro) - for name, pseudo in self.pseudos.items(): - self.pseudo_instrs[name] = self.analyze_pseudo(pseudo) - - def analyze_macro(self, macro: parser.Macro) -> MacroInstruction: - components = self.check_macro_components(macro) - stack, initial_sp = self.stack_analysis(components) - sp = initial_sp - parts: MacroParts = [] - flags = InstructionFlags.newEmpty() - offset = 0 - for component in components: - match component: - case parser.CacheEffect() as ceffect: - parts.append(ceffect) - offset += ceffect.size - case Instruction() as instr: - part, sp, offset = self.analyze_instruction(instr, stack, sp, offset) - parts.append(part) - flags.add(instr.instr_flags) - case _: - typing.assert_never(component) - final_sp = sp - format = "IB" - if offset: - format += "C" + "0"*(offset-1) - return MacroInstruction( - macro.name, stack, initial_sp, final_sp, format, flags, macro, parts, offset - ) - - def analyze_pseudo(self, pseudo: parser.Pseudo) -> PseudoInstruction: - targets = [self.instrs[target] for target in pseudo.targets] - assert targets - # Make sure the targets have the same fmt - fmts = list(set([t.instr_fmt for t in targets])) - assert(len(fmts) == 1) - assert(len(list(set([t.instr_flags.bitmap() for t in targets]))) == 1) - return PseudoInstruction(pseudo.name, targets, fmts[0], targets[0].instr_flags) - - def analyze_instruction( - self, instr: Instruction, stack: list[StackEffect], sp: int, offset: int - ) -> tuple[Component, int, int]: - input_mapping: StackEffectMapping = [] - for ieffect in reversed(instr.input_effects): - sp -= 1 - input_mapping.append((stack[sp], ieffect)) - output_mapping: StackEffectMapping = [] - for oeffect in instr.output_effects: - output_mapping.append((stack[sp], oeffect)) - sp += 1 - active_effects: list[ActiveCacheEffect] = [] - for ceffect in instr.cache_effects: - if ceffect.name != UNUSED: - active_effects.append(ActiveCacheEffect(ceffect, offset)) - offset += ceffect.size - return Component(instr, input_mapping, output_mapping, active_effects), sp, offset - - def check_macro_components( - self, macro: parser.Macro - ) -> list[InstructionOrCacheEffect]: - components: list[InstructionOrCacheEffect] = [] - for uop in macro.uops: - match uop: - case parser.OpName(name): - if name not in self.instrs: - self.error(f"Unknown instruction {name!r}", macro) - components.append(self.instrs[name]) - case parser.CacheEffect(): - components.append(uop) - case _: - typing.assert_never(uop) - return components - - def stack_analysis( - self, components: typing.Iterable[InstructionOrCacheEffect] - ) -> tuple[list[StackEffect], int]: - """Analyze a macro. - - Ignore cache effects. - - Return the list of variables (as StackEffects) and the initial stack pointer. - """ - lowest = current = highest = 0 - conditions: dict[int, str] = {} # Indexed by 'current'. 
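Aside (not part of the patch): the offsetting done in effect_counts() above is easier to see in a tiny standalone sketch. The helper and the numbers below are invented for illustration; only the arithmetic mirrors the original.

    def combine(parts: list[tuple[int, int]]) -> tuple[int, int]:
        """parts = [(n_inputs, n_outputs), ...] per macro component, in order."""
        total_in = total_out = 0
        for delta_i, delta_o in parts:
            offset = min(delta_i, total_out)   # reuse values a previous part pushed
            total_in += delta_i - offset
            total_out += delta_o - offset
        return total_in, total_out

    # combine([(0, 1), (2, 1)]) == (1, 1): the first part pushes one value, the
    # second pops two and pushes one, so the macro as a whole pops 1 and pushes 1.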
- last_instr: Instruction | None = None - for thing in components: - if isinstance(thing, Instruction): - last_instr = thing - for thing in components: - match thing: - case Instruction() as instr: - if any( - eff.size for eff in instr.input_effects + instr.output_effects - ): - # TODO: Eventually this will be needed, at least for macros. - self.error( - f"Instruction {instr.name!r} has variable-sized stack effect, " - "which are not supported in macro instructions", - instr.inst, # TODO: Pass name+location of macro - ) - if any(eff.cond for eff in instr.input_effects): - self.error( - f"Instruction {instr.name!r} has conditional input stack effect, " - "which are not supported in macro instructions", - instr.inst, # TODO: Pass name+location of macro - ) - if any(eff.cond for eff in instr.output_effects) and instr is not last_instr: - self.error( - f"Instruction {instr.name!r} has conditional output stack effect, " - "but is not the last instruction in a macro", - instr.inst, # TODO: Pass name+location of macro - ) - current -= len(instr.input_effects) - lowest = min(lowest, current) - for eff in instr.output_effects: - if eff.cond: - conditions[current] = eff.cond - current += 1 - highest = max(highest, current) - case parser.CacheEffect(): - pass - case _: - typing.assert_never(thing) - # At this point, 'current' is the net stack effect, - # and 'lowest' and 'highest' are the extremes. - # Note that 'lowest' may be negative. - stack = [ - StackEffect(f"_tmp_{i}", "", conditions.get(highest - i, "")) - for i in reversed(range(1, highest - lowest + 1)) - ] - return stack, -lowest - +class Generator(Analyzer): def get_stack_effect_info( - self, thing: parser.InstDef | parser.Macro | parser.Pseudo + self, thing: parsing.InstDef | parsing.Macro | parsing.Pseudo ) -> tuple[AnyInstruction | None, str | None, str | None]: def effect_str(effects: list[StackEffect]) -> str: n_effect, sym_effect = list_effect_size(effects) @@ -1052,8 +104,10 @@ def effect_str(effects: list[StackEffect]) -> str: return str(n_effect) instr: AnyInstruction | None + popped: str | None + pushed: str | None match thing: - case parser.InstDef(): + case parsing.InstDef(): if thing.kind != "op": instr = self.instrs[thing.name] popped = effect_str(instr.input_effects) @@ -1062,40 +116,10 @@ def effect_str(effects: list[StackEffect]) -> str: instr = None popped = "" pushed = "" - case parser.Macro(): + case parsing.Macro(): instr = self.macro_instrs[thing.name] - parts = [comp for comp in instr.parts if isinstance(comp, Component)] - # Note: stack_analysis() already verifies that macro components - # have no variable-sized stack effects. - low = 0 - sp = 0 - high = 0 - pushed_symbolic: list[str] = [] - for comp in parts: - for effect in comp.instr.input_effects: - assert not effect.cond, effect - assert not effect.size, effect - sp -= 1 - low = min(low, sp) - for effect in comp.instr.output_effects: - assert not effect.size, effect - if effect.cond: - if effect.cond in ("0", "1"): - pushed_symbolic.append(effect.cond) - else: - pushed_symbolic.append(maybe_parenthesize(f"{maybe_parenthesize(effect.cond)} ? 1 : 0")) - sp += 1 - high = max(sp, high) - if high != max(0, sp): - # If you get this, intermediate stack growth occurs, - # and stack size calculations may go awry. - # E.g. [push, pop]. The fix would be for stack size - # calculations to use the micro ops. 
- self.error("Macro has virtual stack growth", thing) - popped = str(-low) - pushed_symbolic.append(str(sp - low - len(pushed_symbolic))) - pushed = " + ".join(pushed_symbolic) - case parser.Pseudo(): + popped, pushed = stacking.get_stack_effect_info_for_macro(instr) + case parsing.Pseudo(): instr = self.pseudo_instrs[thing.name] popped = pushed = None # Calculate stack effect, and check that it's the the same @@ -1134,10 +158,14 @@ def write_function( ) -> None: self.out.emit("") self.out.emit("#ifndef NEED_OPCODE_METADATA") - self.out.emit(f"extern int _PyOpcode_num_{direction}(int opcode, int oparg, bool jump);") + self.out.emit( + f"extern int _PyOpcode_num_{direction}(int opcode, int oparg, bool jump);" + ) self.out.emit("#else") self.out.emit("int") - self.out.emit(f"_PyOpcode_num_{direction}(int opcode, int oparg, bool jump) {{") + self.out.emit( + f"_PyOpcode_num_{direction}(int opcode, int oparg, bool jump) {{" + ) self.out.emit(" switch(opcode) {") for instr, effect in data: self.out.emit(f" case {instr.name}:") @@ -1153,10 +181,15 @@ def write_function( self.out.emit("") def from_source_files(self) -> str: - paths = f"\n{self.out.comment} ".join( - prettify_filename(filename) - for filename in self.input_filenames - ) + filenames = [] + for filename in self.input_filenames: + try: + filename = os.path.relpath(filename, ROOT) + except ValueError: + # May happen on Windows if root and temp on different volumes + pass + filenames.append(filename) + paths = f"\n{self.out.comment} ".join(filenames) return f"{self.out.comment} from:\n{self.out.comment} {paths}\n" def write_provenance_header(self): @@ -1164,20 +197,21 @@ def write_provenance_header(self): self.out.write_raw(self.from_source_files()) self.out.write_raw(f"{self.out.comment} Do not edit!\n") - def write_metadata(self) -> None: + def write_metadata(self, metadata_filename: str, pymetadata_filename: str) -> None: """Write instruction metadata to output file.""" # Compute the set of all instruction formats. all_formats: set[str] = set() for thing in self.everything: + format: str | None match thing: case OverriddenInstructionPlaceHolder(): continue - case parser.InstDef(): + case parsing.InstDef(): format = self.instrs[thing.name].instr_fmt - case parser.Macro(): + case parsing.Macro(): format = self.macro_instrs[thing.name].instr_fmt - case parser.Pseudo(): + case parsing.Pseudo(): format = None for target in self.pseudos[thing.name].targets: target_instr = self.instrs.get(target) @@ -1186,13 +220,15 @@ def write_metadata(self) -> None: format = target_instr.instr_fmt else: assert format == target_instr.instr_fmt + assert format is not None case _: typing.assert_never(thing) all_formats.add(format) - # Turn it into a list of enum definitions. + + # Turn it into a sorted list of enum values. format_enums = [INSTR_FMT_PREFIX + format for format in sorted(all_formats)] - with open(self.metadata_filename, "w") as f: + with open(metadata_filename, "w") as f: # Create formatter self.out = Formatter(f, 0) @@ -1207,14 +243,17 @@ def write_metadata(self) -> None: self.write_stack_effect_functions() - # Write type definitions - self.out.emit(f"enum InstructionFormat {{ {', '.join(format_enums)} }};") + # Write the enum definition for instruction formats. 
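Aside (not part of the patch): the try/except around os.path.relpath() in from_source_files() above is there because relpath has no answer when the two paths sit on different Windows drives. A minimal demonstration with made-up paths, using ntpath so it behaves the same on any platform:

    import ntpath

    try:
        ntpath.relpath("D:\\temp\\bytecodes.c", start="C:\\cpython")
    except ValueError:
        # No relative form exists across drives; the generator keeps the
        # original (absolute) filename in that case.
        pass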
+ with self.out.block("enum InstructionFormat", ";"): + for enum in format_enums: + self.out.emit(enum + ",") self.out.emit("") self.out.emit( "#define IS_VALID_OPCODE(OP) \\\n" " (((OP) >= 0) && ((OP) < OPCODE_METADATA_SIZE) && \\\n" - " (_PyOpcode_opcode_metadata[(OP)].valid_entry))") + " (_PyOpcode_opcode_metadata[(OP)].valid_entry))" + ) self.out.emit("") InstructionFlags.emit_macros(self.out) @@ -1228,17 +267,23 @@ def write_metadata(self) -> None: with self.out.block("struct opcode_macro_expansion", ";"): self.out.emit("int nuops;") - self.out.emit("struct { int16_t uop; int8_t size; int8_t offset; } uops[8];") + self.out.emit( + "struct { int16_t uop; int8_t size; int8_t offset; } uops[8];" + ) self.out.emit("") for key, value in OPARG_SIZES.items(): self.out.emit(f"#define {key} {value}") self.out.emit("") - self.out.emit("#define OPCODE_METADATA_FMT(OP) " - "(_PyOpcode_opcode_metadata[(OP)].instr_format)") + self.out.emit( + "#define OPCODE_METADATA_FMT(OP) " + "(_PyOpcode_opcode_metadata[(OP)].instr_format)" + ) self.out.emit("#define SAME_OPCODE_METADATA(OP1, OP2) \\") - self.out.emit(" (OPCODE_METADATA_FMT(OP1) == OPCODE_METADATA_FMT(OP2))") + self.out.emit( + " (OPCODE_METADATA_FMT(OP1) == OPCODE_METADATA_FMT(OP2))" + ) self.out.emit("") # Write metadata array declaration @@ -1247,27 +292,35 @@ def write_metadata(self) -> None: self.out.emit("#define OPCODE_MACRO_EXPANSION_SIZE 256") self.out.emit("") self.out.emit("#ifndef NEED_OPCODE_METADATA") - self.out.emit("extern const struct opcode_metadata " - "_PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE];") - self.out.emit("extern const struct opcode_macro_expansion " - "_PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE];") - self.out.emit("extern const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE];") + self.out.emit( + "extern const struct opcode_metadata " + "_PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE];" + ) + self.out.emit( + "extern const struct opcode_macro_expansion " + "_PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE];" + ) + self.out.emit( + "extern const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE];" + ) self.out.emit("#else // if NEED_OPCODE_METADATA") - self.out.emit("const struct opcode_metadata " - "_PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = {") + self.out.emit( + "const struct opcode_metadata " + "_PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = {" + ) # Write metadata for each instruction for thing in self.everything: match thing: case OverriddenInstructionPlaceHolder(): continue - case parser.InstDef(): + case parsing.InstDef(): if thing.kind != "op": self.write_metadata_for_inst(self.instrs[thing.name]) - case parser.Macro(): + case parsing.Macro(): self.write_metadata_for_macro(self.macro_instrs[thing.name]) - case parser.Pseudo(): + case parsing.Pseudo(): self.write_metadata_for_pseudo(self.pseudo_instrs[thing.name]) case _: typing.assert_never(thing) @@ -1285,32 +338,38 @@ def write_metadata(self) -> None: match thing: case OverriddenInstructionPlaceHolder(): pass - case parser.InstDef(name=name): + case parsing.InstDef(name=name): instr = self.instrs[name] # Since an 'op' is not a bytecode, it has no expansion; but 'inst' is if instr.kind == "inst" and instr.is_viable_uop(): # Construct a dummy Component -- input/output mappings are not used - part = Component(instr, [], [], instr.active_caches) + part = Component(instr, instr.active_caches) self.write_macro_expansions(instr.name, [part]) - elif instr.kind == "inst" and variable_used(instr.inst, "oparg1"): - assert 
variable_used(instr.inst, "oparg2"), "Half super-instr?" + elif instr.kind == "inst" and variable_used( + instr.inst, "oparg1" + ): + assert variable_used( + instr.inst, "oparg2" + ), "Half super-instr?" self.write_super_expansions(instr.name) - case parser.Macro(): + case parsing.Macro(): mac = self.macro_instrs[thing.name] self.write_macro_expansions(mac.name, mac.parts) - case parser.Pseudo(): + case parsing.Pseudo(): pass case _: typing.assert_never(thing) - with self.out.block("const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE] =", ";"): - self.write_uop_items(lambda name, counter: f"[{name}] = \"{name}\",") + with self.out.block( + "const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE] =", ";" + ): + self.write_uop_items(lambda name, counter: f'[{name}] = "{name}",') self.out.emit("#endif // NEED_OPCODE_METADATA") - with open(self.pymetadata_filename, "w") as f: + with open(pymetadata_filename, "w") as f: # Create formatter - self.out = Formatter(f, 0, comment = "#") + self.out = Formatter(f, 0, comment="#") self.write_provenance_header() @@ -1318,10 +377,10 @@ def write_metadata(self) -> None: self.out.emit("_specializations = {") for name, family in self.families.items(): with self.out.indent(): - self.out.emit(f"\"{family.name}\": [") + self.out.emit(f'"{family.name}": [') with self.out.indent(): for m in family.members: - self.out.emit(f"\"{m}\",") + self.out.emit(f'"{m}",') self.out.emit(f"],") self.out.emit("}") @@ -1329,15 +388,17 @@ def write_metadata(self) -> None: self.out.emit("") self.out.emit("# An irregular case:") self.out.emit( - "_specializations[\"BINARY_OP\"].append(" - "\"BINARY_OP_INPLACE_ADD_UNICODE\")") + '_specializations["BINARY_OP"].append(' + '"BINARY_OP_INPLACE_ADD_UNICODE")' + ) # Make list of specialized instructions self.out.emit("") self.out.emit( "_specialized_instructions = [" - "opcode for family in _specializations.values() for opcode in family" - "]") + "opcode for family in _specializations.values() for opcode in family" + "]" + ) def write_pseudo_instrs(self) -> None: """Write the IS_PSEUDO_INSTR macro""" @@ -1376,7 +437,15 @@ def write_macro_expansions(self, name: str, parts: MacroParts) -> None: if isinstance(part, Component): # All component instructions must be viable uops if not part.instr.is_viable_uop(): - print(f"NOTE: Part {part.instr.name} of {name} is not a viable uop") + # This note just reminds us about macros that cannot + # be expanded to Tier 2 uops. It is not an error. + # It is sometimes emitted for macros that have a + # manual translation in translate_bytecode_to_trace() + # in Python/optimizer.c. 
+ self.note( + f"Part {part.instr.name} of {name} is not a viable uop", + part.instr.inst, + ) return if not part.active_caches: size, offset = OPARG_SIZES["OPARG_FULL"], 0 @@ -1420,22 +489,24 @@ def write_super_expansions(self, name: str) -> None: instr2 = self.instrs[name2] assert not instr1.active_caches, f"{name1} has active caches" assert not instr2.active_caches, f"{name2} has active caches" - expansions = [ + expansions: list[tuple[str, int, int]] = [ (name1, OPARG_SIZES["OPARG_TOP"], 0), (name2, OPARG_SIZES["OPARG_BOTTOM"], 0), ] self.write_expansions(name, expansions) - def write_expansions(self, name: str, expansions: list[tuple[str, int, int]]) -> None: - pieces = [f"{{ {name}, {size}, {offset} }}" for name, size, offset in expansions] + def write_expansions( + self, name: str, expansions: list[tuple[str, int, int]] + ) -> None: + pieces = [ + f"{{ {name}, {size}, {offset} }}" for name, size, offset in expansions + ] self.out.emit( f"[{name}] = " f"{{ .nuops = {len(pieces)}, .uops = {{ {', '.join(pieces)} }} }}," ) - def emit_metadata_entry( - self, name: str, fmt: str, flags: InstructionFlags - ) -> None: + def emit_metadata_entry(self, name: str, fmt: str, flags: InstructionFlags) -> None: flag_names = flags.names(value=True) if not flag_names: flag_names.append("0") @@ -1456,75 +527,90 @@ def write_metadata_for_pseudo(self, ps: PseudoInstruction) -> None: """Write metadata for a macro-instruction.""" self.emit_metadata_entry(ps.name, ps.instr_fmt, ps.instr_flags) - def write_instructions(self) -> None: + def write_instructions( + self, output_filename: str, emit_line_directives: bool + ) -> None: """Write instructions to output file.""" - with open(self.output_filename, "w") as f: + with open(output_filename, "w") as f: # Create formatter - self.out = Formatter(f, 8, self.emit_line_directives) + self.out = Formatter(f, 8, emit_line_directives) self.write_provenance_header() # Write and count instructions of all kinds n_instrs = 0 n_macros = 0 - n_pseudos = 0 for thing in self.everything: match thing: case OverriddenInstructionPlaceHolder(): self.write_overridden_instr_place_holder(thing) - case parser.InstDef(): + case parsing.InstDef(): if thing.kind != "op": n_instrs += 1 self.write_instr(self.instrs[thing.name]) - case parser.Macro(): + case parsing.Macro(): n_macros += 1 - self.write_macro(self.macro_instrs[thing.name]) - case parser.Pseudo(): - n_pseudos += 1 + mac = self.macro_instrs[thing.name] + stacking.write_macro_instr(mac, self.out, self.families.get(mac.name)) + # self.write_macro(self.macro_instrs[thing.name]) + case parsing.Pseudo(): + pass case _: typing.assert_never(thing) print( - f"Wrote {n_instrs} instructions, {n_macros} macros, " - f"and {n_pseudos} pseudos to {self.output_filename}", + f"Wrote {n_instrs} instructions and {n_macros} macros " + f"to {output_filename}", file=sys.stderr, ) - def write_executor_instructions(self) -> None: + def write_executor_instructions( + self, executor_filename: str, emit_line_directives: bool + ) -> None: """Generate cases for the Tier 2 interpreter.""" - with open(self.executor_filename, "w") as f: - self.out = Formatter(f, 8, self.emit_line_directives) + n_instrs = 0 + n_uops = 0 + with open(executor_filename, "w") as f: + self.out = Formatter(f, 8, emit_line_directives) self.write_provenance_header() for thing in self.everything: match thing: case OverriddenInstructionPlaceHolder(): # TODO: Is this helpful? 
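Aside (not part of the patch): write_expansions() above boils down to string formatting. The helper and the example values below are invented; only the output shape mirrors what the generator emits into the _PyOpcode_macro_expansion table.

    def render_expansion(name: str, expansions: list[tuple[str, str, int]]) -> str:
        # One designated-initializer line per macro/super-instruction.
        pieces = [f"{{ {uop}, {size}, {offset} }}" for uop, size, offset in expansions]
        return (
            f"[{name}] = "
            f"{{ .nuops = {len(pieces)}, .uops = {{ {', '.join(pieces)} }} }},"
        )

    # render_expansion("LOAD_FAST_LOAD_FAST",
    #                  [("LOAD_FAST", "OPARG_TOP", 0), ("LOAD_FAST", "OPARG_BOTTOM", 0)])
    # -> '[LOAD_FAST_LOAD_FAST] = { .nuops = 2, .uops = '
    #    '{ { LOAD_FAST, OPARG_TOP, 0 }, { LOAD_FAST, OPARG_BOTTOM, 0 } } },'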
self.write_overridden_instr_place_holder(thing) - case parser.InstDef(): + case parsing.InstDef(): instr = self.instrs[thing.name] if instr.is_viable_uop(): + if instr.kind == "op": + n_uops += 1 + else: + n_instrs += 1 self.out.emit("") with self.out.block(f"case {thing.name}:"): instr.write(self.out, tier=TIER_TWO) + if instr.check_eval_breaker: + self.out.emit("CHECK_EVAL_BREAKER();") self.out.emit("break;") # elif instr.kind != "op": # print(f"NOTE: {thing.name} is not a viable uop") - case parser.Macro(): + case parsing.Macro(): pass - case parser.Pseudo(): + case parsing.Pseudo(): pass case _: typing.assert_never(thing) print( - f"Wrote some stuff to {self.executor_filename}", + f"Wrote {n_instrs} instructions and {n_uops} ops to {executor_filename}", file=sys.stderr, ) - def write_overridden_instr_place_holder(self, - place_holder: OverriddenInstructionPlaceHolder) -> None: + def write_overridden_instr_place_holder( + self, place_holder: OverriddenInstructionPlaceHolder + ) -> None: self.out.emit("") self.out.emit( - f"{self.out.comment} TARGET({place_holder.name}) overridden by later definition") + f"{self.out.comment} TARGET({place_holder.name}) overridden by later definition" + ) def write_instr(self, instr: Instruction) -> None: name = instr.name @@ -1540,162 +626,6 @@ def write_instr(self, instr: Instruction) -> None: self.out.emit("CHECK_EVAL_BREAKER();") self.out.emit(f"DISPATCH();") - def write_macro(self, mac: MacroInstruction) -> None: - """Write code for a macro instruction.""" - last_instr: Instruction | None = None - with self.wrap_macro(mac): - cache_adjust = 0 - for part in mac.parts: - match part: - case parser.CacheEffect(size=size): - cache_adjust += size - case Component() as comp: - last_instr = comp.instr - comp.write_body(self.out) - cache_adjust += comp.instr.cache_offset - - if cache_adjust: - self.out.emit(f"next_instr += {cache_adjust};") - - if ( - (family := self.families.get(mac.name)) - and mac.name == family.name - and (cache_size := family.size) - ): - self.out.emit( - f"static_assert({cache_size} == " - f'{cache_adjust}, "incorrect cache size");' - ) - - @contextlib.contextmanager - def wrap_macro(self, mac: MacroInstruction): - """Boilerplate for macro instructions.""" - # TODO: Somewhere (where?) make it so that if one instruction - # has an output that is input to another, and the variable names - # and types match and don't conflict with other instructions, - # that variable is declared with the right name and type in the - # outer block, rather than trusting the compiler to optimize it. - self.out.emit("") - with self.out.block(f"TARGET({mac.name})"): - if mac.predicted: - self.out.emit(f"PREDICTED({mac.name});") - - # The input effects should have no conditionals. - # Only the output effects do (for now). - ieffects = [ - StackEffect(eff.name, eff.type) if eff.cond else eff - for eff in mac.stack - ] - - for i, var in reversed(list(enumerate(ieffects))): - src = None - if i < mac.initial_sp: - src = StackEffect(f"stack_pointer[-{mac.initial_sp - i}]", "") - self.out.declare(var, src) - - yield - - self.out.stack_adjust(ieffects[:mac.initial_sp], mac.stack[:mac.final_sp]) - - for i, var in enumerate(reversed(mac.stack[: mac.final_sp]), 1): - dst = StackEffect(f"stack_pointer[-{i}]", "") - self.out.assign(dst, var) - - self.out.emit(f"DISPATCH();") - - -def prettify_filename(filename: str) -> str: - # Make filename more user-friendly and less platform-specific, - # it is only used for error reporting at this point. 
- filename = filename.replace("\\", "/") - if filename.startswith("./"): - filename = filename[2:] - if filename.endswith(".new"): - filename = filename[:-4] - return filename - - -def extract_block_text(block: parser.Block) -> tuple[list[str], bool, int]: - # Get lines of text with proper dedent - blocklines = block.text.splitlines(True) - first_token: lx.Token = block.tokens[0] # IndexError means the context is broken - block_line = first_token.begin[0] - - # Remove blank lines from both ends - while blocklines and not blocklines[0].strip(): - blocklines.pop(0) - block_line += 1 - while blocklines and not blocklines[-1].strip(): - blocklines.pop() - - # Remove leading and trailing braces - assert blocklines and blocklines[0].strip() == "{" - assert blocklines and blocklines[-1].strip() == "}" - blocklines.pop() - blocklines.pop(0) - block_line += 1 - - # Remove trailing blank lines - while blocklines and not blocklines[-1].strip(): - blocklines.pop() - - # Separate CHECK_EVAL_BREAKER() macro from end - check_eval_breaker = \ - blocklines != [] and blocklines[-1].strip() == "CHECK_EVAL_BREAKER();" - if check_eval_breaker: - del blocklines[-1] - - return blocklines, check_eval_breaker, block_line - - -def always_exits(lines: list[str]) -> bool: - """Determine whether a block always ends in a return/goto/etc.""" - if not lines: - return False - line = lines[-1].rstrip() - # Indent must match exactly (TODO: Do something better) - if line[:12] != " " * 12: - return False - line = line[12:] - return line.startswith( - ( - "goto ", - "return ", - "DISPATCH", - "GO_TO_", - "Py_UNREACHABLE()", - "ERROR_IF(true, ", - ) - ) - - -def variable_used(node: parser.Node, name: str) -> bool: - """Determine whether a variable with a given name is used in a node.""" - return any( - token.kind == "IDENTIFIER" and token.text == name for token in node.tokens - ) - - -def variable_used_unspecialized(node: parser.Node, name: str) -> bool: - """Like variable_used(), but skips #if ENABLE_SPECIALIZATION blocks.""" - tokens: list[lx.Token] = [] - skipping = False - for i, token in enumerate(node.tokens): - if token.kind == "MACRO": - text = "".join(token.text.split()) - # TODO: Handle nested #if - if text == "#if": - if ( - i + 1 < len(node.tokens) - and node.tokens[i + 1].text == "ENABLE_SPECIALIZATION" - ): - skipping = True - elif text in ("#else", "#endif"): - skipping = False - if not skipping: - tokens.append(token) - return any(token.kind == "IDENTIFIER" and token.text == name for token in tokens) - def main(): """Parse command line, parse input, analyze, write output.""" @@ -1704,17 +634,17 @@ def main(): args.input.append(DEFAULT_INPUT) # Raises OSError if input unreadable - a = Analyzer(args.input, args.output, args.metadata, args.pymetadata, args.executor_cases) + a = Generator(args.input) - if args.emit_line_directives: - a.emit_line_directives = True a.parse() # Raises SyntaxError on failure a.analyze() # Prints messages and sets a.errors on failure if a.errors: sys.exit(f"Found {a.errors} errors") - a.write_instructions() # Raises OSError if output can't be written - a.write_metadata() - a.write_executor_instructions() + + # These raise OSError if output can't be written + a.write_instructions(args.output, args.emit_line_directives) + a.write_metadata(args.metadata, args.pymetadata) + a.write_executor_instructions(args.executor_cases, args.emit_line_directives) if __name__ == "__main__": diff --git a/Tools/cases_generator/instructions.py b/Tools/cases_generator/instructions.py new file mode 100644 
index 00000000000000..aa94dbb07ea1c0 --- /dev/null +++ b/Tools/cases_generator/instructions.py @@ -0,0 +1,343 @@ +import dataclasses +import re +import typing + +from flags import InstructionFlags, variable_used, variable_used_unspecialized +from formatting import ( + Formatter, + UNUSED, + list_effect_size, +) +import lexer as lx +import parsing +from parsing import StackEffect +import stacking + +BITS_PER_CODE_UNIT = 16 + + +@dataclasses.dataclass +class ActiveCacheEffect: + """Wraps a CacheEffect that is actually used, in context.""" + + effect: parsing.CacheEffect + offset: int + + +FORBIDDEN_NAMES_IN_UOPS = ( + "resume_with_error", + "kwnames", + "next_instr", + "oparg1", # Proxy for super-instructions like LOAD_FAST_LOAD_FAST + "JUMPBY", + "DISPATCH", + "INSTRUMENTED_JUMP", + "throwflag", + "exception_unwind", + "import_from", + "import_name", + "_PyObject_CallNoArgs", # Proxy for BEFORE_WITH +) + + +# Interpreter tiers +TIER_ONE: typing.Final = 1 # Specializing adaptive interpreter (PEP 659) +TIER_TWO: typing.Final = 2 # Experimental tracing interpreter +Tiers: typing.TypeAlias = typing.Literal[1, 2] + + +@dataclasses.dataclass +class Instruction: + """An instruction with additional data and code.""" + + # Parts of the underlying instruction definition + inst: parsing.InstDef + kind: typing.Literal["inst", "op"] + name: str + block: parsing.Block + block_text: list[str] # Block.text, less curlies, less PREDICT() calls + block_line: int # First line of block in original code + + # Computed by constructor + always_exits: bool + has_deopt: bool + cache_offset: int + cache_effects: list[parsing.CacheEffect] + input_effects: list[StackEffect] + output_effects: list[StackEffect] + unmoved_names: frozenset[str] + instr_fmt: str + instr_flags: InstructionFlags + active_caches: list[ActiveCacheEffect] + + # Set later + family: parsing.Family | None = None + predicted: bool = False + + def __init__(self, inst: parsing.InstDef): + self.inst = inst + self.kind = inst.kind + self.name = inst.name + self.block = inst.block + self.block_text, self.check_eval_breaker, self.block_line = extract_block_text( + self.block + ) + self.always_exits = always_exits(self.block_text) + self.has_deopt = variable_used(self.inst, "DEOPT_IF") + self.cache_effects = [ + effect for effect in inst.inputs if isinstance(effect, parsing.CacheEffect) + ] + self.cache_offset = sum(c.size for c in self.cache_effects) + self.input_effects = [ + effect for effect in inst.inputs if isinstance(effect, StackEffect) + ] + self.output_effects = inst.outputs # For consistency/completeness + unmoved_names: set[str] = set() + for ieffect, oeffect in zip(self.input_effects, self.output_effects): + if ieffect == oeffect and ieffect.name == oeffect.name: + unmoved_names.add(ieffect.name) + else: + break + self.unmoved_names = frozenset(unmoved_names) + + self.instr_flags = InstructionFlags.fromInstruction(inst) + + self.active_caches = [] + offset = 0 + for effect in self.cache_effects: + if effect.name != UNUSED: + self.active_caches.append(ActiveCacheEffect(effect, offset)) + offset += effect.size + + if self.instr_flags.HAS_ARG_FLAG: + fmt = "IB" + else: + fmt = "IX" + if offset: + fmt += "C" + "0" * (offset - 1) + self.instr_fmt = fmt + + def is_viable_uop(self) -> bool: + """Whether this instruction is viable as a uop.""" + dprint: typing.Callable[..., None] = lambda *args, **kwargs: None + # if self.name.startswith("CALL"): + # dprint = print + + if self.name == "EXIT_TRACE": + return True # This has 'return frame' but it's okay 
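Aside (not part of the patch): the instr_fmt string computed in Instruction.__init__ above follows a small rule. A standalone sketch, with the letter meanings paraphrased from the generator's conventions:

    def format_for(has_arg: bool, cache_offset: int) -> str:
        # Roughly: "I" = the instruction itself, "B" = oparg byte used,
        # "X" = oparg unused; the first cache code unit adds "C",
        # each further unit adds "0".
        fmt = "IB" if has_arg else "IX"
        if cache_offset:
            fmt += "C" + "0" * (cache_offset - 1)
        return fmt

    # format_for(True, 0)  -> "IB"
    # format_for(False, 1) -> "IXC"
    # format_for(True, 4)  -> "IBC000"   (hypothetical 4-unit cache)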
+ if self.always_exits: + dprint(f"Skipping {self.name} because it always exits") + return False + if len(self.active_caches) > 1: + # print(f"Skipping {self.name} because it has >1 cache entries") + return False + res = True + for forbidden in FORBIDDEN_NAMES_IN_UOPS: + # NOTE: To disallow unspecialized uops, use + # if variable_used(self.inst, forbidden): + if variable_used_unspecialized(self.inst, forbidden): + dprint(f"Skipping {self.name} because it uses {forbidden}") + res = False + return res + + def write(self, out: Formatter, tier: Tiers = TIER_ONE) -> None: + """Write one instruction, sans prologue and epilogue.""" + + # Write a static assertion that a family's cache size is correct + out.static_assert_family_size(self.name, self.family, self.cache_offset) + + # Write input stack effect variable declarations and initializations + stacking.write_single_instr(self, out, tier) + + # Skip the rest if the block always exits + if self.always_exits: + return + + # Write cache effect + if tier == TIER_ONE and self.cache_offset: + out.emit(f"next_instr += {self.cache_offset};") + + def write_body( + self, + out: Formatter, + dedent: int, + active_caches: list[ActiveCacheEffect], + tier: Tiers = TIER_ONE, + ) -> None: + """Write the instruction body.""" + # Write cache effect variable declarations and initializations + for active in active_caches: + ceffect = active.effect + bits = ceffect.size * BITS_PER_CODE_UNIT + if bits == 64: + # NOTE: We assume that 64-bit data in the cache + # is always an object pointer. + # If this becomes false, we need a way to specify + # syntactically what type the cache data is. + typ = "PyObject *" + func = "read_obj" + else: + typ = f"uint{bits}_t " + func = f"read_u{bits}" + if tier == TIER_ONE: + out.emit( + f"{typ}{ceffect.name} = {func}(&next_instr[{active.offset}].cache);" + ) + else: + out.emit(f"{typ}{ceffect.name} = ({typ.strip()})operand;") + + # Write the body, substituting a goto for ERROR_IF() and other stuff + assert dedent <= 0 + extra = " " * -dedent + names_to_skip = self.unmoved_names | frozenset({UNUSED, "null"}) + offset = 0 + context = self.block.context + assert context is not None and context.owner is not None + filename = context.owner.filename + for line in self.block_text: + out.set_lineno(self.block_line + offset, filename) + offset += 1 + if m := re.match(r"(\s*)ERROR_IF\((.+), (\w+)\);\s*(?://.*)?$", line): + space, cond, label = m.groups() + space = extra + space + # ERROR_IF() must pop the inputs from the stack. + # The code block is responsible for DECREF()ing them. + # NOTE: If the label doesn't exist, just add it to ceval.c. + + # Don't pop common input/output effects at the bottom! + # These aren't DECREF'ed so they can stay. 
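Aside (not part of the patch): the cache-reading loop in write_body() above emits one C declaration per named cache entry. A standalone sketch of the Tier 1 case, with invented entry names; BITS_PER_CODE_UNIT is 16, as defined at the top of instructions.py.

    def cache_read(name: str, size: int, offset: int) -> str:
        bits = size * 16
        if bits == 64:
            # 64-bit cache data is assumed to hold an object pointer.
            typ, func = "PyObject *", "read_obj"
        else:
            typ, func = f"uint{bits}_t ", f"read_u{bits}"
        return f"{typ}{name} = {func}(&next_instr[{offset}].cache);"

    # cache_read("counter", 1, 0) -> 'uint16_t counter = read_u16(&next_instr[0].cache);'
    # cache_read("version", 2, 1) -> 'uint32_t version = read_u32(&next_instr[1].cache);'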
+ ieffs = list(self.input_effects) + oeffs = list(self.output_effects) + while ( + ieffs + and oeffs + and ieffs[0] == oeffs[0] + and ieffs[0].name == oeffs[0].name + ): + ieffs.pop(0) + oeffs.pop(0) + ninputs, symbolic = list_effect_size(ieffs) + if ninputs: + label = f"pop_{ninputs}_{label}" + if symbolic: + out.write_raw( + f"{space}if ({cond}) {{ STACK_SHRINK({symbolic}); goto {label}; }}\n" + ) + else: + out.write_raw(f"{space}if ({cond}) goto {label};\n") + elif m := re.match(r"(\s*)DECREF_INPUTS\(\);\s*(?://.*)?$", line): + out.reset_lineno() + space = extra + m.group(1) + for ieff in self.input_effects: + if ieff.name in names_to_skip: + continue + if ieff.size: + out.write_raw( + f"{space}for (int _i = {ieff.size}; --_i >= 0;) {{\n" + ) + out.write_raw(f"{space} Py_DECREF({ieff.name}[_i]);\n") + out.write_raw(f"{space}}}\n") + else: + decref = "XDECREF" if ieff.cond else "DECREF" + out.write_raw(f"{space}Py_{decref}({ieff.name});\n") + else: + out.write_raw(extra + line) + out.reset_lineno() + + +InstructionOrCacheEffect = Instruction | parsing.CacheEffect + + +@dataclasses.dataclass +class Component: + instr: Instruction + active_caches: list[ActiveCacheEffect] + + +MacroParts = list[Component | parsing.CacheEffect] + + +@dataclasses.dataclass +class MacroInstruction: + """A macro instruction.""" + + name: str + instr_fmt: str + instr_flags: InstructionFlags + macro: parsing.Macro + parts: MacroParts + cache_offset: int + predicted: bool = False + + +@dataclasses.dataclass +class PseudoInstruction: + """A pseudo instruction.""" + + name: str + targets: list[Instruction] + instr_fmt: str + instr_flags: InstructionFlags + + +@dataclasses.dataclass +class OverriddenInstructionPlaceHolder: + name: str + + +AnyInstruction = Instruction | MacroInstruction | PseudoInstruction + + +def extract_block_text(block: parsing.Block) -> tuple[list[str], bool, int]: + # Get lines of text with proper dedent + blocklines = block.text.splitlines(True) + first_token: lx.Token = block.tokens[0] # IndexError means the context is broken + block_line = first_token.begin[0] + + # Remove blank lines from both ends + while blocklines and not blocklines[0].strip(): + blocklines.pop(0) + block_line += 1 + while blocklines and not blocklines[-1].strip(): + blocklines.pop() + + # Remove leading and trailing braces + assert blocklines and blocklines[0].strip() == "{" + assert blocklines and blocklines[-1].strip() == "}" + blocklines.pop() + blocklines.pop(0) + block_line += 1 + + # Remove trailing blank lines + while blocklines and not blocklines[-1].strip(): + blocklines.pop() + + # Separate CHECK_EVAL_BREAKER() macro from end + check_eval_breaker = ( + blocklines != [] and blocklines[-1].strip() == "CHECK_EVAL_BREAKER();" + ) + if check_eval_breaker: + del blocklines[-1] + + return blocklines, check_eval_breaker, block_line + + +def always_exits(lines: list[str]) -> bool: + """Determine whether a block always ends in a return/goto/etc.""" + if not lines: + return False + line = lines[-1].rstrip() + # Indent must match exactly (TODO: Do something better) + if line[:12] != " " * 12: + return False + line = line[12:] + return line.startswith( + ( + "goto ", + "return ", + "DISPATCH", + "GO_TO_", + "Py_UNREACHABLE()", + "ERROR_IF(true, ", + ) + ) diff --git a/Tools/cases_generator/interpreter_definition.md b/Tools/cases_generator/interpreter_definition.md index f141848631d04a..5c4238756748a7 100644 --- a/Tools/cases_generator/interpreter_definition.md +++ b/Tools/cases_generator/interpreter_definition.md @@ -108,7 
+108,7 @@ and a piece of C code describing its semantics:: NAME [":" type] [ "if" "(" C-expression ")" ] type: - NAME + NAME ["*"] stream: NAME "/" size diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parsing.py similarity index 91% rename from Tools/cases_generator/parser.py rename to Tools/cases_generator/parsing.py index ac77e7eae81ad3..cdd20d7a0b3f59 100644 --- a/Tools/cases_generator/parser.py +++ b/Tools/cases_generator/parsing.py @@ -1,7 +1,7 @@ """Parser for bytecodes.inst.""" from dataclasses import dataclass, field -from typing import NamedTuple, Callable, TypeVar, Literal +from typing import NamedTuple, Callable, TypeVar, Literal, cast import lexer as lx from plexer import PLexer @@ -19,7 +19,7 @@ def contextual_wrapper(self: P) -> N | None: res = func(self) if res is None: self.setpos(begin) - return + return None end = self.getpos() res.context = Context(begin, end, self) return res @@ -69,12 +69,18 @@ class Block(Node): @dataclass class StackEffect(Node): - name: str + name: str = field(compare=False) # __eq__ only uses type, cond, size type: str = "" # Optional `:type` cond: str = "" # Optional `if (cond)` size: str = "" # Optional `[size]` # Note: size cannot be combined with type or cond + def __repr__(self): + items = [self.name, self.type, self.cond, self.size] + while items and items[-1] == "": + del items[-1] + return f"StackEffect({', '.join(repr(item) for item in items)})" + @dataclass class Expression(Node): @@ -130,6 +136,7 @@ class Family(Node): size: str # Variable giving the cache size in code units members: list[str] + @dataclass class Pseudo(Node): name: str @@ -147,13 +154,20 @@ def definition(self) -> InstDef | Macro | Pseudo | Family | None: return family if pseudo := self.pseudo_def(): return pseudo + return None @contextual def inst_def(self) -> InstDef | None: if hdr := self.inst_header(): if block := self.block(): return InstDef( - hdr.override, hdr.register, hdr.kind, hdr.name, hdr.inputs, hdr.outputs, block + hdr.override, + hdr.register, + hdr.kind, + hdr.name, + hdr.inputs, + hdr.outputs, + block, ) raise self.make_syntax_error("Expected block") return None @@ -166,7 +180,8 @@ def inst_header(self) -> InstHeader | None: # TODO: Make INST a keyword in the lexer. 
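Aside (not part of the patch): the field(compare=False) change to StackEffect above means equality now ignores the variable name. A minimal demonstration of that dataclass behaviour, using a simplified stand-in class:

    from dataclasses import dataclass, field

    @dataclass
    class Effect:                      # simplified stand-in for parsing.StackEffect
        name: str = field(compare=False)
        type: str = ""
        cond: str = ""
        size: str = ""

    assert Effect("left") == Effect("right")                 # names ignored by __eq__
    assert Effect("x", type="PyObject **") != Effect("x")    # type/cond/size still compared

    # Callers that really care about the name (e.g. the common input/output
    # check in instructions.py above) now test .name explicitly as well as ==.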
override = bool(self.expect(lx.OVERRIDE)) register = bool(self.expect(lx.REGISTER)) - if (tkn := self.expect(lx.IDENTIFIER)) and (kind := tkn.text) in ("inst", "op"): + if (tkn := self.expect(lx.IDENTIFIER)) and tkn.text in ("inst", "op"): + kind = cast(Literal["inst", "op"], tkn.text) if self.expect(lx.LPAREN) and (tkn := self.expect(lx.IDENTIFIER)): name = tkn.text if self.expect(lx.COMMA): @@ -190,6 +205,7 @@ def inputs(self) -> list[InputEffect] | None: # input (',' input)* here = self.getpos() if inp := self.input(): + inp = cast(InputEffect, inp) near = self.getpos() if self.expect(lx.COMMA): if rest := self.inputs(): @@ -232,15 +248,18 @@ def cache_effect(self) -> CacheEffect | None: raise self.make_syntax_error(f"Expected integer, got {num!r}") else: return CacheEffect(tkn.text, size) + return None @contextual def stack_effect(self) -> StackEffect | None: - # IDENTIFIER [':' IDENTIFIER] ['if' '(' expression ')'] + # IDENTIFIER [':' IDENTIFIER [TIMES]] ['if' '(' expression ')'] # | IDENTIFIER '[' expression ']' if tkn := self.expect(lx.IDENTIFIER): type_text = "" if self.expect(lx.COLON): type_text = self.require(lx.IDENTIFIER).text.strip() + if self.expect(lx.TIMES): + type_text += " *" cond_text = "" if self.expect(lx.IF): self.require(lx.LPAREN) @@ -258,6 +277,7 @@ def stack_effect(self) -> StackEffect | None: type_text = "PyObject **" size_text = size.text.strip() return StackEffect(tkn.text, type_text, cond_text, size_text) + return None @contextual def expression(self) -> Expression | None: @@ -288,6 +308,7 @@ def expression(self) -> Expression | None: def op(self) -> OpName | None: if tkn := self.expect(lx.IDENTIFIER): return OpName(tkn.text) + return None @contextual def macro_def(self) -> Macro | None: @@ -300,16 +321,20 @@ def macro_def(self) -> Macro | None: self.require(lx.SEMI) res = Macro(tkn.text, uops) return res + return None def uops(self) -> list[UOp] | None: if uop := self.uop(): + uop = cast(UOp, uop) uops = [uop] while self.expect(lx.PLUS): if uop := self.uop(): + uop = cast(UOp, uop) uops.append(uop) else: raise self.make_syntax_error("Expected op name or cache effect") return uops + return None @contextual def uop(self) -> UOp | None: @@ -327,6 +352,7 @@ def uop(self) -> UOp | None: raise self.make_syntax_error("Expected integer") else: return OpName(tkn.text) + return None @contextual def family_def(self) -> Family | None: @@ -360,9 +386,7 @@ def pseudo_def(self) -> Pseudo | None: raise self.make_syntax_error("Expected {") if members := self.members(): if self.expect(lx.RBRACE) and self.expect(lx.SEMI): - return Pseudo( - tkn.text, members - ) + return Pseudo(tkn.text, members) return None def members(self) -> list[str] | None: @@ -385,6 +409,7 @@ def members(self) -> list[str] | None: def block(self) -> Block | None: if self.c_blob(): return Block() + return None def c_blob(self) -> list[lx.Token]: tokens: list[lx.Token] = [] diff --git a/Tools/cases_generator/plexer.py b/Tools/cases_generator/plexer.py index a73254ed5b1daa..cb6c5375866490 100644 --- a/Tools/cases_generator/plexer.py +++ b/Tools/cases_generator/plexer.py @@ -1,4 +1,5 @@ import lexer as lx + Token = lx.Token @@ -64,7 +65,9 @@ def require(self, kind: str) -> Token: tkn = self.next() if tkn is not None and tkn.kind == kind: return tkn - raise self.make_syntax_error(f"Expected {kind!r} but got {tkn and tkn.text!r}", tkn) + raise self.make_syntax_error( + f"Expected {kind!r} but got {tkn and tkn.text!r}", tkn + ) def extract_line(self, lineno: int) -> str: # Return source line `lineno` (1-based) 
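Aside (not part of the patch): the new TIMES branch in stack_effect() above folds a trailing "*" into the type text, matching the `NAME ["*"]` grammar change in interpreter_definition.md. A rough interactive sketch; it assumes it is run from Tools/cases_generator so that parsing and lexer import, and the effect name is invented:

    import parsing

    eff = parsing.Parser("obj: PyTypeObject *", filename="<demo>").stack_effect()
    print(eff)   # expected: StackEffect('obj', 'PyTypeObject *')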
@@ -73,18 +76,20 @@ def extract_line(self, lineno: int) -> str: return "" return lines[lineno - 1] - def make_syntax_error(self, message: str, tkn: Token|None = None) -> SyntaxError: + def make_syntax_error(self, message: str, tkn: Token | None = None) -> SyntaxError: # Construct a SyntaxError instance from message and token if tkn is None: tkn = self.peek() if tkn is None: tkn = self.tokens[-1] - return lx.make_syntax_error(message, - self.filename, tkn.line, tkn.column, self.extract_line(tkn.line)) + return lx.make_syntax_error( + message, self.filename, tkn.line, tkn.column, self.extract_line(tkn.line) + ) if __name__ == "__main__": import sys + if sys.argv[1:]: filename = sys.argv[1] if filename == "-c" and sys.argv[2:]: diff --git a/Tools/cases_generator/stacking.py b/Tools/cases_generator/stacking.py new file mode 100644 index 00000000000000..d457ce01a8f438 --- /dev/null +++ b/Tools/cases_generator/stacking.py @@ -0,0 +1,393 @@ +import dataclasses +import typing + +from formatting import ( + Formatter, + UNUSED, + maybe_parenthesize, + parenthesize_cond, +) +from instructions import ( + ActiveCacheEffect, + Instruction, + MacroInstruction, + Component, + Tiers, + TIER_ONE, +) +from parsing import StackEffect, CacheEffect, Family + + +@dataclasses.dataclass +class StackOffset: + """Represent the stack offset for a PEEK or POKE. + + - At stack_pointer[0], deep and high are both empty. + (Note that that is an invalid stack reference.) + - Below stack top, only deep is non-empty. + - Above stack top, only high is non-empty. + - In complex cases, both deep and high may be non-empty. + + All this would be much simpler if all stack entries were the same + size, but with conditional and array effects, they aren't. + The offsets are each represented by a list of StackEffect objects. + The name in the StackEffects is unused. + """ + + deep: list[StackEffect] = dataclasses.field(default_factory=list) + high: list[StackEffect] = dataclasses.field(default_factory=list) + + def clone(self) -> "StackOffset": + return StackOffset(list(self.deep), list(self.high)) + + def negate(self) -> "StackOffset": + return StackOffset(list(self.high), list(self.deep)) + + def deeper(self, eff: StackEffect) -> None: + if eff in self.high: + self.high.remove(eff) + else: + self.deep.append(eff) + + def higher(self, eff: StackEffect) -> None: + if eff in self.deep: + self.deep.remove(eff) + else: + self.high.append(eff) + + def as_terms(self) -> list[tuple[str, str]]: + num = 0 + terms: list[tuple[str, str]] = [] + for eff in self.deep: + if eff.size: + terms.append(("-", maybe_parenthesize(eff.size))) + elif eff.cond and eff.cond != "1": + terms.append(("-", f"({parenthesize_cond(eff.cond)} ? 1 : 0)")) + elif eff.cond != "0": + num -= 1 + for eff in self.high: + if eff.size: + terms.append(("+", maybe_parenthesize(eff.size))) + elif eff.cond and eff.cond != "1": + terms.append(("+", f"({parenthesize_cond(eff.cond)} ? 
1 : 0)")) + elif eff.cond != "0": + num += 1 + if num < 0: + terms.insert(0, ("-", str(-num))) + elif num > 0: + terms.append(("+", str(num))) + return terms + + def as_index(self) -> str: + terms = self.as_terms() + return make_index(terms) + + +def make_index(terms: list[tuple[str, str]]) -> str: + # Produce an index expression from the terms honoring PEP 8, + # surrounding binary ops with spaces but not unary minus + index = "" + for sign, term in terms: + if index: + index += f" {sign} {term}" + elif sign == "+": + index = term + else: + index = sign + term + return index or "0" + + +@dataclasses.dataclass +class StackItem: + offset: StackOffset + effect: StackEffect + + def as_variable(self, lax: bool = False) -> str: + """Return e.g. stack_pointer[-1].""" + terms = self.offset.as_terms() + if self.effect.size: + terms.insert(0, ("+", "stack_pointer")) + index = make_index(terms) + if self.effect.size: + res = index + else: + res = f"stack_pointer[{index}]" + if not lax: + # Check that we're not reading or writing above stack top. + # Skip this for output variable initialization (lax=True). + assert ( + self.effect in self.offset.deep and not self.offset.high + ), f"Push or pop above current stack level: {res}" + return res + + def as_stack_effect(self, lax: bool = False) -> StackEffect: + return StackEffect( + self.as_variable(lax=lax), + self.effect.type if self.effect.size else "", + self.effect.cond, + self.effect.size, + ) + + +@dataclasses.dataclass +class CopyEffect: + src: StackEffect + dst: StackEffect + + +class EffectManager: + """Manage stack effects and offsets for an instruction.""" + + instr: Instruction + active_caches: list[ActiveCacheEffect] + peeks: list[StackItem] + pokes: list[StackItem] + copies: list[CopyEffect] # See merge() + # Track offsets from stack pointer + min_offset: StackOffset + final_offset: StackOffset + + def __init__( + self, + instr: Instruction, + active_caches: list[ActiveCacheEffect], + pred: "EffectManager | None" = None, + ): + self.instr = instr + self.active_caches = active_caches + self.peeks = [] + self.pokes = [] + self.copies = [] + self.final_offset = pred.final_offset.clone() if pred else StackOffset() + for eff in reversed(instr.input_effects): + self.final_offset.deeper(eff) + self.peeks.append(StackItem(offset=self.final_offset.clone(), effect=eff)) + self.min_offset = self.final_offset.clone() + for eff in instr.output_effects: + self.pokes.append(StackItem(offset=self.final_offset.clone(), effect=eff)) + self.final_offset.higher(eff) + + if pred: + # Replace push(x) + pop(y) with copy(x, y). + # Check that the sources and destinations are disjoint. + sources: set[str] = set() + destinations: set[str] = set() + while ( + pred.pokes + and self.peeks + and pred.pokes[-1].effect == self.peeks[-1].effect + ): + src = pred.pokes.pop(-1).effect + dst = self.peeks.pop(0).effect + pred.final_offset.deeper(src) + if dst.name != UNUSED: + destinations.add(dst.name) + if dst.name != src.name: + sources.add(src.name) + self.copies.append(CopyEffect(src, dst)) + # TODO: Turn this into an error (pass an Analyzer instance?) 
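Aside (not part of the patch): the StackOffset bookkeeping above is easiest to see with a small worked example. A rough sketch; it assumes it is run from Tools/cases_generator so the new stacking and parsing modules import, and the effect names are invented. The deeper()/higher() cancellation relies on StackEffect equality ignoring names, as shown earlier.

    from parsing import StackEffect
    from stacking import StackOffset

    off = StackOffset()
    off.deeper(StackEffect("right"))                 # one plain value below top
    off.deeper(StackEffect("args", size="oparg"))    # an array of oparg values below that
    print(off.as_index())                            # expected: -1 - oparg

    off.higher(StackEffect("res"))                   # a matching push cancels one pop
    print(off.as_index())                            # expected: -oparg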
+ assert sources & destinations == set(), ( + pred.instr.name, + self.instr.name, + sources, + destinations, + ) + + def adjust_deeper(self, eff: StackEffect) -> None: + for peek in self.peeks: + peek.offset.deeper(eff) + for poke in self.pokes: + poke.offset.deeper(eff) + self.min_offset.deeper(eff) + self.final_offset.deeper(eff) + + def adjust_higher(self, eff: StackEffect) -> None: + for peek in self.peeks: + peek.offset.higher(eff) + for poke in self.pokes: + poke.offset.higher(eff) + self.min_offset.higher(eff) + self.final_offset.higher(eff) + + def adjust(self, offset: StackOffset) -> None: + for down in offset.deep: + self.adjust_deeper(down) + for up in offset.high: + self.adjust_higher(up) + + def adjust_inverse(self, offset: StackOffset) -> None: + for down in offset.deep: + self.adjust_higher(down) + for up in offset.high: + self.adjust_deeper(up) + + def collect_vars(self) -> dict[str, StackEffect]: + """Collect all variables, skipping unused ones.""" + vars: dict[str, StackEffect] = {} + + def add(eff: StackEffect) -> None: + if eff.name != UNUSED: + if eff.name in vars: + # TODO: Make this an error + assert vars[eff.name] == eff, ( + self.instr.name, + eff.name, + vars[eff.name], + eff, + ) + else: + vars[eff.name] = eff + + for copy in self.copies: + add(copy.src) + add(copy.dst) + for peek in self.peeks: + add(peek.effect) + for poke in self.pokes: + add(poke.effect) + + return vars + + +def less_than(a: StackOffset, b: StackOffset) -> bool: + # TODO: Handle more cases + if a.high != b.high: + return False + return a.deep[: len(b.deep)] == b.deep + + +def get_managers(parts: list[Component]) -> list[EffectManager]: + managers: list[EffectManager] = [] + pred: EffectManager | None = None + for part in parts: + mgr = EffectManager(part.instr, part.active_caches, pred) + managers.append(mgr) + pred = mgr + return managers + + +def get_stack_effect_info_for_macro(mac: MacroInstruction) -> tuple[str, str]: + """Get the stack effect info for a macro instruction. + + Returns a tuple (popped, pushed) where each is a string giving a + symbolic expression for the number of values popped/pushed. 
+ """ + parts = [part for part in mac.parts if isinstance(part, Component)] + managers = get_managers(parts) + popped = StackOffset() + for mgr in managers: + if less_than(mgr.min_offset, popped): + popped = mgr.min_offset.clone() + # Compute pushed = final - popped + pushed = managers[-1].final_offset.clone() + for effect in popped.deep: + pushed.higher(effect) + for effect in popped.high: + pushed.deeper(effect) + return popped.negate().as_index(), pushed.as_index() + + +def write_single_instr( + instr: Instruction, out: Formatter, tier: Tiers = TIER_ONE +) -> None: + try: + write_components( + [Component(instr, instr.active_caches)], + out, + tier, + ) + except AssertionError as err: + raise AssertionError(f"Error writing instruction {instr.name}") from err + + +def write_macro_instr( + mac: MacroInstruction, out: Formatter, family: Family | None +) -> None: + parts = [part for part in mac.parts if isinstance(part, Component)] + + cache_adjust = 0 + for part in mac.parts: + match part: + case CacheEffect(size=size): + cache_adjust += size + case Component(instr=instr): + cache_adjust += instr.cache_offset + case _: + typing.assert_never(part) + + out.emit("") + with out.block(f"TARGET({mac.name})"): + if mac.predicted: + out.emit(f"PREDICTED({mac.name});") + out.static_assert_family_size(mac.name, family, cache_adjust) + try: + write_components(parts, out, TIER_ONE) + except AssertionError as err: + raise AssertionError(f"Error writing macro {mac.name}") from err + if cache_adjust: + out.emit(f"next_instr += {cache_adjust};") + out.emit("DISPATCH();") + + +def write_components( + parts: list[Component], + out: Formatter, + tier: Tiers, +) -> None: + managers = get_managers(parts) + + all_vars: dict[str, StackEffect] = {} + for mgr in managers: + for name, eff in mgr.collect_vars().items(): + if name in all_vars: + # TODO: Turn this into an error -- variable conflict + assert all_vars[name] == eff, ( + name, + mgr.instr.name, + all_vars[name], + eff, + ) + else: + all_vars[name] = eff + + # Declare all variables + for name, eff in all_vars.items(): + out.declare(eff, None) + + for mgr in managers: + if len(parts) > 1: + out.emit(f"// {mgr.instr.name}") + + for copy in mgr.copies: + if copy.src.name != copy.dst.name: + out.assign(copy.dst, copy.src) + for peek in mgr.peeks: + out.assign( + peek.effect, + peek.as_stack_effect(), + ) + # Initialize array outputs + for poke in mgr.pokes: + if poke.effect.size and poke.effect.name not in mgr.instr.unmoved_names: + out.assign( + poke.effect, + poke.as_stack_effect(lax=True), + ) + + if len(parts) == 1: + mgr.instr.write_body(out, 0, mgr.active_caches, tier) + else: + with out.block(""): + mgr.instr.write_body(out, -4, mgr.active_caches, tier) + + if mgr is managers[-1]: + out.stack_adjust(mgr.final_offset.deep, mgr.final_offset.high) + # Use clone() since adjust_inverse() mutates final_offset + mgr.adjust_inverse(mgr.final_offset.clone()) + + for poke in mgr.pokes: + if not poke.effect.size and poke.effect.name not in mgr.instr.unmoved_names: + out.assign( + poke.as_stack_effect(), + poke.effect, + ) diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index bff8935df13bc6..70b066cce82fae 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -7,6 +7,7 @@ from __future__ import annotations import abc +import argparse import ast import builtins as bltns import collections @@ -27,7 +28,6 @@ import string import sys import textwrap -import traceback from collections.abc import ( Callable, @@ -37,13 +37,15 @@ ) from types 
import FunctionType, NoneType from typing import ( + TYPE_CHECKING, Any, Final, Literal, NamedTuple, NoReturn, Protocol, - TypeGuard, + TypeVar, + cast, overload, ) @@ -134,6 +136,28 @@ def text_accumulator() -> TextAccumulator: text, append, output = _text_accumulator() return TextAccumulator(append, output) + +@dc.dataclass +class ClinicError(Exception): + message: str + _: dc.KW_ONLY + lineno: int | None = None + filename: str | None = None + + def __post_init__(self) -> None: + super().__init__(self.message) + + def report(self, *, warn_only: bool = False) -> str: + msg = "Warning" if warn_only else "Error" + if self.filename is not None: + msg += f" in file {self.filename!r}" + if self.lineno is not None: + msg += f" on line {self.lineno}" + msg += ":\n" + msg += f"{self.message}\n" + return msg + + @overload def warn_or_fail( *args: object, @@ -157,25 +181,16 @@ def warn_or_fail( line_number: int | None = None, ) -> None: joined = " ".join([str(a) for a in args]) - add, output = text_accumulator() - if fail: - add("Error") - else: - add("Warning") if clinic: if filename is None: filename = clinic.filename if getattr(clinic, 'block_parser', None) and (line_number is None): line_number = clinic.block_parser.line_number - if filename is not None: - add(' in file "' + filename + '"') - if line_number is not None: - add(" on line " + str(line_number)) - add(':\n') - add(joined) - print(output()) + error = ClinicError(joined, filename=filename, lineno=line_number) if fail: - sys.exit(-1) + raise error + else: + print(error.report(warn_only=True)) def warn( @@ -206,6 +221,20 @@ def c_repr(s: str) -> str: return '"' + s + '"' +def wrapped_c_string_literal( + text: str, + *, + width: int = 72, + suffix: str = '', + initial_indent: int = 0, + subsequent_indent: int = 4 +) -> str: + wrapped = textwrap.wrap(text, width=width, replace_whitespace=False, + drop_whitespace=False, break_on_hyphens=False) + separator = '"' + suffix + '\n' + subsequent_indent * ' ' + '"' + return initial_indent * ' ' + '"' + separator.join(wrapped) + '"' + + is_legal_c_identifier = re.compile('^[A-Za-z_][A-Za-z0-9_]*$').match def is_legal_py_identifier(s: str) -> bool: @@ -275,9 +304,11 @@ def linear_format(s: str, **kwargs: str) -> str: continue if trailing: - fail("Text found after {" + name + "} block marker! It must be on a line by itself.") + fail(f"Text found after {{{name}}} block marker! " + "It must be on a line by itself.") if indent.strip(): - fail("Non-whitespace characters found before {" + name + "} block marker! It must be on a line by itself.") + fail(f"Non-whitespace characters found before {{{name}}} block marker! " + "It must be on a line by itself.") value = kwargs[name] if not value: @@ -330,6 +361,13 @@ def suffix_all_lines(s: str, suffix: str) -> str: return ''.join(final) +def pprint_words(items: list[str]) -> str: + if len(items) <= 2: + return " and ".join(items) + else: + return ", ".join(items[:-1]) + " and " + items[-1] + + def version_splitter(s: str) -> tuple[int, ...]: """Splits a version string into a tuple of integers. 
@@ -344,7 +382,7 @@ def version_splitter(s: str) -> tuple[int, ...]: accumulator: list[str] = [] def flush() -> None: if not accumulator: - raise ValueError('Unsupported version string: ' + repr(s)) + fail(f'Unsupported version string: {s!r}') version.append(int(''.join(accumulator))) accumulator.clear() @@ -357,13 +395,15 @@ def flush() -> None: flush() version.append('abc'.index(c) - 3) else: - raise ValueError('Illegal character ' + repr(c) + ' in version string ' + repr(s)) + fail(f'Illegal character {c!r} in version string {s!r}') flush() return tuple(version) def version_comparitor(version1: str, version2: str) -> Literal[-1, 0, 1]: - iterator = itertools.zip_longest(version_splitter(version1), version_splitter(version2), fillvalue=0) - for i, (a, b) in enumerate(iterator): + iterator = itertools.zip_longest( + version_splitter(version1), version_splitter(version2), fillvalue=0 + ) + for a, b in iterator: if a < b: return -1 if a > b: @@ -372,36 +412,36 @@ def version_comparitor(version1: str, version2: str) -> Literal[-1, 0, 1]: class CRenderData: - def __init__(self): + def __init__(self) -> None: # The C statements to declare variables. # Should be full lines with \n eol characters. - self.declarations = [] + self.declarations: list[str] = [] # The C statements required to initialize the variables before the parse call. # Should be full lines with \n eol characters. - self.initializers = [] + self.initializers: list[str] = [] # The C statements needed to dynamically modify the values # parsed by the parse call, before calling the impl. - self.modifications = [] + self.modifications: list[str] = [] # The entries for the "keywords" array for PyArg_ParseTuple. # Should be individual strings representing the names. - self.keywords = [] + self.keywords: list[str] = [] # The "format units" for PyArg_ParseTuple. # Should be individual strings that will get - self.format_units = [] + self.format_units: list[str] = [] # The varargs arguments for PyArg_ParseTuple. - self.parse_arguments = [] + self.parse_arguments: list[str] = [] # The parameter declarations for the impl function. - self.impl_parameters = [] + self.impl_parameters: list[str] = [] # The arguments to the impl function at the time it's called. - self.impl_arguments = [] + self.impl_arguments: list[str] = [] # For return converters: the name of the variable that # should receive the value returned by the impl. @@ -411,17 +451,17 @@ def __init__(self): # value from the parse function. This is also where # you should check the _return_value for errors, and # "goto exit" if there are any. - self.return_conversion = [] + self.return_conversion: list[str] = [] self.converter_retval = "_return_value" # The C statements required to do some operations # after the end of parsing but before cleaning up. # These operations may be, for example, memory deallocations which # can only be done without any error happening during argument parsing. - self.post_parsing = [] + self.post_parsing: list[str] = [] # The C statements required to clean up after the impl call. 
- self.cleanup = [] + self.cleanup: list[str] = [] class FormatCounterFormatter(string.Formatter): @@ -436,7 +476,9 @@ class FormatCounterFormatter(string.Formatter): def __init__(self) -> None: self.counts = collections.Counter[str]() - def get_value(self, key: str, args, kwargs) -> str: # type: ignore[override] + def get_value( + self, key: str, args: object, kwargs: object # type: ignore[override] + ) -> Literal['']: self.counts[key] += 1 return '' @@ -448,7 +490,7 @@ class Language(metaclass=abc.ABCMeta): checksum_line = "" def __init__(self, filename: str) -> None: - pass + ... @abc.abstractmethod def render( @@ -456,10 +498,10 @@ def render( clinic: Clinic | None, signatures: Iterable[Module | Class | Function] ) -> str: - pass + ... def parse_line(self, line: str) -> None: - pass + ... def validate(self) -> None: def assert_only_one( @@ -530,12 +572,11 @@ class PythonLanguage(Language): checksum_line = "#/*[{dsl_name} end generated code: {arguments}]*/" -ParamGroup = Iterable["Parameter"] ParamTuple = tuple["Parameter", ...] def permute_left_option_groups( - l: Sequence[ParamGroup] + l: Sequence[Iterable[Parameter]] ) -> Iterator[ParamTuple]: """ Given [(1,), (2,), (3,)], should yield: @@ -552,7 +593,7 @@ def permute_left_option_groups( def permute_right_option_groups( - l: Sequence[ParamGroup] + l: Sequence[Iterable[Parameter]] ) -> Iterator[ParamTuple]: """ Given [(1,), (2,), (3,)], should yield: @@ -569,9 +610,9 @@ def permute_right_option_groups( def permute_optional_groups( - left: Sequence[ParamGroup], - required: ParamGroup, - right: Sequence[ParamGroup] + left: Sequence[Iterable[Parameter]], + required: Iterable[Parameter], + right: Sequence[Iterable[Parameter]] ) -> tuple[ParamTuple, ...]: """ Generator function that computes the set of acceptable @@ -756,6 +797,81 @@ class CLanguage(Language): stop_line = "[{dsl_name} start generated code]*/" checksum_line = "/*[{dsl_name} end generated code: {arguments}]*/" + PARSER_PROTOTYPE_KEYWORD: Final[str] = normalize_snippet(""" + static PyObject * + {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs) + """) + PARSER_PROTOTYPE_KEYWORD___INIT__: Final[str] = normalize_snippet(""" + static int + {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs) + """) + PARSER_PROTOTYPE_VARARGS: Final[str] = normalize_snippet(""" + static PyObject * + {c_basename}({self_type}{self_name}, PyObject *args) + """) + PARSER_PROTOTYPE_FASTCALL: Final[str] = normalize_snippet(""" + static PyObject * + {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs) + """) + PARSER_PROTOTYPE_FASTCALL_KEYWORDS: Final[str] = normalize_snippet(""" + static PyObject * + {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) + """) + PARSER_PROTOTYPE_DEF_CLASS: Final[str] = normalize_snippet(""" + static PyObject * + {c_basename}({self_type}{self_name}, PyTypeObject *{defining_class_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) + """) + PARSER_PROTOTYPE_NOARGS: Final[str] = normalize_snippet(""" + static PyObject * + {c_basename}({self_type}{self_name}, PyObject *Py_UNUSED(ignored)) + """) + METH_O_PROTOTYPE: Final[str] = normalize_snippet(""" + static PyObject * + {c_basename}({impl_parameters}) + """) + DOCSTRING_PROTOTYPE_VAR: Final[str] = normalize_snippet(""" + PyDoc_VAR({c_basename}__doc__); + """) + DOCSTRING_PROTOTYPE_STRVAR: Final[str] = normalize_snippet(""" + PyDoc_STRVAR({c_basename}__doc__, + {docstring}); + """) + 
IMPL_DEFINITION_PROTOTYPE: Final[str] = normalize_snippet(""" + static {impl_return_type} + {c_basename}_impl({impl_parameters}) + """) + METHODDEF_PROTOTYPE_DEFINE: Final[str] = normalize_snippet(r""" + #define {methoddef_name} \ + {{"{name}", {methoddef_cast}{c_basename}{methoddef_cast_end}, {methoddef_flags}, {c_basename}__doc__}}, + """) + METHODDEF_PROTOTYPE_IFNDEF: Final[str] = normalize_snippet(""" + #ifndef {methoddef_name} + #define {methoddef_name} + #endif /* !defined({methoddef_name}) */ + """) + DEPRECATED_POSITIONAL_PROTOTYPE: Final[str] = r""" + // Emit compiler warnings when we get to Python {major}.{minor}. + #if PY_VERSION_HEX >= 0x{major:02x}{minor:02x}00C0 + # error \ + {cpp_message} + #elif PY_VERSION_HEX >= 0x{major:02x}{minor:02x}00A0 + # ifdef _MSC_VER + # pragma message ( \ + {cpp_message}) + # else + # warning \ + {cpp_message} + # endif + #endif + if ({condition}) {{{{ + if (PyErr_WarnEx(PyExc_DeprecationWarning, + {depr_message}, 1)) + {{{{ + goto exit; + }}}} + }}}} + """ + def __init__(self, filename: str) -> None: super().__init__(filename) self.cpp = cpp.Monitor(filename) @@ -777,13 +893,78 @@ def render( function = o return self.render_function(clinic, function) + def deprecate_positional_use( + self, + func: Function, + params: dict[int, Parameter], + ) -> str: + assert len(params) > 0 + names = [repr(p.name) for p in params.values()] + first_pos, first_param = next(iter(params.items())) + last_pos, last_param = next(reversed(params.items())) + + # Pretty-print list of names. + pstr = pprint_words(names) + + # For now, assume there's only one deprecation level. + assert first_param.deprecated_positional == last_param.deprecated_positional + thenceforth = first_param.deprecated_positional + assert thenceforth is not None + + # Format the preprocessor warning and error messages. + assert isinstance(self.cpp.filename, str) + source = os.path.basename(self.cpp.filename) + major, minor = thenceforth + cpp_message = ( + f"In {source}, update parameter(s) {pstr} in the clinic " + f"input of {func.full_name!r} to be keyword-only." + ) + + # Format the deprecation message. + if first_pos == 0: + preamble = "Passing positional arguments to " + if len(params) == 1: + condition = f"nargs == {first_pos+1}" + if first_pos: + preamble = f"Passing {first_pos+1} positional arguments to " + depr_message = preamble + ( + f"{func.fulldisplayname}() is deprecated. Parameter {pstr} will " + f"become a keyword-only parameter in Python {major}.{minor}." + ) + else: + condition = f"nargs > {first_pos} && nargs <= {last_pos+1}" + if first_pos: + preamble = ( + f"Passing more than {first_pos} positional " + f"argument{'s' if first_pos != 1 else ''} to " + ) + depr_message = preamble + ( + f"{func.fulldisplayname}() is deprecated. Parameters {pstr} will " + f"become keyword-only parameters in Python {major}.{minor}." + ) + + # Append deprecation warning to docstring. + lines = textwrap.wrap(f"Note: {depr_message}") + docstring = "\n".join(lines) + func.docstring += f"\n\n{docstring}\n" + + # Format and return the code block. 
+ code = self.DEPRECATED_POSITIONAL_PROTOTYPE.format( + condition=condition, + major=major, + minor=minor, + cpp_message=wrapped_c_string_literal(cpp_message, suffix=" \\", + width=64, + subsequent_indent=16), + depr_message=wrapped_c_string_literal(depr_message, width=64, + subsequent_indent=20), + ) + return normalize_snippet(code, indent=4) + def docstring_for_c_string( self, f: Function ) -> str: - if re.search(r'[^\x00-\x7F]', f.docstring): - warn("Non-ascii character appear in docstring.") - text, add, output = _text_accumulator() # turn docstring into a properly quoted C string for line in f.docstring.split('\n'): @@ -802,7 +983,8 @@ def docstring_for_c_string( def output_templates( self, - f: Function + f: Function, + clinic: Clinic ) -> dict[str, str]: parameters = list(f.parameters.values()) assert parameters @@ -815,9 +997,7 @@ def output_templates( converters = [p.converter for p in parameters] has_option_groups = parameters and (parameters[0].group or parameters[-1].group) - default_return_converter = (not f.return_converter or - f.return_converter.type == 'PyObject *') - + default_return_converter = f.return_converter.type == 'PyObject *' new_or_init = f.kind.new_or_init vararg: int | str = NO_VARARG @@ -860,52 +1040,15 @@ def output_templates( # methoddef_ifndef return_value_declaration = "PyObject *return_value = NULL;" - - methoddef_define = normalize_snippet(""" - #define {methoddef_name} \\ - {{"{name}", {methoddef_cast}{c_basename}{methoddef_cast_end}, {methoddef_flags}, {c_basename}__doc__}}, - """) + methoddef_define = self.METHODDEF_PROTOTYPE_DEFINE if new_or_init and not f.docstring: docstring_prototype = docstring_definition = '' else: - docstring_prototype = normalize_snippet(""" - PyDoc_VAR({c_basename}__doc__); - """) - docstring_definition = normalize_snippet(""" - PyDoc_STRVAR({c_basename}__doc__, - {docstring}); - """) - impl_definition = normalize_snippet(""" - static {impl_return_type} - {c_basename}_impl({impl_parameters}) - """) + docstring_prototype = self.DOCSTRING_PROTOTYPE_VAR + docstring_definition = self.DOCSTRING_PROTOTYPE_STRVAR + impl_definition = self.IMPL_DEFINITION_PROTOTYPE impl_prototype = parser_prototype = parser_definition = None - parser_prototype_keyword = normalize_snippet(""" - static PyObject * - {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs) - """) - - parser_prototype_varargs = normalize_snippet(""" - static PyObject * - {c_basename}({self_type}{self_name}, PyObject *args) - """) - - parser_prototype_fastcall = normalize_snippet(""" - static PyObject * - {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs) - """) - - parser_prototype_fastcall_keywords = normalize_snippet(""" - static PyObject * - {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) - """) - - parser_prototype_def_class = normalize_snippet(""" - static PyObject * - {c_basename}({self_type}{self_name}, PyTypeObject *{defining_class_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) - """) - # parser_body_fields remembers the fields passed in to the # previous call to parser_body. this is used for an awful hack. parser_body_fields: tuple[str, ...] 
= () @@ -948,19 +1091,13 @@ def parser_body( if not requires_defining_class: # no parameters, METH_NOARGS flags = "METH_NOARGS" - - parser_prototype = normalize_snippet(""" - static PyObject * - {c_basename}({self_type}{self_name}, PyObject *Py_UNUSED(ignored)) - """) + parser_prototype = self.PARSER_PROTOTYPE_NOARGS parser_code = [] - else: assert not new_or_init flags = "METH_METHOD|METH_FASTCALL|METH_KEYWORDS" - - parser_prototype = parser_prototype_def_class + parser_prototype = self.PARSER_PROTOTYPE_DEF_CLASS return_error = ('return NULL;' if default_return_converter else 'goto exit;') parser_code = [normalize_snippet(""" @@ -985,10 +1122,7 @@ def parser_body( if (isinstance(converters[0], object_converter) and converters[0].format_unit == 'O'): - meth_o_prototype = normalize_snippet(""" - static PyObject * - {c_basename}({impl_parameters}) - """) + meth_o_prototype = self.METH_O_PROTOTYPE if default_return_converter: # maps perfectly to METH_O, doesn't need a return converter. @@ -1028,8 +1162,7 @@ def parser_body( # in a big switch statement) flags = "METH_VARARGS" - parser_prototype = parser_prototype_varargs - + parser_prototype = self.PARSER_PROTOTYPE_VARARGS parser_definition = parser_body(parser_prototype, ' {option_group_parsing}') elif not requires_defining_class and pos_only == len(parameters) - pseudo_args: @@ -1038,7 +1171,7 @@ def parser_body( # we only need one call to _PyArg_ParseStack flags = "METH_FASTCALL" - parser_prototype = parser_prototype_fastcall + parser_prototype = self.PARSER_PROTOTYPE_FASTCALL nargs = 'nargs' argname_fmt = 'args[%d]' else: @@ -1046,7 +1179,7 @@ def parser_body( # we only need one call to PyArg_ParseTuple flags = "METH_VARARGS" - parser_prototype = parser_prototype_varargs + parser_prototype = self.PARSER_PROTOTYPE_VARARGS nargs = 'PyTuple_GET_SIZE(args)' argname_fmt = 'PyTuple_GET_ITEM(args, %d)' @@ -1093,7 +1226,6 @@ def parser_body( parsearg = p.converter.parse_arg(argname, displayname) if parsearg is None: - #print('Cannot convert %s %r for %s' % (p.converter.__class__.__name__, p.converter.format_unit, p.converter.name), file=sys.stderr) parser_code = None break if has_optional or p.is_optional(): @@ -1144,7 +1276,7 @@ def parser_body( nargs = f"Py_MIN(nargs, {max_pos})" if max_pos else "0" if not new_or_init: flags = "METH_FASTCALL|METH_KEYWORDS" - parser_prototype = parser_prototype_fastcall_keywords + parser_prototype = self.PARSER_PROTOTYPE_FASTCALL_KEYWORDS argname_fmt = 'args[%d]' declarations = declare_parser(f) declarations += "\nPyObject *argsbuf[%s];" % len(converters) @@ -1159,7 +1291,7 @@ def parser_body( else: # positional-or-keyword arguments flags = "METH_VARARGS|METH_KEYWORDS" - parser_prototype = parser_prototype_keyword + parser_prototype = self.PARSER_PROTOTYPE_KEYWORD argname_fmt = 'fastargs[%d]' declarations = declare_parser(f) declarations += "\nPyObject *argsbuf[%s];" % len(converters) @@ -1176,8 +1308,9 @@ def parser_body( if requires_defining_class: flags = 'METH_METHOD|' + flags - parser_prototype = parser_prototype_def_class + parser_prototype = self.PARSER_PROTOTYPE_DEF_CLASS + deprecated_positionals: dict[int, Parameter] = {} add_label: str | None = None for i, p in enumerate(parameters): if isinstance(p.converter, defining_class_converter): @@ -1186,13 +1319,14 @@ def parser_body( displayname = p.get_displayname(i+1) parsearg = p.converter.parse_arg(argname_fmt % i, displayname) if parsearg is None: - #print('Cannot convert %s %r for %s' % (p.converter.__class__.__name__, p.converter.format_unit, 
p.converter.name), file=sys.stderr) parser_code = None break if add_label and (i == pos_only or i == max_pos): parser_code.append("%s:" % add_label) add_label = None if not p.is_optional(): + if p.deprecated_positional: + deprecated_positionals[i] = p parser_code.append(normalize_snippet(parsearg, indent=4)) elif i < pos_only: add_label = 'skip_optional_posonly' @@ -1222,6 +1356,8 @@ def parser_body( goto %s; }} """ % add_label, indent=4)) + if p.deprecated_positional: + deprecated_positionals[i] = p if i + 1 == len(parameters): parser_code.append(normalize_snippet(parsearg, indent=4)) else: @@ -1237,6 +1373,12 @@ def parser_body( }} """ % add_label, indent=4)) + if deprecated_positionals: + code = self.deprecate_positional_use(f, deprecated_positionals) + assert parser_code is not None + # Insert the deprecation code before parameter parsing. + parser_code.insert(0, code) + if parser_code is not None: if add_label: parser_code.append("%s:" % add_label) @@ -1264,13 +1406,10 @@ def parser_body( methoddef_define = '' if f.kind is METHOD_NEW: - parser_prototype = parser_prototype_keyword + parser_prototype = self.PARSER_PROTOTYPE_KEYWORD else: return_value_declaration = "int return_value = -1;" - parser_prototype = normalize_snippet(""" - static int - {c_basename}({self_type}{self_name}, PyObject *args, PyObject *kwargs) - """) + parser_prototype = self.PARSER_PROTOTYPE_KEYWORD___INIT__ fields = list(parser_body_fields) parses_positional = 'METH_NOARGS' not in flags @@ -1321,16 +1460,9 @@ def parser_body( cpp_if = "#if " + conditional cpp_endif = "#endif /* " + conditional + " */" - assert clinic is not None - assert f.full_name is not None if methoddef_define and f.full_name not in clinic.ifndef_symbols: clinic.ifndef_symbols.add(f.full_name) - methoddef_ifndef = normalize_snippet(""" - #ifndef {methoddef_name} - #define {methoddef_name} - #endif /* !defined({methoddef_name}) */ - """) - + methoddef_ifndef = self.METHODDEF_PROTOTYPE_IFNDEF # add ';' to the end of parser_prototype and impl_prototype # (they mustn't be None, but they could be an empty string.) @@ -1374,7 +1506,11 @@ def group_to_variable_name(group: int) -> str: adjective = "left_" if group < 0 else "right_" return "group_" + adjective + str(abs(group)) - def render_option_group_parsing(self, f, template_dict): + def render_option_group_parsing( + self, + f: Function, + template_dict: TemplateDict + ) -> None: # positional only, grouped, optional arguments! # can be optional on the left or right. 
# here's an example: @@ -1398,11 +1534,11 @@ def render_option_group_parsing(self, f, template_dict): if isinstance(parameters[0].converter, self_converter): del parameters[0] - group = None + group: list[Parameter] | None = None left = [] right = [] - required = [] - last = unspecified + required: list[Parameter] = [] + last: int | Literal[Sentinels.unspecified] = unspecified for p in parameters: group_id = p.group @@ -1415,6 +1551,7 @@ def render_option_group_parsing(self, f, template_dict): group = required else: right.append(group) + assert group is not None group.append(p) count_min = sys.maxsize @@ -1433,19 +1570,21 @@ def render_option_group_parsing(self, f, template_dict): continue group_ids = {p.group for p in subset} # eliminate duplicates - d = {} + d: dict[str, str | int] = {} d['count'] = count d['name'] = f.name d['format_units'] = "".join(p.converter.format_unit for p in subset) - parse_arguments = [] + parse_arguments: list[str] = [] for p in subset: p.converter.parse_argument(parse_arguments) d['parse_arguments'] = ", ".join(parse_arguments) group_ids.discard(0) - lines = [self.group_to_variable_name(g) + " = 1;" for g in group_ids] - lines = "\n".join(lines) + lines = "\n".join([ + self.group_to_variable_name(g) + " = 1;" + for g in group_ids + ]) s = """\ case {count}: @@ -1481,7 +1620,7 @@ def render_function( parameters = f.render_parameters converters = [p.converter for p in parameters] - templates = self.output_templates(f) + templates = self.output_templates(f, clinic) f_self = parameters[0] selfless = parameters[1:] @@ -1517,7 +1656,8 @@ def render_function( c.render(p, data) if has_option_groups and (not positional): - fail("You cannot use optional groups ('[' and ']')\nunless all parameters are positional-only ('/').") + fail("You cannot use optional groups ('[' and ']') " + "unless all parameters are positional-only ('/').") # HACK # when we're METH_O, but have a custom return converter, @@ -1531,19 +1671,9 @@ def render_function( '{impl_parameters}' in templates['parser_prototype']): data.declarations.pop(0) - template_dict = {} - - assert isinstance(f.full_name, str) full_name = f.full_name - template_dict['full_name'] = full_name - - if new_or_init: - assert isinstance(f.cls, Class) - name = f.cls.name - else: - name = f.name - - template_dict['name'] = name + template_dict = {'full_name': full_name} + template_dict['name'] = f.displayname if f.c_basename: c_basename = f.c_basename @@ -1669,27 +1799,30 @@ class Block: found on the start line of the block between the square brackets. - signatures is either list or None. If it's a list, - it may only contain clinic.Module, clinic.Class, and + signatures is a list. + It may only contain clinic.Module, clinic.Class, and clinic.Function objects. At the moment it should contain at most one of each. output is either str or None. If str, it's the output from this block, with embedded '\n' characters. - indent is either str or None. It's the leading whitespace + indent is a str. It's the leading whitespace that was found on every line of input. (If body_prefix is not empty, this is the indent *after* removing the body_prefix.) - preindent is either str or None. It's the whitespace that + "indent" is different from the concept of "preindent" + (which is not stored as state on Block objects). + "preindent" is the whitespace that was found in front of every line of input *before* the "body_prefix" (see the Language object). If body_prefix is empty, preindent must always be empty too. 
- To illustrate indent and preindent: Assume that '_' - represents whitespace. If the block processed was in a - Python file, and looked like this: + To illustrate the difference between "indent" and "preindent": + + Assume that '_' represents whitespace. + If the block processed was in a Python file, and looked like this: ____#/*[python] ____#__for a in range(20): ____#____print(a) @@ -1702,7 +1835,6 @@ class Block: signatures: list[Module | Class | Function] = dc.field(default_factory=list) output: Any = None # TODO: Very dynamic; probably untypeable in its current form? indent: str = '' - preindent: str = '' def __repr__(self) -> str: dsl_name = self.dsl_name or "text" @@ -1711,8 +1843,12 @@ def summarize(s: object) -> str: if len(s) > 30: return s[:26] + "..." + s[0] return s - return "".join(( - "<Block ", dsl_name, " input=", summarize(self.input), " output=", summarize(self.output), ">")) + parts = ( + repr(dsl_name), + f"input={summarize(self.input)}", + f"output={summarize(self.output)}" + ) + return f"<clinic.Block {' '.join(parts)}>" class BlockParser: @@ -1857,7 +1993,7 @@ def is_stop_line(line: str) -> bool: for field in shlex.split(arguments): name, equals, value = field.partition('=') if not equals: - fail("Mangled Argument Clinic marker line:", repr(line)) + fail(f"Mangled Argument Clinic marker line: {line!r}") d[name.strip()] = value.strip() if self.verify: @@ -1868,11 +2004,10 @@ def is_stop_line(line: str) -> bool: computed = compute_checksum(output, len(checksum)) if checksum != computed: - fail("Checksum mismatch!\nExpected: {}\nComputed: {}\n" + fail("Checksum mismatch! " + f"Expected {checksum!r}, computed {computed!r}. " "Suggested fix: remove all generated code including " - "the end marker,\n" - "or use the '-f' option." - .format(checksum, computed)) + "the end marker, or use the '-f' option.") else: # put back output output_lines = output.splitlines(keepends=True) @@ -1959,12 +2094,12 @@ class BufferSeries: e.g. o[-1] is an element immediately preceding o[0].
""" - def __init__(self): + def __init__(self) -> None: self._start = 0 - self._array = [] + self._array: list[_TextAccumulator] = [] self._constructor = _text_accumulator - def __getitem__(self, i): + def __getitem__(self, i: int) -> _TextAccumulator: i -= self._start if i < 0: self._start += i @@ -1975,11 +2110,11 @@ def __getitem__(self, i): self._array.append(self._constructor()) return self._array[i] - def clear(self): + def clear(self) -> None: for ta in self._array: - ta._text.clear() + ta.text.clear() - def dump(self): + def dump(self) -> str: texts = [ta.output() for ta in self._array] return "".join(texts) @@ -2003,13 +2138,13 @@ def __post_init__(self, args: tuple[str, ...]) -> None: ) extra_arguments = 1 if self.type == "file" else 0 if len(args) < extra_arguments: - fail(f"Not enough arguments for destination {self.name} new {self.type}") + fail(f"Not enough arguments for destination " + f"{self.name!r} new {self.type!r}") if len(args) > extra_arguments: - fail(f"Too many arguments for destination {self.name} new {self.type}") + fail(f"Too many arguments for destination {self.name!r} new {self.type!r}") if self.type =='file': d = {} filename = self.clinic.filename - assert filename is not None d['path'] = filename dirname, basename = os.path.split(filename) if not dirname: @@ -2021,26 +2156,22 @@ def __post_init__(self, args: tuple[str, ...]) -> None: def __repr__(self) -> str: if self.type == 'file': - file_repr = " " + repr(self.filename) + type_repr = f"type='file' file={self.filename!r}" else: - file_repr = '' - return "".join(("")) + type_repr = f"type={self.type!r}" + return f"" def clear(self) -> None: if self.type != 'buffer': - fail("Can't clear destination" + self.name + " , it's not of type buffer") + fail(f"Can't clear destination {self.name!r}: it's not of type 'buffer'") self.buffers.clear() def dump(self) -> str: return self.buffers.dump() -# maps strings to Language objects. -# "languages" maps the name of the language ("C", "Python"). -# "extensions" maps the file extension ("c", "py"). +# "extensions" maps the file extension ("c", "py") to Language classes. LangDict = dict[str, Callable[[str], Language]] - -languages = { 'C': CLanguage, 'Python': PythonLanguage } extensions: LangDict = { name: CLanguage for name in "c cc cpp cxx h hh hpp hxx".split() } extensions['py'] = PythonLanguage @@ -2127,8 +2258,8 @@ def __init__( language: CLanguage, printer: BlockPrinter | None = None, *, + filename: str, verify: bool = True, - filename: str | None = None ) -> None: # maps strings to Parser objects. # (instantiated from the "parsers" global.) 
@@ -2166,7 +2297,7 @@ def __init__( 'impl_definition': d('block'), } - DestBufferType = dict[str, Callable[..., Any]] + DestBufferType = dict[str, _TextAccumulator] DestBufferList = list[DestBufferType] self.destination_buffers_stack: DestBufferList = [] @@ -2207,20 +2338,20 @@ def add_destination( *args: str ) -> None: if name in self.destinations: - fail("Destination already exists: " + repr(name)) + fail(f"Destination already exists: {name!r}") self.destinations[name] = Destination(name, type, self, args) def get_destination(self, name: str) -> Destination: d = self.destinations.get(name) if not d: - fail("Destination does not exist: " + repr(name)) + fail(f"Destination does not exist: {name!r}") return d def get_destination_buffer( self, name: str, item: int = 0 - ): + ) -> _TextAccumulator: d = self.get_destination(name) return d.buffers[item] @@ -2234,11 +2365,7 @@ def parse(self, input: str) -> str: assert dsl_name in parsers, f"No parser to handle {dsl_name!r} block." self.parsers[dsl_name] = parsers[dsl_name](self) parser = self.parsers[dsl_name] - try: - parser.parse(block) - except Exception: - fail('Exception raised during parsing:\n' + - traceback.format_exc().rstrip()) + parser.parse(block) printer.print_block(block) # these are destinations not buffers @@ -2264,15 +2391,16 @@ def parse(self, input: str) -> str: os.makedirs(dirname) except FileExistsError: if not os.path.isdir(dirname): - fail("Can't write to destination {}, " - "can't make directory {}!".format( - destination.filename, dirname)) + fail(f"Can't write to destination " + f"{destination.filename!r}; " + f"can't make directory {dirname!r}!") if self.verify: with open(destination.filename) as f: parser_2 = BlockParser(f.read(), language=self.language) blocks = list(parser_2) if (len(blocks) != 1) or (blocks[0].input != 'preserve\n'): - fail("Modified destination file " + repr(destination.filename) + ", not overwriting!") + fail(f"Modified destination file " + f"{destination.filename!r}; not overwriting!") except FileNotFoundError: pass @@ -2284,8 +2412,9 @@ def parse(self, input: str) -> str: return printer.f.getvalue() - - def _module_and_class(self, fields): + def _module_and_class( + self, fields: Iterable[str] + ) -> tuple[Module | Clinic, Class | None]: """ fields should be an iterable of field names. returns a tuple of (module, class). @@ -2293,28 +2422,34 @@ def _module_and_class(self, fields): this function is only ever used to find the parent of where a new class/module should go. 
""" - in_classes = False + parent: Clinic | Module | Class + child: Module | Class | None + module: Clinic | Module + cls: Class | None = None + so_far: list[str] = [] + parent = module = self - cls = None - so_far = [] for field in fields: so_far.append(field) - if not in_classes: + if not isinstance(parent, Class): child = parent.modules.get(field) if child: parent = module = child continue - in_classes = True if not hasattr(parent, 'classes'): return module, cls child = parent.classes.get(field) if not child: - fail('Parent class or module ' + '.'.join(so_far) + " does not exist.") + fullname = ".".join(so_far) + fail(f"Parent class or module {fullname!r} does not exist.") cls = parent = child return module, cls + def __repr__(self) -> str: + return "" + def parse_file( filename: str, @@ -2327,12 +2462,12 @@ def parse_file( extension = os.path.splitext(filename)[1][1:] if not extension: - fail("Can't extract file type for file " + repr(filename)) + fail(f"Can't extract file type for file {filename!r}") try: language = extensions[extension](filename) except KeyError: - fail("Can't identify file type for file " + repr(filename)) + fail(f"Can't identify file type for file {filename!r}") with open(filename, encoding="utf-8") as f: raw = f.read() @@ -2373,7 +2508,7 @@ def parse(self, block: Block) -> None: @dc.dataclass(repr=False) class Module: name: str - module: Module | None = None + module: Module | Clinic def __post_init__(self) -> None: self.parent = self.module @@ -2388,10 +2523,10 @@ def __repr__(self) -> str: @dc.dataclass(repr=False) class Class: name: str - module: Module | None = None - cls: Class | None = None - typedef: str | None = None - type_object: str | None = None + module: Module | Clinic + cls: Class | None + typedef: str + type_object: str def __post_init__(self) -> None: self.parent = self.cls or self.module @@ -2487,7 +2622,7 @@ def new_or_init(self) -> bool: return self in {FunctionKind.METHOD_INIT, FunctionKind.METHOD_NEW} def __repr__(self) -> str: - return f"" + return f"" INVALID: Final = FunctionKind.INVALID @@ -2516,15 +2651,15 @@ class Function: parameters: ParamDict = dc.field(default_factory=dict) _: dc.KW_ONLY name: str - module: Module - cls: Class | None = None - c_basename: str | None = None - full_name: str | None = None + module: Module | Clinic + cls: Class | None + c_basename: str | None + full_name: str return_converter: CReturnConverter + kind: FunctionKind + coexist: bool return_annotation: object = inspect.Signature.empty docstring: str = '' - kind: FunctionKind = CALLABLE - coexist: bool = False # docstring_only means "don't generate a machine-readable # signature, just a normal docstring". 
it's True for # functions with optional groups because we can't represent @@ -2532,10 +2667,25 @@ docstring_only: bool = False def __post_init__(self) -> None: - self.parent: Class | Module = self.cls or self.module + self.parent = self.cls or self.module self.self_converter: self_converter | None = None self.__render_parameters__: list[Parameter] | None = None + @functools.cached_property + def displayname(self) -> str: + """Pretty-printable name.""" + if self.kind.new_or_init: + assert isinstance(self.cls, Class) + return self.cls.name + else: + return self.name + + @functools.cached_property + def fulldisplayname(self) -> str: + if isinstance(self.module, Module): + return f"{self.module.name}.{self.displayname}" + return self.displayname + @property def render_parameters(self) -> list[Parameter]: if not self.__render_parameters__: @@ -2564,7 +2714,7 @@ def methoddef_flags(self) -> str | None: return '|'.join(flags) def __repr__(self) -> str: - return '<clinic.Function ' + self.name + '>' + return f'<clinic.Function {self.name!r}>' def copy(self, **overrides: Any) -> Function: f = dc.replace(self, **overrides) @@ -2575,6 +2725,9 @@ def copy(self, **overrides: Any) -> Function: return f +VersionTuple = tuple[int, int] + + @dc.dataclass(repr=False, slots=True) class Parameter: """ @@ -2589,10 +2742,12 @@ class Parameter: annotation: object = inspect.Parameter.empty docstring: str = '' group: int = 0 + # (`None` signifies that there is no deprecation) + deprecated_positional: VersionTuple | None = None right_bracket_count: int = dc.field(init=False, default=0) def __repr__(self) -> str: - return '<clinic.Parameter ' + self.name + '>' + return f'<clinic.Parameter {self.name!r}>' def is_keyword_only(self) -> bool: return self.kind == inspect.Parameter.KEYWORD_ONLY @@ -2628,23 +2783,20 @@ def get_displayname(self, i: int) -> str: else: return f'"argument {i}"' + def render_docstring(self) -> str: + add, out = text_accumulator() + add(f" {self.name}\n") + for line in self.docstring.split("\n"): + add(f" {line}\n") + return out().rstrip() -@dc.dataclass -class LandMine: - # try to access any - __message__: str - - def __getattribute__(self, name: str): - if name in ('__repr__', '__message__'): - return super().__getattribute__(name) - # raise RuntimeError(repr(name)) - fail("Stepped on a land mine, trying to access attribute " + repr(name) + ":\n" + self.__message__) +CConverterClassT = TypeVar("CConverterClassT", bound=type["CConverter"]) def add_c_converter( - f: type[CConverter], + f: CConverterClassT, name: str | None = None -) -> type[CConverter]: +) -> CConverterClassT: if not name: name = f.__name__ if not name.endswith('_converter'): @@ -2653,7 +2805,7 @@ def add_c_converter( converters[name] = f return f -def add_default_legacy_c_converter(cls): +def add_default_legacy_c_converter(cls: CConverterClassT) -> CConverterClassT: # automatically add converter for default format unit # (but without stomping on the existing one if it's already # set, in case you subclass) @@ -2664,25 +2816,31 @@ def add_default_legacy_c_converter(cls): def add_legacy_c_converter( format_unit: str, - **kwargs -) -> Callable[[ConverterType], ConverterType]: + **kwargs: Any +) -> Callable[[CConverterClassT], CConverterClassT]: """ Adds a legacy converter.
""" - def closure(f): + def closure(f: CConverterClassT) -> CConverterClassT: + added_f: Callable[..., CConverter] if not kwargs: added_f = f else: - added_f = functools.partial(f, **kwargs) + # mypy's special-casing for functools.partial + # can't quite grapple with this code here + added_f = functools.partial(f, **kwargs) # type: ignore[arg-type] if format_unit: legacy_converters[format_unit] = added_f return f return closure class CConverterAutoRegister(type): - def __init__(cls, name, bases, classdict): - add_c_converter(cls) - add_default_legacy_c_converter(cls) + def __init__( + cls, name: str, bases: tuple[type, ...], classdict: dict[str, Any] + ) -> None: + converter_cls = cast(type["CConverter"], cls) + add_c_converter(converter_cls) + add_default_legacy_c_converter(converter_cls) class CConverter(metaclass=CConverterAutoRegister): """ @@ -2692,10 +2850,10 @@ class CConverter(metaclass=CConverterAutoRegister): """ # The C name to use for this variable. - name: str | None = None + name: str # The Python name to use for this variable. - py_name: str | None = None + py_name: str # The C type to use for this variable. # 'type' should be a Python string specifying the type, e.g. "int". @@ -2770,7 +2928,7 @@ class CConverter(metaclass=CConverterAutoRegister): # Only used by the 'O!' format unit (and the "object" converter). subclass_of: str | None = None - # Do we want an adjacent '_length' variable for this variable? + # See also the 'length_name' property. # Only used by format units ending with '#'. length = False @@ -2785,7 +2943,7 @@ class CConverter(metaclass=CConverterAutoRegister): # This lets the self_converter overrule the user-settable # name, *just* for the text signature. # Only set by self_converter. - signature_name = None + signature_name: str | None = None # keep in sync with self_converter.__init__! def __init__(self, @@ -2799,8 +2957,8 @@ def __init__(self, py_default: str | None = None, annotation: str | Literal[Sentinels.unspecified] = unspecified, unused: bool = False, - **kwargs - ): + **kwargs: Any + ) -> None: self.name = ensure_legal_c_identifier(name) self.py_name = py_name self.unused = unused @@ -2815,8 +2973,9 @@ def __init__(self, else: names = [cls.__name__ for cls in self.default_type] types_str = ', '.join(names) - fail("{}: default value {!r} for field {} is not of type {}".format( - self.__class__.__name__, default, name, types_str)) + cls_name = self.__class__.__name__ + fail(f"{cls_name}: default value {default!r} for field " + f"{name!r} is not of type {types_str!r}") self.default = default if c_default: @@ -2827,17 +2986,32 @@ def __init__(self, if annotation is not unspecified: fail("The 'annotation' parameter is not currently permitted.") - # this is deliberate, to prevent you from caching information - # about the function in the init. - # (that breaks if we get cloned.) - # so after this change we will noisily fail. - self.function: Function | LandMine = LandMine( - "Don't access members of self.function inside converter_init!" - ) + # Make sure not to set self.function until after converter_init() has been called. + # This prevents you from caching information + # about the function in converter_init(). + # (That breaks if we get cloned.) self.converter_init(**kwargs) self.function = function - def converter_init(self): + # Add a custom __getattr__ method to improve the error message + # if somebody tries to access self.function in converter_init(). 
+ # + # mypy will assume arbitrary access is okay for a class with a __getattr__ method, + # and that's not what we want, + # so put it inside an `if not TYPE_CHECKING` block + if not TYPE_CHECKING: + def __getattr__(self, attr): + if attr == "function": + fail( + f"{self.__class__.__name__!r} object has no attribute 'function'.\n" + f"Note: accessing self.function inside converter_init is disallowed!" + ) + return super().__getattr__(attr) + # this branch is just here for coverage reporting + else: # pragma: no cover + pass + + def converter_init(self) -> None: pass def is_optional(self) -> bool: @@ -2851,14 +3025,18 @@ def _render_self(self, parameter: Parameter, data: CRenderData) -> None: s = ("&" if self.impl_by_reference else "") + name data.impl_arguments.append(s) if self.length: - data.impl_arguments.append(self.length_name()) + data.impl_arguments.append(self.length_name) # impl_parameters data.impl_parameters.append(self.simple_declaration(by_reference=self.impl_by_reference)) if self.length: - data.impl_parameters.append("Py_ssize_t " + self.length_name()) + data.impl_parameters.append(f"Py_ssize_t {self.length_name}") - def _render_non_self(self, parameter, data): + def _render_non_self( + self, + parameter: Parameter, + data: CRenderData + ) -> None: self.parameter = parameter name = self.name @@ -2911,42 +3089,48 @@ def render(self, parameter: Parameter, data: CRenderData) -> None: self._render_self(parameter, data) self._render_non_self(parameter, data) - def length_name(self): + @functools.cached_property + def length_name(self) -> str: """Computes the name of the associated "length" variable.""" - if not self.length: - return None + assert self.length is not None return self.parser_name + "_length" # Why is this one broken out separately? # For "positional-only" function parsing, # which generates a bunch of PyArg_ParseTuple calls. - def parse_argument(self, list): + def parse_argument(self, args: list[str]) -> None: assert not (self.converter and self.encoding) if self.format_unit == 'O&': assert self.converter - list.append(self.converter) + args.append(self.converter) if self.encoding: - list.append(c_repr(self.encoding)) + args.append(c_repr(self.encoding)) elif self.subclass_of: - list.append(self.subclass_of) + args.append(self.subclass_of) s = ("&" if self.parse_by_reference else "") + self.name - list.append(s) + args.append(s) if self.length: - list.append("&" + self.length_name()) + args.append(f"&{self.length_name}") # # All the functions after here are intended as extension points. # - def simple_declaration(self, by_reference=False, *, in_parser=False): + def simple_declaration( + self, + by_reference: bool = False, + *, + in_parser: bool = False + ) -> str: """ Computes the basic declaration of the variable. Used in computing the prototype declaration and the variable declaration. """ + assert isinstance(self.type, str) prototype = [self.type] if by_reference or not self.type.endswith('*'): prototype.append(" ") @@ -2961,7 +3145,7 @@ def simple_declaration(self, by_reference=False, *, in_parser=False): prototype.append(name) return "".join(prototype) - def declaration(self, *, in_parser=False) -> str: + def declaration(self, *, in_parser: bool = False) -> str: """ The C statement to declare this variable. 
""" @@ -2974,9 +3158,8 @@ def declaration(self, *, in_parser=False) -> str: declaration.append(default) declaration.append(";") if self.length: - declaration.append('\nPy_ssize_t ') - declaration.append(self.length_name()) - declaration.append(';') + declaration.append('\n') + declaration.append(f"Py_ssize_t {self.length_name};") return "".join(declaration) def initialize(self) -> str: @@ -3011,7 +3194,7 @@ def cleanup(self) -> str: """ return "" - def pre_render(self): + def pre_render(self) -> None: """ A second initialization function, like converter_init, called just before rendering. @@ -3019,7 +3202,7 @@ def pre_render(self): """ pass - def parse_arg(self, argname: str, displayname: str): + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'O&': return """ if (!{converter}({argname}, &{paramname})) {{{{ @@ -3060,7 +3243,7 @@ def set_template_dict(self, template_dict: TemplateDict) -> None: pass @property - def parser_name(self): + def parser_name(self) -> str: if self.name in CLINIC_PREFIXED_ARGS: # bpo-39741 return CLINIC_PREFIX + self.name else: @@ -3117,12 +3300,12 @@ def converter_init(self, *, accept: TypeSet = {object}) -> None: if accept == {int}: self.format_unit = 'i' elif accept != {object}: - fail("bool_converter: illegal 'accept' argument " + repr(accept)) + fail(f"bool_converter: illegal 'accept' argument {accept!r}") if self.default is not unspecified: self.default = bool(self.default) self.c_default = str(int(self.default)) - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'i': return """ {paramname} = _PyLong_AsInt({argname}); @@ -3148,13 +3331,13 @@ class defining_class_converter(CConverter): format_unit = '' show_in_signature = False - def converter_init(self, *, type=None) -> None: + def converter_init(self, *, type: str | None = None) -> None: self.specified_type = type - def render(self, parameter, data) -> None: + def render(self, parameter: Parameter, data: CRenderData) -> None: self._render_self(parameter, data) - def set_template_dict(self, template_dict): + def set_template_dict(self, template_dict: TemplateDict) -> None: template_dict['defining_class_name'] = self.name @@ -3167,13 +3350,13 @@ class char_converter(CConverter): def converter_init(self) -> None: if isinstance(self.default, self.default_type): if len(self.default) != 1: - fail("char_converter: illegal default value " + repr(self.default)) + fail(f"char_converter: illegal default value {self.default!r}") self.c_default = repr(bytes(self.default))[1:] if self.c_default == '"\'"': self.c_default = r"'\''" - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'c': return """ if (PyBytes_Check({argname}) && PyBytes_GET_SIZE({argname}) == 1) {{{{ @@ -3202,7 +3385,7 @@ def converter_init(self, *, bitwise: bool = False) -> None: if bitwise: self.format_unit = 'B' - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'b': return """ {{{{ @@ -3247,7 +3430,7 @@ class short_converter(CConverter): format_unit = 'h' c_ignored_default = "0" - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'h': return """ {{{{ @@ -3283,7 +3466,7 @@ def converter_init(self, *, bitwise: 
bool = False) -> None: else: self.converter = '_PyLong_UnsignedShort_Converter' - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'H': return """ {paramname} = (unsigned short)PyLong_AsUnsignedLongMask({argname}); @@ -3300,15 +3483,17 @@ class int_converter(CConverter): format_unit = 'i' c_ignored_default = "0" - def converter_init(self, *, accept: TypeSet = {int}, type=None) -> None: + def converter_init( + self, *, accept: TypeSet = {int}, type: str | None = None + ) -> None: if accept == {str}: self.format_unit = 'C' elif accept != {int}: - fail("int_converter: illegal 'accept' argument " + repr(accept)) + fail(f"int_converter: illegal 'accept' argument {accept!r}") if type is not None: self.type = type - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'i': return """ {paramname} = _PyLong_AsInt({argname}); @@ -3342,7 +3527,7 @@ def converter_init(self, *, bitwise: bool = False) -> None: else: self.converter = '_PyLong_UnsignedInt_Converter' - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'I': return """ {paramname} = (unsigned int)PyLong_AsUnsignedLongMask({argname}); @@ -3358,7 +3543,7 @@ class long_converter(CConverter): format_unit = 'l' c_ignored_default = "0" - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'l': return """ {paramname} = PyLong_AsLong({argname}); @@ -3379,7 +3564,7 @@ def converter_init(self, *, bitwise: bool = False) -> None: else: self.converter = '_PyLong_UnsignedLong_Converter' - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'k': return """ if (!PyLong_Check({argname})) {{{{ @@ -3397,7 +3582,7 @@ class long_long_converter(CConverter): format_unit = 'L' c_ignored_default = "0" - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'L': return """ {paramname} = PyLong_AsLongLong({argname}); @@ -3418,7 +3603,7 @@ def converter_init(self, *, bitwise: bool = False) -> None: else: self.converter = '_PyLong_UnsignedLongLong_Converter' - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'K': return """ if (!PyLong_Check({argname})) {{{{ @@ -3441,9 +3626,9 @@ def converter_init(self, *, accept: TypeSet = {int}) -> None: elif accept == {int, NoneType}: self.converter = '_Py_convert_optional_to_ssize_t' else: - fail("Py_ssize_t_converter: illegal 'accept' argument " + repr(accept)) + fail(f"Py_ssize_t_converter: illegal 'accept' argument {accept!r}") - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'n': return """ {{{{ @@ -3471,14 +3656,14 @@ def converter_init(self, *, accept: TypeSet = {int, NoneType}) -> None: elif accept == {int, NoneType}: self.converter = '_PyEval_SliceIndex' else: - fail("slice_index_converter: illegal 'accept' argument " + repr(accept)) + fail(f"slice_index_converter: illegal 'accept' argument {accept!r}") class 
size_t_converter(CConverter): type = 'size_t' converter = '_PyLong_Size_t_Converter' c_ignored_default = "0" - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'n': return """ {paramname} = PyNumber_AsSsize_t({argname}, PyExc_OverflowError); @@ -3493,7 +3678,7 @@ class fildes_converter(CConverter): type = 'int' converter = '_PyLong_FileDescriptor_Converter' - def _parse_arg(self, argname: str, displayname: str) -> str: + def _parse_arg(self, argname: str, displayname: str) -> str | None: return """ {paramname} = PyObject_AsFileDescriptor({argname}); if ({paramname} == -1) {{{{ @@ -3508,7 +3693,7 @@ class float_converter(CConverter): format_unit = 'f' c_ignored_default = "0.0" - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'f': return """ if (PyFloat_CheckExact({argname})) {{{{ @@ -3530,7 +3715,7 @@ class double_converter(CConverter): format_unit = 'd' c_ignored_default = "0.0" - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'd': return """ if (PyFloat_CheckExact({argname})) {{{{ @@ -3553,7 +3738,7 @@ class Py_complex_converter(CConverter): format_unit = 'D' c_ignored_default = "{0.0, 0.0}" - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'D': return """ {paramname} = PyComplex_AsCComplex({argname}); @@ -3570,9 +3755,9 @@ class object_converter(CConverter): def converter_init( self, *, - converter=None, - type=None, - subclass_of=None + converter: str | None = None, + type: str | None = None, + subclass_of: str | None = None ) -> None: if converter: if subclass_of: @@ -3599,10 +3784,14 @@ class buffer: pass class rwbuffer: pass class robuffer: pass -def str_converter_key(types, encoding, zeroes): +StrConverterKeyType = tuple[frozenset[type], bool, bool] + +def str_converter_key( + types: TypeSet, encoding: bool | str | None, zeroes: bool +) -> StrConverterKeyType: return (frozenset(types), bool(encoding), bool(zeroes)) -str_converter_argument_map: dict[str, str] = {} +str_converter_argument_map: dict[StrConverterKeyType, str] = {} class str_converter(CConverter): type = 'const char *' @@ -3635,41 +3824,43 @@ def converter_init( if NoneType in accept and self.c_default == "Py_None": self.c_default = "NULL" - def post_parsing(self): + def post_parsing(self) -> str: if self.encoding: name = self.name return f"PyMem_FREE({name});\n" + else: + return "" - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 's': return """ if (!PyUnicode_Check({argname})) {{{{ _PyArg_BadArgument("{{name}}", {displayname}, "str", {argname}); goto exit; }}}} - Py_ssize_t {paramname}_length; - {paramname} = PyUnicode_AsUTF8AndSize({argname}, &{paramname}_length); + Py_ssize_t {length_name}; + {paramname} = PyUnicode_AsUTF8AndSize({argname}, &{length_name}); if ({paramname} == NULL) {{{{ goto exit; }}}} - if (strlen({paramname}) != (size_t){paramname}_length) {{{{ + if (strlen({paramname}) != (size_t){length_name}) {{{{ PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; }}}} """.format(argname=argname, paramname=self.parser_name, - displayname=displayname) + displayname=displayname, 
length_name=self.length_name) if self.format_unit == 'z': return """ if ({argname} == Py_None) {{{{ {paramname} = NULL; }}}} else if (PyUnicode_Check({argname})) {{{{ - Py_ssize_t {paramname}_length; - {paramname} = PyUnicode_AsUTF8AndSize({argname}, &{paramname}_length); + Py_ssize_t {length_name}; + {paramname} = PyUnicode_AsUTF8AndSize({argname}, &{length_name}); if ({paramname} == NULL) {{{{ goto exit; }}}} - if (strlen({paramname}) != (size_t){paramname}_length) {{{{ + if (strlen({paramname}) != (size_t){length_name}) {{{{ PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; }}}} @@ -3679,7 +3870,7 @@ def parse_arg(self, argname: str, displayname: str) -> str: goto exit; }}}} """.format(argname=argname, paramname=self.parser_name, - displayname=displayname) + displayname=displayname, length_name=self.length_name) return super().parse_arg(argname, displayname) # @@ -3742,7 +3933,7 @@ class PyBytesObject_converter(CConverter): format_unit = 'S' # accept = {bytes} - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'S': return """ if (!PyBytes_Check({argname})) {{{{ @@ -3759,7 +3950,7 @@ class PyByteArrayObject_converter(CConverter): format_unit = 'Y' # accept = {bytearray} - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'Y': return """ if (!PyByteArray_Check({argname})) {{{{ @@ -3776,7 +3967,7 @@ class unicode_converter(CConverter): default_type = (str, Null, NoneType) format_unit = 'U' - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'U': return """ if (!PyUnicode_Check({argname})) {{{{ @@ -3813,16 +4004,18 @@ def converter_init( elif accept == {str, NoneType}: self.converter = '_PyUnicode_WideCharString_Opt_Converter' else: - fail("Py_UNICODE_converter: illegal 'accept' argument " + repr(accept)) + fail(f"Py_UNICODE_converter: illegal 'accept' argument {accept!r}") self.c_default = "NULL" - def cleanup(self): - if not self.length: + def cleanup(self) -> str: + if self.length: + return "" + else: return """\ PyMem_Free((void *){name}); """.format(name=self.name) - def parse_arg(self, argname: str, argnum: str) -> str: + def parse_arg(self, argname: str, argnum: str) -> str | None: if not self.length: if self.accept == {str}: return """ @@ -3881,11 +4074,11 @@ def converter_init(self, *, accept: TypeSet = {buffer}) -> None: self.format_unit = format_unit - def cleanup(self): + def cleanup(self) -> str: name = self.name return "".join(["if (", name, ".obj) {\n PyBuffer_Release(&", name, ");\n}\n"]) - def parse_arg(self, argname: str, displayname: str) -> str: + def parse_arg(self, argname: str, displayname: str) -> str | None: if self.format_unit == 'y*': return """ if (PyObject_GetBuffer({argname}, &{paramname}, PyBUF_SIMPLE) != 0) {{{{ @@ -3945,7 +4138,7 @@ def correct_name_for_self( return "void *", "null" if f.kind in (CLASS_METHOD, METHOD_NEW): return "PyTypeObject *", "type" - raise RuntimeError("Unhandled type of function f: " + repr(f.kind)) + raise AssertionError(f"Unhandled type of function f: {f.kind!r}") def required_type_for_self_for_parser( f: Function @@ -3961,13 +4154,13 @@ class self_converter(CConverter): A special-case converter: this is the default converter used for "self". 
""" - type = None + type: str | None = None format_unit = '' - def converter_init(self, *, type=None) -> None: + def converter_init(self, *, type: str | None = None) -> None: self.specified_type = type - def pre_render(self): + def pre_render(self) -> None: f = self.function default_type, default_name = correct_name_for_self(f) self.signature_name = default_name @@ -4017,10 +4210,11 @@ def pre_render(self): # in the impl call. @property - def parser_type(self): + def parser_type(self) -> str: + assert self.type is not None return required_type_for_self_for_parser(self.function) or self.type - def render(self, parameter, data): + def render(self, parameter: Parameter, data: CRenderData) -> None: """ parameter is a clinic.Parameter instance. data is a CRenderData instance. @@ -4036,9 +4230,10 @@ def render(self, parameter, data): # because we render parameters in order, and self is always first. assert len(data.impl_arguments) == 1 assert data.impl_arguments[0] == self.name + assert self.type is not None data.impl_arguments[0] = '(' + self.type + ")" + data.impl_arguments[0] - def set_template_dict(self, template_dict): + def set_template_dict(self, template_dict: TemplateDict) -> None: template_dict['self_name'] = self.name template_dict['self_type'] = self.parser_type kind = self.function.kind @@ -4057,7 +4252,7 @@ def set_template_dict(self, template_dict): line = f'{type_check} &&\n ' template_dict['self_type_check'] = line - type_object = self.function.cls.type_object + type_object = cls.type_object type_ptr = f'PyTypeObject *base_tp = {type_object};' template_dict['base_type_ptr'] = type_ptr @@ -4100,7 +4295,7 @@ def __init__( self, *, py_default: str | None = None, - **kwargs + **kwargs: Any ) -> None: self.py_default = py_default try: @@ -4248,10 +4443,11 @@ def eval_ast_expr( globals: dict[str, Any], *, filename: str = '-' -) -> FunctionType: +) -> Any: """ - Takes an ast.Expr node. Compiles and evaluates it. - Returns the result of the expression. + Takes an ast.Expr node. Compiles it into a function object, + then calls the function object with 0 arguments. + Returns the result of that function call. globals represents the globals dict the expression should see. (There's no equivalent for "locals" here.) @@ -4267,15 +4463,15 @@ def eval_ast_expr( class IndentStack: - def __init__(self): - self.indents = [] - self.margin = None + def __init__(self) -> None: + self.indents: list[int] = [] + self.margin: str | None = None - def _ensure(self): + def _ensure(self) -> None: if not self.indents: fail('IndentStack expected indents, but none are defined.') - def measure(self, line): + def measure(self, line: str) -> int: """ Returns the length of the line's margin. """ @@ -4289,7 +4485,7 @@ def measure(self, line): return self.indents[-1] return len(line) - len(stripped) - def infer(self, line): + def infer(self, line: str) -> int: """ Infer what is now the current margin based on this line. Returns: @@ -4322,31 +4518,25 @@ def infer(self, line): return outdent_count @property - def depth(self): + def depth(self) -> int: """ Returns how many margins are currently defined. """ return len(self.indents) - def indent(self, line): - """ - Indents a line by the currently defined margin. - """ - return self.margin + line - - def dedent(self, line): + def dedent(self, line: str) -> str: """ Dedents a line by the currently defined margin. - (The inverse of 'indent'.) 
""" + assert self.margin is not None, "Cannot call .dedent() before calling .infer()" margin = self.margin indent = self.indents[-1] if not line.startswith(margin): - fail('Cannot dedent, line does not start with the previous margin:') + fail('Cannot dedent; line does not start with the previous margin.') return line[indent:] -StateKeeper = Callable[[str | None], None] +StateKeeper = Callable[[str], None] ConverterArgs = dict[str, Any] class ParamState(enum.IntEnum): @@ -4385,14 +4575,19 @@ class DSLParser: state: StateKeeper keyword_only: bool positional_only: bool + deprecated_positional: VersionTuple | None group: int - parameter_state: int - seen_positional_with_default: bool + parameter_state: ParamState indent: IndentStack kind: FunctionKind coexist: bool parameter_continuation: str preserve_output: bool + star_from_version_re = create_regex( + before="* [from ", + after="]", + word=False, + ) def __init__(self, clinic: Clinic) -> None: self.clinic = clinic @@ -4416,19 +4611,22 @@ def reset(self) -> None: self.state = self.state_dsl_start self.keyword_only = False self.positional_only = False + self.deprecated_positional = None self.group = 0 self.parameter_state: ParamState = ParamState.START - self.seen_positional_with_default = False self.indent = IndentStack() self.kind = CALLABLE self.coexist = False + self.forced_text_signature: str | None = None self.parameter_continuation = '' self.preserve_output = False def directive_version(self, required: str) -> None: global version if version_comparitor(version, required) < 0: - fail("Insufficient Clinic version!\n Version: " + version + "\n Required: " + required) + fail("Insufficient Clinic version!\n" + f" Version: {version}\n" + f" Required: {required}") def directive_module(self, name: str) -> None: fields = name.split('.')[:-1] @@ -4436,8 +4634,8 @@ def directive_module(self, name: str) -> None: if cls: fail("Can't nest a module inside a class!") - if name in module.classes: - fail("Already defined module " + repr(name) + "!") + if name in module.modules: + fail(f"Already defined module {name!r}!") m = Module(name, module) module.modules[name] = m @@ -4455,7 +4653,7 @@ def directive_class( parent = cls or module if name in parent.classes: - fail("Already defined class " + repr(name) + "!") + fail(f"Already defined class {name!r}!") c = Class(name, module, cls, typedef, type_object) parent.classes[name] = c @@ -4463,7 +4661,7 @@ def directive_class( def directive_set(self, name: str, value: str) -> None: if name not in ("line_prefix", "line_suffix"): - fail("unknown variable", repr(name)) + fail(f"unknown variable {name!r}") value = value.format_map({ 'block comment start': '/*', @@ -4476,15 +4674,15 @@ def directive_destination( self, name: str, command: str, - *args + *args: str ) -> None: - if command == 'new': - self.clinic.add_destination(name, *args) - return - - if command == 'clear': - self.clinic.get_destination(name).clear() - fail("unknown destination command", repr(command)) + match command: + case "new": + self.clinic.add_destination(name, *args) + case "clear": + self.clinic.get_destination(name).clear() + case _: + fail(f"unknown destination command {command!r}") def directive_output( @@ -4497,7 +4695,7 @@ def directive_output( if command_or_name == "preset": preset = self.clinic.presets.get(destination) if not preset: - fail("Unknown preset " + repr(destination) + "!") + fail(f"Unknown preset {destination!r}!") fd.update(preset) return @@ -4526,7 +4724,11 @@ def directive_output( return if command_or_name not in fd: 
- fail("Invalid command / destination name " + repr(command_or_name) + ", must be one of:\n preset push pop print everything " + " ".join(fd)) + allowed = ["preset", "push", "pop", "print", "everything"] + allowed.extend(fd) + fail(f"Invalid command or destination name {command_or_name!r}. " + "Must be one of:\n -", + "\n - ".join([repr(word) for word in allowed])) fd[command_or_name] = d def directive_dump(self, name: str) -> None: @@ -4538,7 +4740,7 @@ def directive_printout(self, *args: str) -> None: def directive_preserve(self) -> None: if self.preserve_output: - fail("Can't have preserve twice in one block!") + fail("Can't have 'preserve' twice in one block!") self.preserve_output = True def at_classmethod(self) -> None: @@ -4556,6 +4758,11 @@ def at_coexist(self) -> None: fail("Called @coexist twice!") self.coexist = True + def at_text_signature(self, text_signature: str) -> None: + if self.forced_text_signature: + fail("Called @text_signature twice!") + self.forced_text_signature = text_signature + def parse(self, block: Block) -> None: self.reset() self.block = block @@ -4565,51 +4772,51 @@ def parse(self, block: Block) -> None: lines = block.input.split('\n') for line_number, line in enumerate(lines, self.clinic.block_parser.block_start_line_number): if '\t' in line: - fail('Tab characters are illegal in the Clinic DSL.\n\t' + repr(line), line_number=block_start) - self.state(line) - - self.next(self.state_terminal) - self.state(None) + fail(f'Tab characters are illegal in the Clinic DSL: {line!r}', + line_number=block_start) + try: + self.state(line) + except ClinicError as exc: + exc.lineno = line_number + raise - block.output.extend(self.clinic.language.render(clinic, block.signatures)) + self.do_post_block_processing_cleanup(line_number) + block.output.extend(self.clinic.language.render(self.clinic, block.signatures)) if self.preserve_output: if block.output: fail("'preserve' only works for blocks that don't produce any output!") block.output = self.saved_output - @staticmethod - def valid_line(line: str | None) -> TypeGuard[str]: - if line is None: - return False + def in_docstring(self) -> bool: + """Return true if we are processing a docstring.""" + return self.state in { + self.state_parameter_docstring, + self.state_function_docstring, + } + def valid_line(self, line: str) -> bool: # ignore comment-only lines if line.lstrip().startswith('#'): return False # Ignore empty lines too # (but not in docstring sections!) 
- if not line.strip(): + if not self.in_docstring() and not line.strip(): return False return True - @staticmethod - def calculate_indent(line: str) -> int: - return len(line) - len(line.strip()) - def next( self, state: StateKeeper, line: str | None = None ) -> None: - # real_print(self.state.__name__, "->", state.__name__, ", line=", line) self.state = state if line is not None: self.state(line) - def state_dsl_start(self, line: str | None) -> None: - # self.block = self.ClinicOutputBlock(self) + def state_dsl_start(self, line: str) -> None: if not self.valid_line(line): return @@ -4626,7 +4833,7 @@ def state_dsl_start(self, line: str | None) -> None: self.next(self.state_modulename_name, line) - def state_modulename_name(self, line: str | None) -> None: + def state_modulename_name(self, line: str) -> None: # looking for declaration, which establishes the leftmost column # line should be # modulename.fnname [as c_basename] [-> return annotation] @@ -4643,9 +4850,7 @@ def state_modulename_name(self, line: str | None) -> None: # this line is permitted to start with whitespace. # we'll call this number of spaces F (for "function"). - if not self.valid_line(line): - return - + assert self.valid_line(line) self.indent.infer(line) # are we cloning? @@ -4668,18 +4873,17 @@ def state_modulename_name(self, line: str | None) -> None: if existing_function.name == function_name: break else: - existing_function = None - if not existing_function: - print("class", cls, "module", module, "existing", existing) - print("cls. functions", cls.functions) - fail("Couldn't find existing function " + repr(existing) + "!") + print(f"{cls=}, {module=}, {existing=}", file=sys.stderr) + print(f"{(cls or module).functions=}", file=sys.stderr) + fail(f"Couldn't find existing function {existing!r}!") fields = [x.strip() for x in full_name.split('.')] function_name = fields.pop() module, cls = self.clinic._module_and_class(fields) if not (existing_function.kind is self.kind and existing_function.coexist == self.coexist): - fail("'kind' of function and cloned function don't match! (@classmethod/@staticmethod/@coexist)") + fail("'kind' of function and cloned function don't match! 
" + "(@classmethod/@staticmethod/@coexist)") function = existing_function.copy( name=function_name, full_name=full_name, module=module, cls=cls, c_basename=c_basename, docstring='' @@ -4691,36 +4895,35 @@ def state_modulename_name(self, line: str | None) -> None: return line, _, returns = line.partition('->') + returns = returns.strip() full_name, _, c_basename = line.partition(' as ') full_name = full_name.strip() c_basename = c_basename.strip() or None if not is_legal_py_identifier(full_name): - fail("Illegal function name:", full_name) + fail(f"Illegal function name: {full_name!r}") if c_basename and not is_legal_c_identifier(c_basename): - fail("Illegal C basename:", c_basename) + fail(f"Illegal C basename: {c_basename!r}") return_converter = None if returns: ast_input = f"def x() -> {returns}: pass" - module = None try: - module = ast.parse(ast_input) + module_node = ast.parse(ast_input) except SyntaxError: - pass - if not module: - fail("Badly-formed annotation for " + full_name + ": " + returns) + fail(f"Badly formed annotation for {full_name!r}: {returns!r}") + function_node = module_node.body[0] + assert isinstance(function_node, ast.FunctionDef) try: - name, legacy, kwargs = self.parse_converter(module.body[0].returns) + name, legacy, kwargs = self.parse_converter(function_node.returns) if legacy: - fail("Legacy converter {!r} not allowed as a return converter" - .format(name)) + fail(f"Legacy converter {name!r} not allowed as a return converter") if name not in return_converters: - fail("No available return converter called " + repr(name)) + fail(f"No available return converter called {name!r}") return_converter = return_converters[name](**kwargs) except ValueError: - fail("Badly-formed annotation for " + full_name + ": " + returns) + fail(f"Badly formed annotation for {full_name!r}: {returns!r}") fields = [x.strip() for x in full_name.split('.')] function_name = fields.pop() @@ -4744,8 +4947,6 @@ def state_modulename_name(self, line: str | None) -> None: if not return_converter: return_converter = CReturnConverter() - if not module: - fail("Undefined module used in declaration of " + repr(full_name.strip()) + ".") self.function = Function(name=function_name, full_name=full_name, module=module, cls=cls, c_basename=c_basename, return_converter=return_converter, kind=self.kind, coexist=self.coexist) self.block.signatures.append(self.function) @@ -4818,7 +5019,7 @@ def state_modulename_name(self, line: str | None) -> None: # separate boolean state variables.) The states are defined in the # ParamState class. - def state_parameters_start(self, line: str | None) -> None: + def state_parameters_start(self, line: str) -> None: if not self.valid_line(line): return @@ -4830,16 +5031,17 @@ def state_parameters_start(self, line: str | None) -> None: return self.next(self.state_parameter, line) - def to_required(self): + def to_required(self) -> None: """ Transition to the "required" parameter state. 
""" if self.parameter_state is not ParamState.REQUIRED: self.parameter_state = ParamState.REQUIRED + assert self.function is not None for p in self.function.parameters.values(): p.group = -p.group - def state_parameter(self, line: str | None) -> None: + def state_parameter(self, line: str) -> None: assert isinstance(self.function, Function) if not self.valid_line(line): @@ -4864,8 +5066,14 @@ def state_parameter(self, line: str | None) -> None: self.parameter_continuation = line[:-1] return + line = line.lstrip() + match = self.star_from_version_re.match(line) + if match: + self.parse_deprecated_positional(match.group(1)) + return + func = self.function - match line.lstrip(): + match line: case '*': self.parse_star(func) case '[': @@ -4930,18 +5138,22 @@ def parse_parameter(self, line: str) -> None: except SyntaxError: pass if not module: - fail("Function " + self.function.name + " has an invalid parameter declaration:\n\t" + line) + fail(f"Function {self.function.name!r} has an invalid parameter declaration:\n\t", + repr(line)) function = module.body[0] assert isinstance(function, ast.FunctionDef) function_args = function.args if len(function_args.args) > 1: - fail("Function " + self.function.name + " has an invalid parameter declaration (comma?):\n\t" + line) + fail(f"Function {self.function.name!r} has an " + f"invalid parameter declaration (comma?): {line!r}") if function_args.defaults or function_args.kw_defaults: - fail("Function " + self.function.name + " has an invalid parameter declaration (default value?):\n\t" + line) + fail(f"Function {self.function.name!r} has an " + f"invalid parameter declaration (default value?): {line!r}") if function_args.kwarg: - fail("Function " + self.function.name + " has an invalid parameter declaration (**kwargs?):\n\t" + line) + fail(f"Function {self.function.name!r} has an " + f"invalid parameter declaration (**kwargs?): {line!r}") if function_args.vararg: is_vararg = True @@ -4955,7 +5167,7 @@ def parse_parameter(self, line: str) -> None: if not default: if self.parameter_state is ParamState.OPTIONAL: - fail(f"Can't have a parameter without a default ({parameter_name!r})\n" + fail(f"Can't have a parameter without a default ({parameter_name!r}) " "after a parameter with a default!") value: Sentinels | Null if is_vararg: @@ -4983,7 +5195,7 @@ def parse_parameter(self, line: str) -> None: # of disallowed ast nodes. class DetectBadNodes(ast.NodeVisitor): bad = False - def bad_node(self, node): + def bad_node(self, node: ast.AST) -> None: self.bad = True # inline function call @@ -5010,15 +5222,16 @@ def bad_node(self, node): # but at least make an attempt at ensuring it's a valid expression. 
try: value = eval(default) - if value is unspecified: - fail("'unspecified' is not a legal default value!") except NameError: pass # probably a named constant except Exception as e: - fail("Malformed expression given as default value\n" - "{!r} caused {!r}".format(default, e)) + fail("Malformed expression given as default value " + f"{default!r} caused {e!r}") + else: + if value is unspecified: + fail("'unspecified' is not a legal default value!") if bad: - fail("Unsupported expression as default value: " + repr(default)) + fail(f"Unsupported expression as default value: {default!r}") assignment = module.body[0] assert isinstance(assignment, ast.Assign) @@ -5036,7 +5249,10 @@ def bad_node(self, node): )): c_default = kwargs.get("c_default") if not (isinstance(c_default, str) and c_default): - fail("When you specify an expression (" + repr(default) + ") as your default value,\nyou MUST specify a valid c_default." + ast.dump(expr)) + fail(f"When you specify an expression ({default!r}) " + f"as your default value, " + f"you MUST specify a valid c_default.", + ast.dump(expr)) py_default = default value = unknown elif isinstance(expr, ast.Attribute): @@ -5046,13 +5262,16 @@ def bad_node(self, node): a.append(n.attr) n = n.value if not isinstance(n, ast.Name): - fail("Unsupported default value " + repr(default) + " (looked like a Python constant)") + fail(f"Unsupported default value {default!r} " + "(looked like a Python constant)") a.append(n.id) py_default = ".".join(reversed(a)) c_default = kwargs.get("c_default") if not (isinstance(c_default, str) and c_default): - fail("When you specify a named constant (" + repr(py_default) + ") as your default value,\nyou MUST specify a valid c_default.") + fail(f"When you specify a named constant ({py_default!r}) " + "as your default value, " + "you MUST specify a valid c_default.") try: value = eval(py_default) @@ -5069,13 +5288,15 @@ def bad_node(self, node): c_default = py_default except SyntaxError as e: - fail("Syntax error: " + repr(e.text)) + fail(f"Syntax error: {e.text!r}") except (ValueError, AttributeError): value = unknown c_default = kwargs.get("c_default") py_default = default if not (isinstance(c_default, str) and c_default): - fail("When you specify a named constant (" + repr(py_default) + ") as your default value,\nyou MUST specify a valid c_default.") + fail("When you specify a named constant " + f"({py_default!r}) as your default value, " + "you MUST specify a valid c_default.") kwargs.setdefault('c_default', c_default) kwargs.setdefault('py_default', py_default) @@ -5083,7 +5304,7 @@ def bad_node(self, node): dict = legacy_converters if legacy else converters legacy_str = "legacy " if legacy else "" if name not in dict: - fail(f'{name} is not a valid {legacy_str}converter') + fail(f'{name!r} is not a valid {legacy_str}converter') # if you use a c_name for the parameter, we just give that name to the converter # but the parameter object gets the python name converter = dict[name](c_name or parameter_name, parameter_name, self.function, value, **kwargs) @@ -5108,7 +5329,8 @@ def bad_node(self, node): self.parameter_state = ParamState.START self.function.parameters.clear() else: - fail("A 'self' parameter, if specified, must be the very first thing in the parameter block.") + fail("A 'self' parameter, if specified, must be the " + "very first thing in the parameter block.") if isinstance(converter, defining_class_converter): _lp = len(self.function.parameters) @@ -5120,16 +5342,20 @@ def bad_node(self, node): if self.group: fail("A 
'defining_class' parameter cannot be in an optional group.") else: - fail("A 'defining_class' parameter, if specified, must either be the first thing in the parameter block, or come just after 'self'.") + fail("A 'defining_class' parameter, if specified, must either " + "be the first thing in the parameter block, or come just " + "after 'self'.") - p = Parameter(parameter_name, kind, function=self.function, converter=converter, default=value, group=self.group) + p = Parameter(parameter_name, kind, function=self.function, + converter=converter, default=value, group=self.group, + deprecated_positional=self.deprecated_positional) names = [k.name for k in self.function.parameters.values()] if parameter_name in names[1:]: - fail("You can't have two parameters named " + repr(parameter_name) + "!") + fail(f"You can't have two parameters named {parameter_name!r}!") elif names and parameter_name == names[0] and c_name is None: - fail(f"Parameter '{parameter_name}' requires a custom C name") + fail(f"Parameter {parameter_name!r} requires a custom C name") key = f"{parameter_name}_as_{c_name}" if c_name else parameter_name self.function.parameters[key] = p @@ -5156,10 +5382,28 @@ def parse_converter( "Annotations must be either a name, a function call, or a string." ) + def parse_deprecated_positional(self, thenceforth: str) -> None: + assert isinstance(self.function, Function) + fname = self.function.full_name + + if self.keyword_only: + fail(f"Function {fname!r}: '* [from ...]' must come before '*'") + if self.deprecated_positional: + fail(f"Function {fname!r} uses '[from ...]' more than once.") + try: + major, minor = thenceforth.split(".") + self.deprecated_positional = int(major), int(minor) + except ValueError: + fail( + f"Function {fname!r}: expected format '* [from major.minor]' " + f"where 'major' and 'minor' are integers; got {thenceforth!r}" + ) + def parse_star(self, function: Function) -> None: """Parse keyword-only parameter marker '*'.""" if self.keyword_only: - fail(f"Function {function.name} uses '*' more than once.") + fail(f"Function {function.name!r} uses '*' more than once.") + self.deprecated_positional = None self.keyword_only = True def parse_opening_square_bracket(self, function: Function) -> None: @@ -5170,7 +5414,8 @@ def parse_opening_square_bracket(self, function: Function) -> None: case ParamState.REQUIRED | ParamState.GROUP_AFTER: self.parameter_state = ParamState.GROUP_AFTER case st: - fail(f"Function {function.name} has an unsupported group configuration. " + fail(f"Function {function.name!r} " + f"has an unsupported group configuration. " f"(Unexpected state {st}.b)") self.group += 1 function.docstring_only = True @@ -5178,9 +5423,9 @@ def parse_opening_square_bracket(self, function: Function) -> None: def parse_closing_square_bracket(self, function: Function) -> None: """Parse closing parameter group symbol ']'.""" if not self.group: - fail(f"Function {function.name} has a ] without a matching [.") + fail(f"Function {function.name!r} has a ']' without a matching '['.") if not any(p.group == self.group for p in function.parameters.values()): - fail(f"Function {function.name} has an empty group.\n" + fail(f"Function {function.name!r} has an empty group. 
" "All groups must contain at least one parameter.") self.group -= 1 match self.parameter_state: @@ -5189,13 +5434,14 @@ def parse_closing_square_bracket(self, function: Function) -> None: case ParamState.GROUP_AFTER | ParamState.RIGHT_SQUARE_AFTER: self.parameter_state = ParamState.RIGHT_SQUARE_AFTER case st: - fail(f"Function {function.name} has an unsupported group configuration. " + fail(f"Function {function.name!r} " + f"has an unsupported group configuration. " f"(Unexpected state {st}.c)") def parse_slash(self, function: Function) -> None: """Parse positional-only parameter marker '/'.""" if self.positional_only: - fail(f"Function {function.name} uses '/' more than once.") + fail(f"Function {function.name!r} uses '/' more than once.") self.positional_only = True # REQUIRED and OPTIONAL are allowed here, that allows positional-only # without option groups to work (and have default values!) @@ -5206,10 +5452,10 @@ def parse_slash(self, function: Function) -> None: ParamState.GROUP_BEFORE, } if (self.parameter_state not in allowed) or self.group: - fail(f"Function {function.name} has an unsupported group configuration. " + fail(f"Function {function.name!r} has an unsupported group configuration. " f"(Unexpected state {self.parameter_state}.d)") if self.keyword_only: - fail(f"Function {function.name} mixes keyword-only and " + fail(f"Function {function.name!r} mixes keyword-only and " "positional-only parameters, which is unsupported.") # fixup preceding parameters for p in function.parameters.values(): @@ -5218,21 +5464,40 @@ def parse_slash(self, function: Function) -> None: if (p.kind is not inspect.Parameter.POSITIONAL_OR_KEYWORD and not isinstance(p.converter, self_converter) ): - fail(f"Function {function.name} mixes keyword-only and " + fail(f"Function {function.name!r} mixes keyword-only and " "positional-only parameters, which is unsupported.") p.kind = inspect.Parameter.POSITIONAL_ONLY - def state_parameter_docstring_start(self, line: str | None) -> None: + def state_parameter_docstring_start(self, line: str) -> None: + assert self.indent.margin is not None, "self.margin.infer() has not yet been called to set the margin" self.parameter_docstring_indent = len(self.indent.margin) assert self.indent.depth == 3 return self.next(self.state_parameter_docstring, line) + def docstring_append(self, obj: Function | Parameter, line: str) -> None: + """Add a rstripped line to the current docstring.""" + # gh-80282: We filter out non-ASCII characters from the docstring, + # since historically, some compilers may balk on non-ASCII input. + # If you're using Argument Clinic in an external project, + # you may not need to support the same array of platforms as CPython, + # so you may be able to remove this restriction. + matches = re.finditer(r'[^\x00-\x7F]', line) + if offending := ", ".join([repr(m[0]) for m in matches]): + warn("Non-ascii characters are not allowed in docstrings:", + offending) + + docstring = obj.docstring + if docstring: + docstring += "\n" + if stripped := line.rstrip(): + docstring += self.indent.dedent(stripped) + obj.docstring = docstring + # every line of the docstring must start with at least F spaces, # where F > P. # these F spaces will be stripped. 
- def state_parameter_docstring(self, line): - stripped = line.strip() - if stripped.startswith('#'): + def state_parameter_docstring(self, line: str) -> None: + if not self.valid_line(line): return indent = self.indent.measure(line) @@ -5245,195 +5510,166 @@ def state_parameter_docstring(self, line): assert self.indent.depth == 1 return self.next(self.state_function_docstring, line) - assert self.function.parameters - last_parameter = next(reversed(list(self.function.parameters.values()))) - - new_docstring = last_parameter.docstring - - if new_docstring: - new_docstring += '\n' - if stripped: - new_docstring += self.indent.dedent(line) - - last_parameter.docstring = new_docstring + assert self.function and self.function.parameters + last_param = next(reversed(self.function.parameters.values())) + self.docstring_append(last_param, line) # the final stanza of the DSL is the docstring. - def state_function_docstring(self, line): + def state_function_docstring(self, line: str) -> None: + assert self.function is not None + if self.group: - fail("Function " + self.function.name + " has a ] without a matching [.") + fail(f"Function {self.function.name!r} has a ']' without a matching '['.") - stripped = line.strip() - if stripped.startswith('#'): + if not self.valid_line(line): return - new_docstring = self.function.docstring - if new_docstring: - new_docstring += "\n" - if stripped: - line = self.indent.dedent(line).rstrip() - else: - line = '' - new_docstring += line - self.function.docstring = new_docstring - - def format_docstring(self): - f = self.function - - new_or_init = f.kind.new_or_init - if new_or_init and not f.docstring: - # don't render a docstring at all, no signature, nothing. - return f.docstring + self.docstring_append(self.function, line) + def format_docstring_signature( + self, f: Function, parameters: list[Parameter] + ) -> str: text, add, output = _text_accumulator() - parameters = f.render_parameters - - ## - ## docstring first line - ## - - if new_or_init: - # classes get *just* the name of the class - # not __new__, not __init__, and not module.classname - assert f.cls - add(f.cls.name) + add(f.displayname) + if self.forced_text_signature: + add(self.forced_text_signature) else: - add(f.name) - add('(') - - # populate "right_bracket_count" field for every parameter - assert parameters, "We should always have a self parameter. " + repr(f) - assert isinstance(parameters[0].converter, self_converter) - # self is always positional-only. - assert parameters[0].is_positional_only() - assert parameters[0].right_bracket_count == 0 - positional_only = True - for p in parameters[1:]: - if not p.is_positional_only(): - positional_only = False - else: - assert positional_only - if positional_only: - p.right_bracket_count = abs(p.group) - else: - # don't put any right brackets around non-positional-only parameters, ever. 
- p.right_bracket_count = 0 - - right_bracket_count = 0 - - def fix_right_bracket_count(desired): - nonlocal right_bracket_count - s = '' - while right_bracket_count < desired: - s += '[' - right_bracket_count += 1 - while right_bracket_count > desired: - s += ']' - right_bracket_count -= 1 - return s - - need_slash = False - added_slash = False - need_a_trailing_slash = False - - # we only need a trailing slash: - # * if this is not a "docstring_only" signature - # * and if the last *shown* parameter is - # positional only - if not f.docstring_only: - for p in reversed(parameters): - if not p.converter.show_in_signature: - continue - if p.is_positional_only(): - need_a_trailing_slash = True - break + add('(') + + # populate "right_bracket_count" field for every parameter + assert parameters, "We should always have a self parameter. " + repr(f) + assert isinstance(parameters[0].converter, self_converter) + # self is always positional-only. + assert parameters[0].is_positional_only() + assert parameters[0].right_bracket_count == 0 + positional_only = True + for p in parameters[1:]: + if not p.is_positional_only(): + positional_only = False + else: + assert positional_only + if positional_only: + p.right_bracket_count = abs(p.group) + else: + # don't put any right brackets around non-positional-only parameters, ever. + p.right_bracket_count = 0 + + right_bracket_count = 0 + + def fix_right_bracket_count(desired: int) -> str: + nonlocal right_bracket_count + s = '' + while right_bracket_count < desired: + s += '[' + right_bracket_count += 1 + while right_bracket_count > desired: + s += ']' + right_bracket_count -= 1 + return s + + need_slash = False + added_slash = False + need_a_trailing_slash = False + + # we only need a trailing slash: + # * if this is not a "docstring_only" signature + # * and if the last *shown* parameter is + # positional only + if not f.docstring_only: + for p in reversed(parameters): + if not p.converter.show_in_signature: + continue + if p.is_positional_only(): + need_a_trailing_slash = True + break - added_star = False + added_star = False - first_parameter = True - last_p = parameters[-1] - line_length = len(''.join(text)) - indent = " " * line_length - def add_parameter(text): - nonlocal line_length - nonlocal first_parameter - if first_parameter: - s = text - first_parameter = False - else: - s = ' ' + text - if line_length + len(s) >= 72: - add('\n') - add(indent) - line_length = len(indent) + first_parameter = True + last_p = parameters[-1] + line_length = len(''.join(text)) + indent = " " * line_length + def add_parameter(text: str) -> None: + nonlocal line_length + nonlocal first_parameter + if first_parameter: s = text - line_length += len(s) - add(s) - - for p in parameters: - if not p.converter.show_in_signature: - continue - assert p.name + first_parameter = False + else: + s = ' ' + text + if line_length + len(s) >= 72: + add('\n') + add(indent) + line_length = len(indent) + s = text + line_length += len(s) + add(s) + + for p in parameters: + if not p.converter.show_in_signature: + continue + assert p.name - is_self = isinstance(p.converter, self_converter) - if is_self and f.docstring_only: - # this isn't a real machine-parsable signature, - # so let's not print the "self" parameter - continue + is_self = isinstance(p.converter, self_converter) + if is_self and f.docstring_only: + # this isn't a real machine-parsable signature, + # so let's not print the "self" parameter + continue - if p.is_positional_only(): - need_slash = not f.docstring_only - elif 
need_slash and not (added_slash or p.is_positional_only()): - added_slash = True - add_parameter('/,') - - if p.is_keyword_only() and not added_star: - added_star = True - add_parameter('*,') - - p_add, p_output = text_accumulator() - p_add(fix_right_bracket_count(p.right_bracket_count)) - - if isinstance(p.converter, self_converter): - # annotate first parameter as being a "self". - # - # if inspect.Signature gets this function, - # and it's already bound, the self parameter - # will be stripped off. - # - # if it's not bound, it should be marked - # as positional-only. - # - # note: we don't print "self" for __init__, - # because this isn't actually the signature - # for __init__. (it can't be, __init__ doesn't - # have a docstring.) if this is an __init__ - # (or __new__), then this signature is for - # calling the class to construct a new instance. - p_add('$') + if p.is_positional_only(): + need_slash = not f.docstring_only + elif need_slash and not (added_slash or p.is_positional_only()): + added_slash = True + add_parameter('/,') + + if p.is_keyword_only() and not added_star: + added_star = True + add_parameter('*,') + + p_add, p_output = text_accumulator() + p_add(fix_right_bracket_count(p.right_bracket_count)) + + if isinstance(p.converter, self_converter): + # annotate first parameter as being a "self". + # + # if inspect.Signature gets this function, + # and it's already bound, the self parameter + # will be stripped off. + # + # if it's not bound, it should be marked + # as positional-only. + # + # note: we don't print "self" for __init__, + # because this isn't actually the signature + # for __init__. (it can't be, __init__ doesn't + # have a docstring.) if this is an __init__ + # (or __new__), then this signature is for + # calling the class to construct a new instance. + p_add('$') - if p.is_vararg(): - p_add("*") + if p.is_vararg(): + p_add("*") - name = p.converter.signature_name or p.name - p_add(name) + name = p.converter.signature_name or p.name + p_add(name) - if not p.is_vararg() and p.converter.is_optional(): - p_add('=') - value = p.converter.py_default - if not value: - value = repr(p.converter.default) - p_add(value) + if not p.is_vararg() and p.converter.is_optional(): + p_add('=') + value = p.converter.py_default + if not value: + value = repr(p.converter.default) + p_add(value) - if (p != last_p) or need_a_trailing_slash: - p_add(',') + if (p != last_p) or need_a_trailing_slash: + p_add(',') - add_parameter(p_output()) + add_parameter(p_output()) - add(fix_right_bracket_count(0)) - if need_a_trailing_slash: - add_parameter('/') - add(')') + add(fix_right_bracket_count(0)) + if need_a_trailing_slash: + add_parameter('/') + add(')') # PEP 8 says: # @@ -5451,35 +5687,27 @@ def add_parameter(text): if not f.docstring_only: add("\n" + sig_end_marker + "\n") - docstring_first_line = output() + signature_line = output() # now fix up the places where the brackets look wrong - docstring_first_line = docstring_first_line.replace(', ]', ',] ') + return signature_line.replace(', ]', ',] ') - # okay. now we're officially building the "parameters" section. 
- # create substitution text for {parameters} - spacer_line = False - for p in parameters: - if not p.docstring.strip(): - continue - if spacer_line: + @staticmethod + def format_docstring_parameters(params: list[Parameter]) -> str: + """Create substitution text for {parameters}""" + add, output = text_accumulator() + for p in params: + if p.docstring: + add(p.render_docstring()) add('\n') - else: - spacer_line = True - add(" ") - add(p.name) - add('\n') - add(textwrap.indent(rstrip_lines(p.docstring.rstrip()), " ")) - parameters = output() - if parameters: - parameters += '\n' + return output() - ## - ## docstring body - ## - - docstring = f.docstring.rstrip() - lines = [line.rstrip() for line in docstring.split('\n')] + def format_docstring(self) -> str: + assert self.function is not None + f = self.function + if f.kind.new_or_init and not f.docstring: + # don't render a docstring at all, no signature, nothing. + return f.docstring # Enforce the summary line! # The first line of a docstring should be a summary of the function. @@ -5493,62 +5721,64 @@ def add_parameter(text): # Guido said Clinic should enforce this: # http://mail.python.org/pipermail/python-dev/2013-June/127110.html + lines = f.docstring.split('\n') if len(lines) >= 2: if lines[1]: - fail("Docstring for " + f.full_name + " does not have a summary line!\n" + - "Every non-blank function docstring must start with\n" + - "a single line summary followed by an empty line.") + fail(f"Docstring for {f.full_name!r} does not have a summary line!\n" + "Every non-blank function docstring must start with " + "a single line summary followed by an empty line.") elif len(lines) == 1: # the docstring is only one line right now--the summary line. # add an empty line after the summary line so we have space # between it and the {parameters} we're about to add. lines.append('') - parameters_marker_count = len(docstring.split('{parameters}')) - 1 + parameters_marker_count = len(f.docstring.split('{parameters}')) - 1 if parameters_marker_count > 1: fail('You may not specify {parameters} more than once in a docstring!') + # insert signature at front and params after the summary line if not parameters_marker_count: - # insert after summary line lines.insert(2, '{parameters}') + lines.insert(0, '{signature}') - # insert at front of docstring - lines.insert(0, docstring_first_line) - + # finalize docstring + params = f.render_parameters + parameters = self.format_docstring_parameters(params) + signature = self.format_docstring_signature(f, params) docstring = "\n".join(lines) + return linear_format(docstring, + signature=signature, + parameters=parameters).rstrip() - add(docstring) - docstring = output() - - docstring = linear_format(docstring, parameters=parameters) - docstring = docstring.rstrip() - - return docstring - - def state_terminal(self, line): + def do_post_block_processing_cleanup(self, lineno: int) -> None: """ Called when processing the block is done. 
""" - assert not line - if not self.function: return - if self.keyword_only: - values = self.function.parameters.values() - if not values: - no_parameter_after_star = True + def check_remaining( + symbol: str, + condition: Callable[[Parameter], bool] + ) -> None: + assert isinstance(self.function, Function) + + if values := self.function.parameters.values(): + last_param = next(reversed(values)) + no_param_after_symbol = condition(last_param) else: - last_parameter = next(reversed(list(values))) - no_parameter_after_star = last_parameter.kind != inspect.Parameter.KEYWORD_ONLY - if no_parameter_after_star: - fail("Function " + self.function.name + " specifies '*' without any parameters afterwards.") - - # remove trailing whitespace from all parameter docstrings - for name, value in self.function.parameters.items(): - if not value: - continue - value.docstring = value.docstring.rstrip() + no_param_after_symbol = True + if no_param_after_symbol: + fname = self.function.full_name + fail(f"Function {fname!r} specifies {symbol!r} " + "without any parameters afterwards.", line_number=lineno) + + if self.keyword_only: + check_remaining("*", lambda p: p.kind != inspect.Parameter.KEYWORD_ONLY) + + if self.deprecated_positional: + check_remaining("* [from ...]", lambda p: not p.deprecated_positional) self.function.docstring = self.format_docstring() @@ -5573,10 +5803,9 @@ def state_terminal(self, line): clinic = None -def main(argv): - import sys - import argparse +def create_cli() -> argparse.ArgumentParser: cmdline = argparse.ArgumentParser( + prog="clinic.py", description="""Preprocessor for CPython C files. The purpose of the Argument Clinic is automating all the boilerplate involved @@ -5584,25 +5813,35 @@ def main(argv): signatures ("docstrings") for CPython builtins. 
For more information see https://docs.python.org/3/howto/clinic.html""") - cmdline.add_argument("-f", "--force", action='store_true') - cmdline.add_argument("-o", "--output", type=str) - cmdline.add_argument("-v", "--verbose", action='store_true') - cmdline.add_argument("--converters", action='store_true') + cmdline.add_argument("-f", "--force", action='store_true', + help="force output regeneration") + cmdline.add_argument("-o", "--output", type=str, + help="redirect file output to OUTPUT") + cmdline.add_argument("-v", "--verbose", action='store_true', + help="enable verbose mode") + cmdline.add_argument("--converters", action='store_true', + help=("print a list of all supported converters " + "and return converters")) cmdline.add_argument("--make", action='store_true', - help="Walk --srcdir to run over all relevant files.") + help="walk --srcdir to run over all relevant files") cmdline.add_argument("--srcdir", type=str, default=os.curdir, - help="The directory tree to walk in --make mode.") - cmdline.add_argument("filename", type=str, nargs="*") - ns = cmdline.parse_args(argv) + help="the directory tree to walk in --make mode") + cmdline.add_argument("--exclude", type=str, action="append", + help=("a file to exclude in --make mode; " + "can be given multiple times")) + cmdline.add_argument("filename", metavar="FILE", type=str, nargs="*", + help="the list of files to process") + return cmdline + +def run_clinic(parser: argparse.ArgumentParser, ns: argparse.Namespace) -> None: if ns.converters: if ns.filename: - print("Usage error: can't specify --converters and a filename at the same time.") - print() - cmdline.print_usage() - sys.exit(-1) - converters = [] - return_converters = [] + parser.error( + "can't specify --converters and a filename at the same time" + ) + converters: list[tuple[str, str]] = [] + return_converters: list[tuple[str, str]] = [] ignored = set(""" add_c_converter add_c_return_converter @@ -5654,19 +5893,18 @@ def main(argv): print() print("All converters also accept (c_default=None, py_default=None, annotation=None).") print("All return converters also accept (py_default=None).") - sys.exit(0) + return if ns.make: if ns.output or ns.filename: - print("Usage error: can't use -o or filenames with --make.") - print() - cmdline.print_usage() - sys.exit(-1) + parser.error("can't use -o or filenames with --make") if not ns.srcdir: - print("Usage error: --srcdir must not be empty with --make.") - print() - cmdline.print_usage() - sys.exit(-1) + parser.error("--srcdir must not be empty with --make") + if ns.exclude: + excludes = [os.path.join(ns.srcdir, f) for f in ns.exclude] + excludes = [os.path.normpath(f) for f in excludes] + else: + excludes = [] for root, dirs, files in os.walk(ns.srcdir): for rcs_dir in ('.svn', '.git', '.hg', 'build', 'externals'): if rcs_dir in dirs: @@ -5676,20 +5914,19 @@ def main(argv): if not filename.endswith(('.c', '.cpp', '.h')): continue path = os.path.join(root, filename) + path = os.path.normpath(path) + if path in excludes: + continue if ns.verbose: print(path) parse_file(path, verify=not ns.force) return if not ns.filename: - cmdline.print_usage() - sys.exit(-1) + parser.error("no input files") if ns.output and len(ns.filename) > 1: - print("Usage error: can't use -o with multiple filenames.") - print() - cmdline.print_usage() - sys.exit(-1) + parser.error("can't use -o with multiple filenames") for filename in ns.filename: if ns.verbose: @@ -5697,5 +5934,17 @@ def main(argv): parse_file(filename, output=ns.output, verify=not ns.force) 
+def main(argv: list[str] | None = None) -> NoReturn: + parser = create_cli() + args = parser.parse_args(argv) + try: + run_clinic(parser, args) + except ClinicError as exc: + sys.stderr.write(exc.report()) + sys.exit(1) + else: + sys.exit(0) + + if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) + main()
diff --git a/Tools/clinic/cpp.py b/Tools/clinic/cpp.py index fbac81336b545e..16eee6fc399491 100644 --- a/Tools/clinic/cpp.py +++ b/Tools/clinic/cpp.py @@ -1,7 +1,6 @@ import dataclasses as dc import re import sys -from collections.abc import Callable from typing import NoReturn @@ -44,9 +43,12 @@ def __post_init__(self) -> None: self.line_number = 0 def __repr__(self) -> str: - return ( - f"<clinic.Monitor {id(self)} line={self.line_number} condition={self.condition()!r}>" + parts = ( + str(id(self)), + f"line={self.line_number}", + f"condition={self.condition()!r}" ) + return f"<clinic.Monitor {' '.join(parts)}>" def status(self) -> str: return str(self.line_number).rjust(4) + ": " + self.condition() @@ -66,14 +68,6 @@ def fail(self, *a: object) -> NoReturn: print(" ", ' '.join(str(x) for x in a)) sys.exit(-1) - def close(self) -> None: - if self.stack: - self.fail("Ended file while still in a preprocessor conditional block!") - - def write(self, s: str) -> None: - for line in s.split("\n"): - self.writeline(line) - def writeline(self, line: str) -> None: self.line_number += 1 line = line.strip() @@ -184,11 +178,17 @@ def pop_stack() -> TokenAndCondition: if self.verbose: print(self.status()) -if __name__ == '__main__': - for filename in sys.argv[1:]: + +def _main(filenames: list[str] | None = None) -> None: + filenames = filenames or sys.argv[1:] + for filename in filenames: with open(filename) as f: cpp = Monitor(filename, verbose=True) print() print(filename) - for line_number, line in enumerate(f.read().split('\n'), 1): + for line in f: cpp.writeline(line) + + +if __name__ == '__main__': + _main()
diff --git a/Tools/clinic/mypy.ini b/Tools/clinic/mypy.ini index 672911dc19abda..b1fdad673c61a1 100644 --- a/Tools/clinic/mypy.ini +++ b/Tools/clinic/mypy.ini @@ -1,12 +1,12 @@ [mypy] +files = Tools/clinic/ +pretty = True + # make sure clinic can still be run on Python 3.10 python_version = 3.10 -pretty = True -enable_error_code = ignore-without-code -disallow_any_generics = True + +# and be strict! +strict = True strict_concatenate = True -warn_redundant_casts = True -warn_unused_ignores = True -warn_unused_configs = True +enable_error_code = ignore-without-code,redundant-expr,truthy-bool warn_unreachable = True -files = Tools/clinic/
diff --git a/Tools/msi/test/test_files.wxs b/Tools/msi/test/test_files.wxs index b5f68faef30e02..87e164cb6759f6 100644 --- a/Tools/msi/test/test_files.wxs +++ b/Tools/msi/test/test_files.wxs @@ -1,6 +1,6 @@ - + 
diff --git a/Tools/peg_generator/pegen/keywordgen.py b/Tools/peg_generator/pegen/keywordgen.py index 35a5e1a229cdec..bbf13267e5763b 100644 --- a/Tools/peg_generator/pegen/keywordgen.py +++ b/Tools/peg_generator/pegen/keywordgen.py @@ -35,9 +35,6 @@ issoftkeyword = frozenset(softkwlist).__contains__ '''.lstrip() -EXTRA_KEYWORDS = ["async", "await"] - - def main() -> None: parser = argparse.ArgumentParser( description="Generate the Lib/keywords.py file from the grammar."
@@ -62,7 +59,7 @@ def main() -> None: gen.collect_rules() with args.keyword_file as thefile: - all_keywords = sorted(list(gen.keywords.keys()) + EXTRA_KEYWORDS) + all_keywords = sorted(list(gen.keywords.keys())) all_soft_keywords = sorted(gen.soft_keywords) keywords = "" if not all_keywords else " " + ",\n ".join(map(repr, all_keywords)) diff --git a/Tools/peg_generator/pegen/python_generator.py b/Tools/peg_generator/pegen/python_generator.py index 5329d0ebe5e64c..4a2883eb4ee202 100644 --- a/Tools/peg_generator/pegen/python_generator.py +++ b/Tools/peg_generator/pegen/python_generator.py @@ -102,7 +102,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> Tuple[Optional[str], str]: if name in ("NAME", "NUMBER", "STRING", "OP", "TYPE_COMMENT"): name = name.lower() return name, f"self.{name}()" - if name in ("NEWLINE", "DEDENT", "INDENT", "ENDMARKER", "ASYNC", "AWAIT"): + if name in ("NEWLINE", "DEDENT", "INDENT", "ENDMARKER"): # Avoid using names that can be Python keywords return "_" + name.lower(), f"self.expect({name!r})" return name, f"self.{name}()" diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py index 9c881897c2de1d..f798b2f772d08a 100644 --- a/Tools/scripts/summarize_stats.py +++ b/Tools/scripts/summarize_stats.py @@ -494,6 +494,22 @@ def calculate_object_stats(stats): rows.append((label, value, ratio)) return rows +def calculate_gc_stats(stats): + gc_stats = [] + for key, value in stats.items(): + if not key.startswith("GC"): + continue + n, _, rest = key[3:].partition("]") + name = rest.strip() + gen_n = int(n) + while len(gc_stats) <= gen_n: + gc_stats.append({}) + gc_stats[gen_n][name] = value + return [ + (i, gen["collections"], gen["objects collected"], gen["object visits"]) + for (i, gen) in enumerate(gc_stats) + ] + def emit_object_stats(stats): with Section("Object stats", summary="allocations, frees and dict materializatons"): rows = calculate_object_stats(stats) @@ -505,6 +521,22 @@ def emit_comparative_object_stats(base_stats, head_stats): head_rows = calculate_object_stats(head_stats) emit_table(("", "Base Count:", "Base Ratio:", "Head Count:", "Head Ratio:"), join_rows(base_rows, head_rows)) +def emit_gc_stats(stats): + with Section("GC stats", summary="GC collections and effectiveness"): + rows = calculate_gc_stats(stats) + emit_table(("Generation:", "Collections:", "Objects collected:", "Object visits:"), rows) + +def emit_comparative_gc_stats(base_stats, head_stats): + with Section("GC stats", summary="GC collections and effectiveness"): + base_rows = calculate_gc_stats(base_stats) + head_rows = calculate_gc_stats(head_stats) + emit_table( + ("Generation:", + "Base collections:", "Head collections:", + "Base objects collected:", "Head objects collected:", + "Base object visits:", "Head object visits:"), + join_rows(base_rows, head_rows)) + def get_total(opcode_stats): total = 0 for opcode_stat in opcode_stats: @@ -574,6 +606,7 @@ def output_single_stats(stats): emit_specialization_overview(opcode_stats, total) emit_call_stats(stats) emit_object_stats(stats) + emit_gc_stats(stats) with Section("Meta stats", summary="Meta statistics"): emit_table(("", "Count:"), [('Number of data files', stats['__nfiles__'])]) @@ -596,6 +629,7 @@ def output_comparative_stats(base_stats, head_stats): ) emit_comparative_call_stats(base_stats, head_stats) emit_comparative_object_stats(base_stats, head_stats) + emit_comparative_gc_stats(base_stats, head_stats) def output_stats(inputs, json_output=None): if len(inputs) == 1: diff --git a/configure b/configure 
index e6fb5e3c2b0c2f..aaacf8d2669c16 100755 --- a/configure +++ b/configure @@ -10154,6 +10154,9 @@ rm -f core conftest.err conftest.$ac_objext conftest.beam \ esac case "$CC" in +*mpicc*) + CFLAGS_NODIST="$CFLAGS_NODIST" + ;; *icc*) # ICC needs -fp-model strict or floats behave badly CFLAGS_NODIST="$CFLAGS_NODIST -fp-model strict" @@ -10546,6 +10549,12 @@ if test "x$ac_cv_header_linux_fs_h" = xyes then : printf "%s\n" "#define HAVE_LINUX_FS_H 1" >>confdefs.h +fi +ac_fn_c_check_header_compile "$LINENO" "linux/limits.h" "ac_cv_header_linux_limits_h" "$ac_includes_default" +if test "x$ac_cv_header_linux_limits_h" = xyes +then : + printf "%s\n" "#define HAVE_LINUX_LIMITS_H 1" >>confdefs.h + fi ac_fn_c_check_header_compile "$LINENO" "linux/memfd.h" "ac_cv_header_linux_memfd_h" "$ac_includes_default" if test "x$ac_cv_header_linux_memfd_h" = xyes @@ -11144,6 +11153,7 @@ fi # On Linux, netlink.h requires asm/types.h +# On FreeBSD, netlink.h is located in netlink/netlink.h ac_fn_c_check_header_compile "$LINENO" "linux/netlink.h" "ac_cv_header_linux_netlink_h" " #ifdef HAVE_ASM_TYPES_H #include <asm/types.h> @@ -11158,6 +11168,20 @@ then : printf "%s\n" "#define HAVE_LINUX_NETLINK_H 1" >>confdefs.h fi +ac_fn_c_check_header_compile "$LINENO" "netlink/netlink.h" "ac_cv_header_netlink_netlink_h" " +#ifdef HAVE_ASM_TYPES_H +#include <asm/types.h> +#endif +#ifdef HAVE_SYS_SOCKET_H +#include <sys/socket.h> +#endif + +" +if test "x$ac_cv_header_netlink_netlink_h" = xyes +then : + printf "%s\n" "#define HAVE_NETLINK_NETLINK_H 1" >>confdefs.h + +fi # On Linux, qrtr.h requires asm/types.h
diff --git a/configure.ac b/configure.ac index a1ee78047692fd..ddf6da0b9da123 100644 --- a/configure.ac +++ b/configure.ac @@ -2656,6 +2656,9 @@ yes) esac case "$CC" in +*mpicc*) + CFLAGS_NODIST="$CFLAGS_NODIST" + ;; *icc*) # ICC needs -fp-model strict or floats behave badly CFLAGS_NODIST="$CFLAGS_NODIST -fp-model strict" @@ -2845,7 +2848,7 @@ AC_DEFINE([STDC_HEADERS], [1], # checks for header files AC_CHECK_HEADERS([ \ alloca.h asm/types.h bluetooth.h conio.h direct.h dlfcn.h endian.h errno.h fcntl.h grp.h \ - ieeefp.h io.h langinfo.h libintl.h libutil.h linux/auxvec.h sys/auxv.h linux/fs.h linux/memfd.h \ + ieeefp.h io.h langinfo.h libintl.h libutil.h linux/auxvec.h sys/auxv.h linux/fs.h linux/limits.h linux/memfd.h \ linux/random.h linux/soundcard.h \ linux/tipc.h linux/wait.h netdb.h net/ethernet.h netinet/in.h netpacket/packet.h poll.h process.h pthread.h pty.h \ sched.h setjmp.h shadow.h signal.h spawn.h stropts.h sys/audioio.h sys/bsdtty.h sys/devpoll.h \ @@ -2877,7 +2880,8 @@ AC_CHECK_HEADERS([net/if.h], [], [], ]) # On Linux, netlink.h requires asm/types.h -AC_CHECK_HEADERS([linux/netlink.h], [], [], [ +# On FreeBSD, netlink.h is located in netlink/netlink.h +AC_CHECK_HEADERS([linux/netlink.h netlink/netlink.h], [], [], [ #ifdef HAVE_ASM_TYPES_H #include <asm/types.h> #endif
diff --git a/pyconfig.h.in b/pyconfig.h.in index 0828dc8d4b58cd..181dc3d7d11370 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -721,6 +721,9 @@ /* Define to 1 if you have the <linux/fs.h> header file. */ #undef HAVE_LINUX_FS_H +/* Define to 1 if you have the <linux/limits.h> header file. */ +#undef HAVE_LINUX_LIMITS_H + /* Define to 1 if you have the <linux/memfd.h> header file. */ #undef HAVE_LINUX_MEMFD_H @@ -838,6 +841,9 @@ /* Define to 1 if you have the <netinet/in.h> header file. */ #undef HAVE_NETINET_IN_H +/* Define to 1 if you have the <netlink/netlink.h> header file. */ +#undef HAVE_NETLINK_NETLINK_H + /* Define to 1 if you have the <netpacket/packet.h> header file. */ #undef HAVE_NETPACKET_PACKET_H