diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 50d1561518bd82..dbef550643e813 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,31 +1,42 @@ name: Tests +# bpo-40548: "paths-ignore" is not used to skip documentation-only PRs, because +# it prevents marking a job as mandatory. A PR cannot be merged if a job is +# mandatory but not scheduled because of "paths-ignore". on: push: branches: - master - 3.8 - 3.7 - paths-ignore: - - 'Doc/**' - - 'Misc/**' - - '**/*.md' - - '**/*.rst' pull_request: branches: - master - 3.8 - 3.7 - paths-ignore: - - 'Doc/**' - - 'Misc/**' - - '**/*.md' - - '**/*.rst' jobs: + check_source: + name: 'Check for source changes' + runs-on: ubuntu-latest + outputs: + run_tests: ${{ steps.check.outputs.run_tests }} + steps: + - uses: actions/checkout@v2 + - name: Check for source changes + id: check + run: | + if [ -z "$GITHUB_BASE_REF" ]; then + echo '::set-output name=run_tests::true' + else + git fetch origin $GITHUB_BASE_REF --depth=1 + git diff --name-only origin/$GITHUB_BASE_REF... | grep -qvE '(\.rst$|^Doc|^Misc)' && echo '::set-output name=run_tests::true' || true + fi build_win32: name: 'Windows (x86)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout@v1 - name: Build CPython @@ -38,6 +49,8 @@ jobs: build_win_amd64: name: 'Windows (x64)' runs-on: windows-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout@v1 - name: Build CPython @@ -50,6 +63,8 @@ jobs: build_macos: name: 'macOS' runs-on: macos-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' steps: - uses: actions/checkout@v1 - name: Configure CPython @@ -64,6 +79,8 @@ jobs: build_ubuntu: name: 'Ubuntu' runs-on: ubuntu-latest + needs: check_source + if: needs.check_source.outputs.run_tests == 'true' env: OPENSSL_VER: 1.1.1f steps: diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst index fc3467bee4d3cf..634e971952e8eb 100644 --- a/Doc/c-api/structures.rst +++ b/Doc/c-api/structures.rst @@ -147,23 +147,56 @@ Implementing functions and methods value of the function as exposed in Python. The function must return a new reference. + The function signature is:: + + PyObject *PyCFunction(PyObject *self, + PyObject *args); .. c:type:: PyCFunctionWithKeywords Type of the functions used to implement Python callables in C with signature :const:`METH_VARARGS | METH_KEYWORDS`. + The function signature is:: + + PyObject *PyCFunctionWithKeywords(PyObject *self, + PyObject *args, + PyObject *kwargs); .. c:type:: _PyCFunctionFast Type of the functions used to implement Python callables in C with signature :const:`METH_FASTCALL`. + The function signature is:: + + PyObject *_PyCFunctionFast(PyObject *self, + PyObject *const *args, + Py_ssize_t nargs); .. c:type:: _PyCFunctionFastWithKeywords Type of the functions used to implement Python callables in C with signature :const:`METH_FASTCALL | METH_KEYWORDS`. + The function signature is:: + + PyObject *_PyCFunctionFastWithKeywords(PyObject *self, + PyObject *const *args, + Py_ssize_t nargs, + PyObject *kwnames); + +.. c:type:: PyCMethod + + Type of the functions used to implement Python callables in C + with signature :const:`METH_METHOD | METH_FASTCALL | METH_KEYWORDS`. + The function signature is:: + + PyObject *PyCMethod(PyObject *self, + PyTypeObject *defining_class, + PyObject *const *args, + Py_ssize_t nargs, + PyObject *kwnames) + + .. 
versionadded:: 3.9 .. c:type:: PyMethodDef @@ -197,9 +230,7 @@ The :attr:`ml_flags` field is a bitfield which can include the following flags. The individual flags indicate either a calling convention or a binding convention. -There are four basic calling conventions for positional arguments -and two of them can be combined with :const:`METH_KEYWORDS` to support -also keyword arguments. So there are a total of 6 calling conventions: +There are these calling conventions: .. data:: METH_VARARGS @@ -250,6 +281,19 @@ also keyword arguments. So there are a total of 6 calling conventions: .. versionadded:: 3.7 +.. data:: METH_METHOD | METH_FASTCALL | METH_KEYWORDS + + Extension of :const:`METH_FASTCALL | METH_KEYWORDS` supporting the *defining + class*, that is, the class that contains the method in question. + The defining class might be a superclass of ``Py_TYPE(self)``. + + The method needs to be of type :c:type:`PyCMethod`, the same as for + ``METH_FASTCALL | METH_KEYWORDS`` with ``defining_class`` argument added after + ``self``. + + .. versionadded:: 3.9 + + .. data:: METH_NOARGS Methods without parameters don't need to check whether arguments are given if @@ -380,9 +424,11 @@ Accessing attributes of extension types Heap allocated types (created using :c:func:`PyType_FromSpec` or similar), ``PyMemberDef`` may contain definitions for the special members - ``__dictoffset__`` and ``__weaklistoffset__``, corresponding to - :c:member:`~PyTypeObject.tp_dictoffset` and - :c:member:`~PyTypeObject.tp_weaklistoffset` in type objects. + ``__dictoffset__``, ``__weaklistoffset__`` and ``__vectorcalloffset__``, + corresponding to + :c:member:`~PyTypeObject.tp_dictoffset`, + :c:member:`~PyTypeObject.tp_weaklistoffset` and + :c:member:`~PyTypeObject.tp_vectorcall_offset` in type objects. These must be defined with ``T_PYSSIZET`` and ``READONLY``, for example:: static PyMemberDef spam_type_members[] = { diff --git a/Doc/c-api/type.rst b/Doc/c-api/type.rst index f774ca35edab92..f387279d143eec 100644 --- a/Doc/c-api/type.rst +++ b/Doc/c-api/type.rst @@ -109,6 +109,30 @@ Type Objects .. versionadded:: 3.4 +.. c:function:: PyObject* PyType_GetModule(PyTypeObject *type) + + Return the module object associated with the given type when the type was + created using :c:func:`PyType_FromModuleAndSpec`. + + If no module is associated with the given type, sets :py:class:`TypeError` + and returns ``NULL``. + + .. versionadded:: 3.9 + +.. c:function:: void* PyType_GetModuleState(PyTypeObject *type) + + Return the state of the module object associated with the given type. + This is a shortcut for calling :c:func:`PyModule_GetState()` on the result + of :c:func:`PyType_GetModule`. + + If no module is associated with the given type, sets :py:class:`TypeError` + and returns ``NULL``. + + If the *type* has an associated module but its state is ``NULL``, + returns ``NULL`` without setting an exception. + + .. versionadded:: 3.9 + Creating Heap-Allocated Types ............................. @@ -116,7 +140,7 @@ Creating Heap-Allocated Types The following functions and structs are used to create :ref:`heap types `. -.. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) +.. c:function:: PyObject* PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) Creates and returns a heap type object from the *spec* (:const:`Py_TPFLAGS_HEAPTYPE`). @@ -127,8 +151,18 @@ The following functions and structs are used to create If *bases* is ``NULL``, the *Py_tp_base* slot is used instead. 
If that also is ``NULL``, the new type derives from :class:`object`. + The *module* must be a module object or ``NULL``. + If not ``NULL``, the module is associated with the new type and can later be + retreived with :c:func:`PyType_GetModule`. + This function calls :c:func:`PyType_Ready` on the new type. + .. versionadded:: 3.9 + +.. c:function:: PyObject* PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) + + Equivalent to ``PyType_FromModuleAndSpec(NULL, spec, bases)``. + .. versionadded:: 3.3 .. c:function:: PyObject* PyType_FromSpec(PyType_Spec *spec) @@ -194,6 +228,7 @@ The following functions and structs are used to create * :c:member:`~PyTypeObject.tp_dictoffset` (see :ref:`PyMemberDef `) * :c:member:`~PyTypeObject.tp_vectorcall_offset` + (see :ref:`PyMemberDef `) * :c:member:`~PyBufferProcs.bf_getbuffer` * :c:member:`~PyBufferProcs.bf_releasebuffer` diff --git a/Doc/conf.py b/Doc/conf.py index 32db34344a70a1..12d74ea24ce4ac 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -14,7 +14,8 @@ # --------------------- extensions = ['sphinx.ext.coverage', 'sphinx.ext.doctest', - 'pyspecific', 'c_annotations', 'escape4chm'] + 'pyspecific', 'c_annotations', 'escape4chm', + 'asdl_highlight'] doctest_global_setup = ''' diff --git a/Doc/library/array.rst b/Doc/library/array.rst index c9a9b1dabb2a79..78020738bf4f75 100644 --- a/Doc/library/array.rst +++ b/Doc/library/array.rst @@ -22,7 +22,7 @@ defined: +-----------+--------------------+-------------------+-----------------------+-------+ | ``'B'`` | unsigned char | int | 1 | | +-----------+--------------------+-------------------+-----------------------+-------+ -| ``'u'`` | Py_UNICODE | Unicode character | 2 | \(1) | +| ``'u'`` | wchar_t | Unicode character | 2 | \(1) | +-----------+--------------------+-------------------+-----------------------+-------+ | ``'h'`` | signed short | int | 2 | | +-----------+--------------------+-------------------+-----------------------+-------+ @@ -48,15 +48,16 @@ defined: Notes: (1) - The ``'u'`` type code corresponds to Python's obsolete unicode character - (:c:type:`Py_UNICODE` which is :c:type:`wchar_t`). Depending on the - platform, it can be 16 bits or 32 bits. + It can be 16 bits or 32 bits depending on the platform. - ``'u'`` will be removed together with the rest of the :c:type:`Py_UNICODE` - API. + .. versionchanged:: 3.9 + ``array('u')`` now uses ``wchar_t`` as C type instead of deprecated + ``Py_UNICODE``. This change doesn't affect to its behavior because + ``Py_UNICODE`` is alias of ``wchar_t`` since Python 3.3. .. deprecated-removed:: 3.3 4.0 + The actual representation of values is determined by the machine architecture (strictly speaking, by the C implementation). The actual size can be accessed through the :attr:`itemsize` attribute. diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index fc04114949c0c3..6c6ad01b842c8e 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -35,7 +35,7 @@ Abstract Grammar The abstract grammar is currently defined as follows: .. literalinclude:: ../../Parser/Python.asdl - :language: none + :language: asdl Node classes diff --git a/Doc/library/asyncio-task.rst b/Doc/library/asyncio-task.rst index 6627bec79823a3..42e2b4e2fc5b91 100644 --- a/Doc/library/asyncio-task.rst +++ b/Doc/library/asyncio-task.rst @@ -575,7 +575,7 @@ Waiting Primitives if task in done: # Everything will work as expected now. - .. deprecated:: 3.8 + .. deprecated-removed:: 3.8 3.11 Passing coroutine objects to ``wait()`` directly is deprecated. 
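For readers following the ``asyncio.wait()`` deprecation above, here is a minimal sketch of the recommended pattern (illustrative only, not part of this patch): wrap coroutines in tasks with ``asyncio.create_task()`` before passing them to ``wait()``, so that membership tests against the returned ``done`` and ``pending`` sets compare the exact task objects you created::

    import asyncio

    async def job(delay):
        await asyncio.sleep(delay)
        return delay

    async def main():
        # Create the tasks explicitly instead of handing bare coroutine
        # objects to asyncio.wait().
        tasks = [asyncio.create_task(job(d)) for d in (0.1, 0.2)]
        done, pending = await asyncio.wait(tasks)
        for task in tasks:
            if task in done:   # compares the same Task objects created above
                print(task.result())

    asyncio.run(main())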
diff --git a/Doc/library/code.rst b/Doc/library/code.rst index 6708079f778c1a..538e5afc7822aa 100644 --- a/Doc/library/code.rst +++ b/Doc/library/code.rst @@ -56,8 +56,8 @@ build applications which provide an interactive interpreter prompt. *source* is the source string; *filename* is the optional filename from which source was read, defaulting to ``''``; and *symbol* is the optional - grammar start symbol, which should be either ``'single'`` (the default) or - ``'eval'``. + grammar start symbol, which should be ``'single'`` (the default), ``'eval'`` + or ``'exec'``. Returns a code object (the same as ``compile(source, filename, symbol)``) if the command is complete and valid; ``None`` if the command is incomplete; raises diff --git a/Doc/library/codeop.rst b/Doc/library/codeop.rst index a52d2c62c4fea1..c66b9d3ec0a26d 100644 --- a/Doc/library/codeop.rst +++ b/Doc/library/codeop.rst @@ -43,8 +43,9 @@ To do just the former: :exc:`OverflowError` or :exc:`ValueError` if there is an invalid literal. The *symbol* argument determines whether *source* is compiled as a statement - (``'single'``, the default) or as an :term:`expression` (``'eval'``). Any - other value will cause :exc:`ValueError` to be raised. + (``'single'``, the default), as a sequence of statements (``'exec'``) or + as an :term:`expression` (``'eval'``). Any other value will + cause :exc:`ValueError` to be raised. .. note:: diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst index b1ae9d60e8ae14..a511c7eda265b2 100644 --- a/Doc/library/compileall.rst +++ b/Doc/library/compileall.rst @@ -113,6 +113,11 @@ compile Python sources. Ignore symlinks pointing outside the given directory. +.. cmdoption:: --hardlink-dupes + + If two ``.pyc`` files with different optimization level have + the same content, use hard links to consolidate duplicate files. + .. versionchanged:: 3.2 Added the ``-i``, ``-b`` and ``-h`` options. @@ -125,7 +130,7 @@ compile Python sources. Added the ``--invalidation-mode`` option. .. versionchanged:: 3.9 - Added the ``-s``, ``-p``, ``-e`` options. + Added the ``-s``, ``-p``, ``-e`` and ``--hardlink-dupes`` options. Raised the default recursion limit from 10 to :py:func:`sys.getrecursionlimit()`. Added the possibility to specify the ``-o`` option multiple times. @@ -143,7 +148,7 @@ runtime. Public functions ---------------- -.. function:: compile_dir(dir, maxlevels=sys.getrecursionlimit(), ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None) +.. function:: compile_dir(dir, maxlevels=sys.getrecursionlimit(), ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None, hardlink_dupes=False) Recursively descend the directory tree named by *dir*, compiling all :file:`.py` files along the way. Return a true value if all the files compiled successfully, @@ -193,6 +198,9 @@ Public functions the ``-s``, ``-p`` and ``-e`` options described above. They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`. + If *hardlink_dupes* is true and two ``.pyc`` files with different optimization + level have the same content, use hard links to consolidate duplicate files. + .. versionchanged:: 3.2 Added the *legacy* and *optimize* parameter. @@ -219,9 +227,9 @@ Public functions Setting *workers* to 0 now chooses the optimal number of cores. .. 
versionchanged:: 3.9 - Added *stripdir*, *prependdir* and *limit_sl_dest* arguments. + Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments. -.. function:: compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None) +.. function:: compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, \*, stripdir=None, prependdir=None, limit_sl_dest=None, hardlink_dupes=False) Compile the file with path *fullname*. Return a true value if the file compiled successfully, and a false value otherwise. @@ -257,6 +265,9 @@ Public functions the ``-s``, ``-p`` and ``-e`` options described above. They may be specified as ``str``, ``bytes`` or :py:class:`os.PathLike`. + If *hardlink_dupes* is true and two ``.pyc`` files with different optimization + level have the same content, use hard links to consolidate duplicate files. + .. versionadded:: 3.2 .. versionchanged:: 3.5 @@ -273,7 +284,7 @@ Public functions The *invalidation_mode* parameter's default value is updated to None. .. versionchanged:: 3.9 - Added *stripdir*, *prependdir* and *limit_sl_dest* arguments. + Added *stripdir*, *prependdir*, *limit_sl_dest* and *hardlink_dupes* arguments. .. function:: compile_path(skip_curdir=True, maxlevels=0, force=False, quiet=0, legacy=False, optimize=-1, invalidation_mode=None) diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index c125a1130a9603..fe63d20671dd74 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -19,6 +19,8 @@ in :pep:`557`. The member variables to use in these generated methods are defined using :pep:`526` type annotations. For example this code:: + from dataclasses import dataclass + @dataclass class InventoryItem: '''Class for keeping track of an item in inventory.''' diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index 856c1c790ae361..a44eb85b27dbab 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -26,6 +26,32 @@ function for the purposes of this module. The :mod:`functools` module defines the following functions: +.. decorator:: cache(user_function) + + Simple lightweight unbounded function cache. Sometimes called + `"memoize" `_. + + Returns the same as ``lru_cache(maxsize=None)``, creating a thin + wrapper around a dictionary lookup for the function arguments. Because it + never needs to evict old values, this is smaller and faster than + :func:`lru_cache()` with a size limit. + + For example:: + + @cache + def factorial(n): + return n * factorial(n-1) if n else 1 + + >>> factorial(10) # no previously cached result, makes 11 recursive calls + 3628800 + >>> factorial(5) # just looks up cached value result + 120 + >>> factorial(12) # makes two new recursive calls, the other 10 are cached + 479001600 + + .. versionadded:: 3.9 + + .. decorator:: cached_property(func) Transform a method of a class into a property whose value is computed once @@ -132,11 +158,11 @@ The :mod:`functools` module defines the following functions: bypassing the cache, or for rewrapping the function with a different cache. An `LRU (least recently used) cache - `_ works - best when the most recent calls are the best predictors of upcoming calls (for - example, the most popular articles on a news server tend to change each day). 
- The cache's size limit assures that the cache does not grow without bound on - long-running processes such as web servers. + `_ + works best when the most recent calls are the best predictors of upcoming + calls (for example, the most popular articles on a news server tend to + change each day). The cache's size limit assures that the cache does not + grow without bound on long-running processes such as web servers. In general, the LRU cache should only be used when you want to reuse previously computed values. Accordingly, it doesn't make sense to cache diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index a612b1e1455a0c..99bfeacbbc7407 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -480,6 +480,8 @@ ABC hierarchy:: .. class:: ResourceReader + *Superseded by TraversableReader* + An :term:`abstract base class` to provide the ability to read *resources*. @@ -795,6 +797,28 @@ ABC hierarchy:: itself does not end in ``__init__``. +.. class:: Traversable + + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + + .. versionadded:: 3.9 + + +.. class:: TraversableReader + + An abstract base class for resource readers capable of serving + the ``files`` interface. Subclasses ResourceReader and provides + concrete implementations of the ResourceReader's abstract + methods. Therefore, any loader supplying TraversableReader + also supplies ResourceReader. + + Loaders that wish to support resource reading are expected to + implement this interface. + + .. versionadded:: 3.9 + + :mod:`importlib.resources` -- Resources --------------------------------------- @@ -853,6 +877,19 @@ The following types are defined. The following functions are available. + +.. function:: files(package) + + Returns an :class:`importlib.resources.abc.Traversable` object + representing the resource container for the package (think directory) + and its resources (think files). A Traversable may contain other + containers (think subdirectories). + + *package* is either a name or a module object which conforms to the + ``Package`` requirements. + + .. versionadded:: 3.9 + .. function:: open_binary(package, resource) Open for binary reading the *resource* within *package*. diff --git a/Doc/library/random.rst b/Doc/library/random.rst index f37bc2a111d954..90366f499cae6a 100644 --- a/Doc/library/random.rst +++ b/Doc/library/random.rst @@ -217,7 +217,7 @@ Functions for sequences The optional parameter *random*. -.. function:: sample(population, k) +.. function:: sample(population, k, *, counts=None) Return a *k* length list of unique elements chosen from the population sequence or set. Used for random sampling without replacement. @@ -231,6 +231,11 @@ Functions for sequences Members of the population need not be :term:`hashable` or unique. If the population contains repeats, then each occurrence is a possible selection in the sample. + Repeated elements can be specified one at a time or with the optional + keyword-only *counts* parameter. For example, ``sample(['red', 'blue'], + counts=[4, 2], k=5)`` is equivalent to ``sample(['red', 'red', 'red', 'red', + 'blue', 'blue'], k=5)``. + To choose a sample from a range of integers, use a :func:`range` object as an argument. This is especially fast and space efficient for sampling from a large population: ``sample(range(10000000), k=60)``. @@ -238,6 +243,9 @@ Functions for sequences If the sample size is larger than the population size, a :exc:`ValueError` is raised. + .. 
versionchanged:: 3.9 + Added the *counts* parameter. + .. deprecated:: 3.9 In the future, the *population* must be a sequence. Instances of :class:`set` are no longer supported. The set must first be converted @@ -420,12 +428,11 @@ Simulations:: >>> choices(['red', 'black', 'green'], [18, 18, 2], k=6) ['red', 'green', 'black', 'black', 'red', 'black'] - >>> # Deal 20 cards without replacement from a deck of 52 playing cards - >>> # and determine the proportion of cards with a ten-value - >>> # (a ten, jack, queen, or king). - >>> deck = collections.Counter(tens=16, low_cards=36) - >>> seen = sample(list(deck.elements()), k=20) - >>> seen.count('tens') / 20 + >>> # Deal 20 cards without replacement from a deck + >>> # of 52 playing cards, and determine the proportion of cards + >>> # with a ten-value: ten, jack, queen, or king. + >>> dealt = sample(['tens', 'low cards'], counts=[16, 36], k=20) + >>> dealt.count('tens') / 20 0.15 >>> # Estimate the probability of getting 5 or more heads from 7 spins diff --git a/Doc/library/sqlite3.rst b/Doc/library/sqlite3.rst index 314d3a58e2759a..ccb82278bdaa13 100644 --- a/Doc/library/sqlite3.rst +++ b/Doc/library/sqlite3.rst @@ -928,7 +928,7 @@ a class like this:: self.x, self.y = x, y Now you want to store the point in a single SQLite column. First you'll have to -choose one of the supported types first to be used for representing the point. +choose one of the supported types to be used for representing the point. Let's just use str and separate the coordinates using a semicolon. Then you need to give your class a method ``__conform__(self, protocol)`` which must return the converted value. The parameter *protocol* will be :class:`PrepareProtocol`. diff --git a/Doc/library/types.rst b/Doc/library/types.rst index 4cb91c1a90bcfc..cdddb46783a470 100644 --- a/Doc/library/types.rst +++ b/Doc/library/types.rst @@ -329,6 +329,12 @@ Standard names are defined for the following types: Return a new view of the underlying mapping's values. + .. describe:: reversed(proxy) + + Return a reverse iterator over the keys of the underlying mapping. + + .. versionadded:: 3.9 + Additional Utility Classes and Functions ---------------------------------------- @@ -373,7 +379,7 @@ Additional Utility Classes and Functions class's __getattr__ method; this is done by raising AttributeError. This allows one to have properties active on an instance, and have virtual - attributes on the class with the same name (see Enum for an example). + attributes on the class with the same name (see :class:`enum.Enum` for an example). .. 
versionadded:: 3.4 diff --git a/Doc/tools/extensions/asdl_highlight.py b/Doc/tools/extensions/asdl_highlight.py new file mode 100644 index 00000000000000..7d2ef011c1b766 --- /dev/null +++ b/Doc/tools/extensions/asdl_highlight.py @@ -0,0 +1,51 @@ +import os +import sys +sys.path.append(os.path.abspath("../Parser/")) + +from pygments.lexer import RegexLexer, bygroups, include, words +from pygments.token import (Comment, Generic, Keyword, Name, Operator, + Punctuation, Text) + +from asdl import builtin_types +from sphinx.highlighting import lexers + +class ASDLLexer(RegexLexer): + name = "ASDL" + aliases = ["asdl"] + filenames = ["*.asdl"] + _name = r"([^\W\d]\w*)" + _text_ws = r"(\s*)" + + tokens = { + "ws": [ + (r"\n", Text), + (r"\s+", Text), + (r"--.*?$", Comment.Singleline), + ], + "root": [ + include("ws"), + ( + r"(module)" + _text_ws + _name, + bygroups(Keyword, Text, Name.Tag), + ), + ( + r"(\w+)(\*\s|\?\s|\s)(\w+)", + bygroups(Name.Builtin.Pseudo, Operator, Name), + ), + (words(builtin_types), Name.Builtin), + (r"attributes", Name.Builtin), + ( + _name + _text_ws + "(=)", + bygroups(Name, Text, Operator), + ), + (_name, Name.Class), + (r"\|", Operator), + (r"{|}|\(|\)", Punctuation), + (r".", Text), + ], + } + + +def setup(app): + lexers["asdl"] = ASDLLexer() + return {'version': '1.0', 'parallel_read_safe': True} diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index 6d2b0d905ff06e..fdfc0a8f472cd6 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -428,8 +428,8 @@ Other Language Changes lastname, *members = family.split() return lastname.upper(), *members - >>> parse('simpsons homer marge bart lisa sally') - ('SIMPSONS', 'homer', 'marge', 'bart', 'lisa', 'sally') + >>> parse('simpsons homer marge bart lisa maggie') + ('SIMPSONS', 'homer', 'marge', 'bart', 'lisa', 'maggie') (Contributed by David Cuthbert and Jordan Chapman in :issue:`32117`.) diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index ac5331e0fdf7ea..1fdcc0740c4ee2 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -245,6 +245,16 @@ that schedules a shutdown for the default executor that waits on the Added :class:`asyncio.PidfdChildWatcher`, a Linux-specific child watcher implementation that polls process file descriptors. (:issue:`38692`) +compileall +---------- + +Added new possibility to use hardlinks for duplicated ``.pyc`` files: *hardlink_dupes* parameter and --hardlink-dupes command line option. +(Contributed by Lumír 'Frenzy' Balhar in :issue:`40495`.) + +Added new options for path manipulation in resulting ``.pyc`` files: *stripdir*, *prependdir*, *limit_sl_dest* parameters and -s, -p, -e command line options. +Added the possibility to specify the option for an optimization level multiple times. +(Contributed by Lumír 'Frenzy' Balhar in :issue:`38112`.) + concurrent.futures ------------------ @@ -781,6 +791,12 @@ Changes in the Python API ``PyCF_ALLOW_TOP_LEVEL_AWAIT`` was clashing with ``CO_FUTURE_DIVISION``. (Contributed by Batuhan Taskaya in :issue:`39562`) +* ``array('u')`` now uses ``wchar_t`` as C type instead of ``Py_UNICODE``. + This change doesn't affect to its behavior because ``Py_UNICODE`` is alias + of ``wchar_t`` since Python 3.3. + (Contributed by Inada Naoki in :issue:`34538`.) + + CPython bytecode changes ------------------------ @@ -953,3 +969,6 @@ Removed * ``PyTuple_ClearFreeList()`` * ``PyUnicode_ClearFreeList()``: the Unicode free list has been removed in Python 3.3. + +* Remove ``_PyUnicode_ClearStaticStrings()`` function. 
+ (Contributed by Victor Stinner in :issue:`39465`.) diff --git a/Grammar/python.gram b/Grammar/python.gram index 0ce6ab4b4ba908..cca92090546265 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -82,19 +82,19 @@ compound_stmt[stmt_ty]: | &'while' while_stmt # NOTE: annotated_rhs may start with 'yield'; yield_expr must start with 'yield' -assignment: +assignment[stmt_ty]: | a=NAME ':' b=expression c=['=' d=annotated_rhs { d }] { CHECK_VERSION( 6, "Variable annotation syntax is", _Py_AnnAssign(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, c, 1, EXTRA) ) } - | a=('(' b=inside_paren_ann_assign_target ')' { b } - | ann_assign_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] { + | a=('(' b=single_target ')' { b } + | single_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] { CHECK_VERSION(6, "Variable annotations syntax is", _Py_AnnAssign(a, b, c, 0, EXTRA)) } | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) tc=[TYPE_COMMENT] { _Py_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | a=target b=augassign c=(yield_expr | star_expressions) { + | a=single_target b=augassign c=(yield_expr | star_expressions) { _Py_AugAssign(a, b->kind, c, EXTRA) } | invalid_assignment @@ -170,11 +170,11 @@ for_stmt[stmt_ty]: CHECK_VERSION(5, "Async for loops are", _Py_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) } with_stmt[stmt_ty]: - | 'with' '(' a=','.with_item+ ')' ':' b=block { + | 'with' '(' a=','.with_item+ ','? ')' ':' b=block { _Py_With(a, b, NULL, EXTRA) } | 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { _Py_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | ASYNC 'with' '(' a=','.with_item+ ')' ':' b=block { + | ASYNC 'with' '(' a=','.with_item+ ','? ')' ':' b=block { CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NULL, EXTRA)) } | ASYNC 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) } @@ -185,7 +185,7 @@ try_stmt[stmt_ty]: | 'try' ':' b=block f=finally_block { _Py_Try(b, NULL, NULL, f, EXTRA) } | 'try' ':' b=block ex=except_block+ el=[else_block] f=[finally_block] { _Py_Try(b, ex, el, f, EXTRA) } except_block[excepthandler_ty]: - | 'except' e=expression t=['as' z=target { z }] ':' b=block { + | 'except' e=expression t=['as' z=NAME { z }] ':' b=block { _Py_ExceptHandler(e, (t) ? ((expr_ty) t)->v.Name.id : NULL, b, EXTRA) } | 'except' ':' b=block { _Py_ExceptHandler(NULL, NULL, b, EXTRA) } finally_block[asdl_seq*]: 'finally' ':' a=block { a } @@ -548,10 +548,12 @@ kwarg_or_starred[KeywordOrStarred*]: | a=NAME '=' b=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) } | a=starred_expression { _PyPegen_keyword_or_starred(p, a, 0) } + | invalid_kwarg kwarg_or_double_starred[KeywordOrStarred*]: | a=NAME '=' b=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(a->v.Name.id, b, EXTRA)), 1) } | '**' a=expression { _PyPegen_keyword_or_starred(p, CHECK(_Py_keyword(NULL, a, EXTRA)), 1) } + | invalid_kwarg # NOTE: star_targets may contain *bitwise_or, targets may not. 
star_targets[expr_ty]: @@ -571,25 +573,28 @@ star_atom[expr_ty]: | '(' a=[star_targets_seq] ')' { _Py_Tuple(a, Store, EXTRA) } | '[' a=[star_targets_seq] ']' { _Py_List(a, Store, EXTRA) } -inside_paren_ann_assign_target[expr_ty]: - | ann_assign_subscript_attribute_target +single_target[expr_ty]: + | single_subscript_attribute_target | a=NAME { _PyPegen_set_expr_context(p, a, Store) } - | '(' a=inside_paren_ann_assign_target ')' { a } - -ann_assign_subscript_attribute_target[expr_ty]: + | '(' a=single_target ')' { a } +single_subscript_attribute_target[expr_ty]: | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Store, EXTRA) } | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) } del_targets[asdl_seq*]: a=','.del_target+ [','] { a } +# The lookaheads to del_target_end ensure that we don't match expressions where a prefix of the +# expression matches our rule, thereby letting these cases fall through to invalid_del_target. del_target[expr_ty] (memo): - | a=t_primary '.' b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) } - | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Del, EXTRA) } + | a=t_primary '.' b=NAME &del_target_end { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) } + | a=t_primary '[' b=slices ']' &del_target_end { _Py_Subscript(a, b, Del, EXTRA) } | del_t_atom del_t_atom[expr_ty]: - | a=NAME { _PyPegen_set_expr_context(p, a, Del) } + | a=NAME &del_target_end { _PyPegen_set_expr_context(p, a, Del) } | '(' a=del_target ')' { _PyPegen_set_expr_context(p, a, Del) } | '(' a=[del_targets] ')' { _Py_Tuple(a, Del, EXTRA) } | '[' a=[del_targets] ']' { _Py_List(a, Del, EXTRA) } + | invalid_del_target +del_target_end: ')' | ']' | ',' | ';' | NEWLINE targets[asdl_seq*]: a=','.target+ [','] { a } target[expr_ty] (memo): @@ -617,24 +622,40 @@ t_atom[expr_ty]: # From here on, there are rules for invalid syntax with specialised error messages incorrect_arguments: | args ',' '*' { RAISE_SYNTAX_ERROR("iterable argument unpacking follows keyword argument unpacking") } - | expression for_if_clauses ',' [args | expression for_if_clauses] { - RAISE_SYNTAX_ERROR("Generator expression must be parenthesized") } + | a=expression for_if_clauses ',' [args | expression for_if_clauses] { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "Generator expression must be parenthesized") } | a=args ',' args { _PyPegen_arguments_parsing_error(p, a) } +invalid_kwarg: + | a=expression '=' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, "expression cannot contain assignment, perhaps you meant \"==\"?") } invalid_named_expression: | a=expression ':=' expression { - RAISE_SYNTAX_ERROR("cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, "cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } invalid_assignment: - | list ':' { RAISE_SYNTAX_ERROR("only single target (not list) can be annotated") } - | tuple ':' { RAISE_SYNTAX_ERROR("only single target (not tuple) can be annotated") } - | expression ':' expression ['=' annotated_rhs] { - RAISE_SYNTAX_ERROR("illegal target for annotation") } - | a=expression ('=' | augassign) (yield_expr | star_expressions) { - RAISE_SYNTAX_ERROR_NO_COL_OFFSET("cannot assign to %s", _PyPegen_get_expr_name(a)) } + | a=list ':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not list) can be annotated") } + | a=tuple ':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } + | a=star_named_expression 
',' star_named_expressions* ':' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } + | a=expression ':' expression ['=' annotated_rhs] { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } + | a=star_expressions '=' (yield_expr | star_expressions) { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + _PyPegen_get_invalid_target(a), + "cannot assign to %s", _PyPegen_get_expr_name(_PyPegen_get_invalid_target(a))) } + | a=star_expressions augassign (yield_expr | star_expressions) { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, + "'%s' is an illegal expression for augmented assignment", + _PyPegen_get_expr_name(a) + )} + invalid_block: | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") } invalid_comprehension: - | ('[' | '(' | '{') '*' expression for_if_clauses { - RAISE_SYNTAX_ERROR("iterable unpacking cannot be used in comprehension") } + | ('[' | '(' | '{') a=starred_expression for_if_clauses { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "iterable unpacking cannot be used in comprehension") } invalid_parameters: | param_no_default* (slash_with_default | param_with_default+) param_no_default { RAISE_SYNTAX_ERROR("non-default argument follows default argument") } @@ -645,3 +666,6 @@ invalid_lambda_star_etc: invalid_double_type_comments: | TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT { RAISE_SYNTAX_ERROR("Cannot have two type comments on def") } +invalid_del_target: + | a=star_expression &del_target_end { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot delete %s", _PyPegen_get_expr_name(a)) } diff --git a/Include/asdl.h b/Include/asdl.h index 549df2ace7555e..e962560bcd4cbe 100644 --- a/Include/asdl.h +++ b/Include/asdl.h @@ -4,9 +4,7 @@ typedef PyObject * identifier; typedef PyObject * string; -typedef PyObject * bytes; typedef PyObject * object; -typedef PyObject * singleton; typedef PyObject * constant; /* It would be nice if the code generated by asdl_c.py was completely diff --git a/Include/cpython/methodobject.h b/Include/cpython/methodobject.h new file mode 100644 index 00000000000000..7ecbfe3b5e2fe8 --- /dev/null +++ b/Include/cpython/methodobject.h @@ -0,0 +1,35 @@ +#ifndef Py_CPYTHON_METHODOBJECT_H +# error "this header file must not be included directly" +#endif + +PyAPI_DATA(PyTypeObject) PyCMethod_Type; + +#define PyCMethod_CheckExact(op) Py_IS_TYPE(op, &PyCMethod_Type) +#define PyCMethod_Check(op) PyObject_TypeCheck(op, &PyCMethod_Type) + +/* Macros for direct access to these values. Type checks are *not* + done, so use with care. */ +#define PyCFunction_GET_FUNCTION(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_meth) +#define PyCFunction_GET_SELF(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \ + NULL : ((PyCFunctionObject *)func) -> m_self) +#define PyCFunction_GET_FLAGS(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_flags) +#define PyCFunction_GET_CLASS(func) \ + (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_METHOD ? 
\ + ((PyCMethodObject *)func) -> mm_class : NULL) + +typedef struct { + PyObject_HEAD + PyMethodDef *m_ml; /* Description of the C function to call */ + PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */ + PyObject *m_module; /* The __module__ attribute, can be anything */ + PyObject *m_weakreflist; /* List of weak references */ + vectorcallfunc vectorcall; +} PyCFunctionObject; + +typedef struct { + PyCFunctionObject func; + PyTypeObject *mm_class; /* Class that defines this method */ +} PyCMethodObject; diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 45da752ed2e941..444f832f5bd8d3 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -36,7 +36,7 @@ PyAPI_FUNC(Py_ssize_t) _Py_GetRefTotal(void); PyId_foo is a static variable, either on block level or file level. On first usage, the string "foo" is interned, and the structures are linked. On interpreter - shutdown, all strings are released (through _PyUnicode_ClearStaticStrings). + shutdown, all strings are released. Alternatively, _Py_static_string allows choosing the variable name. _PyUnicode_FromId returns a borrowed reference to the interned string. @@ -289,6 +289,7 @@ typedef struct _heaptypeobject { PyBufferProcs as_buffer; PyObject *ht_name, *ht_slots, *ht_qualname; struct _dictkeysobject *ht_cached_keys; + PyObject *ht_module; /* here are optional user slots, followed by the members. */ } PyHeapTypeObject; diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index cdd052026c1ba2..dd3c2caa0cc043 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -75,7 +75,7 @@ typedef PyOSErrorObject PyWindowsErrorObject; /* Error handling definitions */ PyAPI_FUNC(void) _PyErr_SetKeyError(PyObject *); -_PyErr_StackItem *_PyErr_GetTopmostException(PyThreadState *tstate); +PyAPI_FUNC(_PyErr_StackItem*) _PyErr_GetTopmostException(PyThreadState *tstate); PyAPI_FUNC(void) _PyErr_GetExcInfo(PyThreadState *, PyObject **, PyObject **, PyObject **); /* Context manipulation (PEP 3134) */ diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 81a35cdc801d09..4fd674ffea36ea 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -1215,13 +1215,13 @@ Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) PyUnicode_AsUnicodeCopy( /* Return an interned Unicode object for an Identifier; may fail if there is no memory.*/ PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*); -/* Clear all static strings. */ -PyAPI_FUNC(void) _PyUnicode_ClearStaticStrings(void); /* Fast equality check when the inputs are known to be exact unicode types and where the hash values are equal (i.e. 
a very probable match) */ PyAPI_FUNC(int) _PyUnicode_EQ(PyObject *, PyObject *); +PyAPI_FUNC(Py_ssize_t) _PyUnicode_ScanIdentifier(PyObject *); + #ifdef __cplusplus } #endif diff --git a/Include/errcode.h b/Include/errcode.h index b37cd261d5ec4d..790518b8b7730e 100644 --- a/Include/errcode.h +++ b/Include/errcode.h @@ -29,7 +29,6 @@ extern "C" { #define E_EOFS 23 /* EOF in triple-quoted string */ #define E_EOLS 24 /* EOL in single-quoted string */ #define E_LINECONT 25 /* Unexpected characters after a line continuation */ -#define E_IDENTIFIER 26 /* Invalid characters in identifier */ #define E_BADSINGLE 27 /* Ill-formed single statement input */ #ifdef __cplusplus diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 2df796deade3a9..368990099089fe 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -50,7 +50,11 @@ extern PyObject *_PyEval_EvalCode( PyObject *kwdefs, PyObject *closure, PyObject *name, PyObject *qualname); +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +extern int _PyEval_ThreadsInitialized(PyInterpreterState *interp); +#else extern int _PyEval_ThreadsInitialized(struct pyruntimestate *runtime); +#endif extern PyStatus _PyEval_InitGIL(PyThreadState *tstate); extern void _PyEval_FiniGIL(PyThreadState *tstate); @@ -65,12 +69,12 @@ PyAPI_DATA(int) _Py_CheckRecursionLimit; /* With USE_STACKCHECK macro defined, trigger stack checks in _Py_CheckRecursiveCall() on every 64th call to Py_EnterRecursiveCall. */ static inline int _Py_MakeRecCheck(PyThreadState *tstate) { - return (++tstate->recursion_depth > _Py_CheckRecursionLimit + return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit || ++tstate->stackcheck_counter > 64); } #else static inline int _Py_MakeRecCheck(PyThreadState *tstate) { - return (++tstate->recursion_depth > _Py_CheckRecursionLimit); + return (++tstate->recursion_depth > tstate->interp->ceval.recursion_limit); } #endif @@ -90,20 +94,22 @@ static inline int _Py_EnterRecursiveCall_inline(const char *where) { #define Py_EnterRecursiveCall(where) _Py_EnterRecursiveCall_inline(where) - /* Compute the "lower-water mark" for a recursion limit. When * Py_LeaveRecursiveCall() is called with a recursion depth below this mark, * the overflowed flag is reset to 0. */ -#define _Py_RecursionLimitLowerWaterMark(limit) \ - (((limit) > 200) \ - ? 
((limit) - 50) \ - : (3 * ((limit) >> 2))) - -#define _Py_MakeEndRecCheck(x) \ - (--(x) < _Py_RecursionLimitLowerWaterMark(_Py_CheckRecursionLimit)) +static inline int _Py_RecursionLimitLowerWaterMark(int limit) { + if (limit > 200) { + return (limit - 50); + } + else { + return (3 * (limit >> 2)); + } +} static inline void _Py_LeaveRecursiveCall(PyThreadState *tstate) { - if (_Py_MakeEndRecCheck(tstate->recursion_depth)) { + tstate->recursion_depth--; + int limit = tstate->interp->ceval.recursion_limit; + if (tstate->recursion_depth < _Py_RecursionLimitLowerWaterMark(limit)) { tstate->overflowed = 0; } } diff --git a/Include/internal/pycore_hashtable.h b/Include/internal/pycore_hashtable.h new file mode 100644 index 00000000000000..18757abc28c195 --- /dev/null +++ b/Include/internal/pycore_hashtable.h @@ -0,0 +1,148 @@ +#ifndef Py_INTERNAL_HASHTABLE_H +#define Py_INTERNAL_HASHTABLE_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +/* Single linked list */ + +typedef struct _Py_slist_item_s { + struct _Py_slist_item_s *next; +} _Py_slist_item_t; + +typedef struct { + _Py_slist_item_t *head; +} _Py_slist_t; + +#define _Py_SLIST_ITEM_NEXT(ITEM) (((_Py_slist_item_t *)ITEM)->next) + +#define _Py_SLIST_HEAD(SLIST) (((_Py_slist_t *)SLIST)->head) + + +/* _Py_hashtable: table entry */ + +typedef struct { + /* used by _Py_hashtable_t.buckets to link entries */ + _Py_slist_item_t _Py_slist_item; + + Py_uhash_t key_hash; + void *key; + void *value; +} _Py_hashtable_entry_t; + + +/* _Py_hashtable: prototypes */ + +/* Forward declaration */ +struct _Py_hashtable_t; +typedef struct _Py_hashtable_t _Py_hashtable_t; + +typedef Py_uhash_t (*_Py_hashtable_hash_func) (const void *key); +typedef int (*_Py_hashtable_compare_func) (const void *key1, const void *key2); +typedef void (*_Py_hashtable_destroy_func) (void *key); +typedef _Py_hashtable_entry_t* (*_Py_hashtable_get_entry_func)(_Py_hashtable_t *ht, + const void *key); + +typedef struct { + // Allocate a memory block + void* (*malloc) (size_t size); + + // Release a memory block + void (*free) (void *ptr); +} _Py_hashtable_allocator_t; + + +/* _Py_hashtable: table */ +struct _Py_hashtable_t { + size_t nentries; // Total number of entries in the table + size_t nbuckets; + _Py_slist_t *buckets; + + _Py_hashtable_get_entry_func get_entry_func; + _Py_hashtable_hash_func hash_func; + _Py_hashtable_compare_func compare_func; + _Py_hashtable_destroy_func key_destroy_func; + _Py_hashtable_destroy_func value_destroy_func; + _Py_hashtable_allocator_t alloc; +}; + +/* Hash a pointer (void*) */ +PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr(const void *key); + +/* Comparison using memcmp() */ +PyAPI_FUNC(int) _Py_hashtable_compare_direct( + const void *key1, + const void *key2); + +PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new( + _Py_hashtable_hash_func hash_func, + _Py_hashtable_compare_func compare_func); + +PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full( + _Py_hashtable_hash_func hash_func, + _Py_hashtable_compare_func compare_func, + _Py_hashtable_destroy_func key_destroy_func, + _Py_hashtable_destroy_func value_destroy_func, + _Py_hashtable_allocator_t *allocator); + +PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); + +PyAPI_FUNC(void) _Py_hashtable_clear(_Py_hashtable_t *ht); + +typedef int (*_Py_hashtable_foreach_func) (_Py_hashtable_t *ht, + const void *key, const void *value, + void *user_data); + +/* Call func() on each entry of the hashtable. 
+ Iteration stops if func() result is non-zero, in this case it's the result + of the call. Otherwise, the function returns 0. */ +PyAPI_FUNC(int) _Py_hashtable_foreach( + _Py_hashtable_t *ht, + _Py_hashtable_foreach_func func, + void *user_data); + +PyAPI_FUNC(size_t) _Py_hashtable_size(const _Py_hashtable_t *ht); + +/* Add a new entry to the hash. The key must not be present in the hash table. + Return 0 on success, -1 on memory error. */ +PyAPI_FUNC(int) _Py_hashtable_set( + _Py_hashtable_t *ht, + const void *key, + void *value); + + +/* Get an entry. + Return NULL if the key does not exist. */ +static inline _Py_hashtable_entry_t * +_Py_hashtable_get_entry(_Py_hashtable_t *ht, const void *key) +{ + return ht->get_entry_func(ht, key); +} + + +/* Get value from an entry. + Return NULL if the entry is not found. + + Use _Py_hashtable_get_entry() to distinguish entry value equal to NULL + and entry not found. */ +PyAPI_FUNC(void*) _Py_hashtable_get(_Py_hashtable_t *ht, const void *key); + + +/* Remove a key and its associated value without calling key and value destroy + functions. + + Return the removed value if the key was found. + Return NULL if the key was not found. */ +PyAPI_FUNC(void*) _Py_hashtable_steal( + _Py_hashtable_t *ht, + const void *key); + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_HASHTABLE_H */ diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 6e9937caa9dbff..f04ea330d04571 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -33,6 +33,7 @@ struct _pending_calls { }; struct _ceval_state { + int recursion_limit; /* Records whether tracing is on for any thread. Counts the number of threads for which tstate->c_tracefunc is non-NULL, so if the value is 0, we know we don't have to check this thread's @@ -42,7 +43,25 @@ struct _ceval_state { /* This single variable consolidates all requests to break out of the fast path in the eval loop. */ _Py_atomic_int eval_breaker; + /* Request for dropping the GIL */ + _Py_atomic_int gil_drop_request; struct _pending_calls pending; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state gil; +#endif +}; + +/* fs_codec.encoding is initialized to NULL. + Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */ +struct _Py_unicode_fs_codec { + char *encoding; // Filesystem encoding (encoded to UTF-8) + int utf8; // encoding=="utf-8"? + char *errors; // Filesystem errors (encoded to UTF-8) + _Py_error_handler error_handler; +}; + +struct _Py_unicode_state { + struct _Py_unicode_fs_codec fs_codec; }; @@ -91,14 +110,7 @@ struct _is { PyObject *codec_error_registry; int codecs_initialized; - /* fs_codec.encoding is initialized to NULL. - Later, it is set to a non-NULL string by _PyUnicode_InitEncodings(). */ - struct { - char *encoding; /* Filesystem encoding (encoded to UTF-8) */ - int utf8; /* encoding=="utf-8"? */ - char *errors; /* Filesystem errors (encoded to UTF-8) */ - _Py_error_handler error_handler; - } fs_codec; + struct _Py_unicode_state unicode; PyConfig config; #ifdef HAVE_DLOPEN diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index 18203e30f5cfe3..3d925e2250d252 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -88,17 +88,12 @@ struct _PyTraceMalloc_Config { /* limit of the number of frames in a traceback, 1 by default. Variable protected by the GIL. */ int max_nframe; - - /* use domain in trace key? - Variable protected by the GIL. 
*/ - int use_domain; }; #define _PyTraceMalloc_Config_INIT \ {.initialized = TRACEMALLOC_NOT_INITIALIZED, \ .tracing = 0, \ - .max_nframe = 1, \ - .use_domain = 0} + .max_nframe = 1} PyAPI_DATA(struct _PyTraceMalloc_Config) _Py_tracemalloc_config; diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index c82e8db905188c..d96ba31207001a 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -49,8 +49,18 @@ _Py_ThreadCanHandlePendingCalls(void) /* Variable and macro for in-line access to current thread and interpreter state */ -static inline PyThreadState* _PyRuntimeState_GetThreadState(_PyRuntimeState *runtime) { +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +PyAPI_FUNC(PyThreadState*) _PyThreadState_GetTSS(void); +#endif + +static inline PyThreadState* +_PyRuntimeState_GetThreadState(_PyRuntimeState *runtime) +{ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + return _PyThreadState_GetTSS(); +#else return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->gilstate.tstate_current); +#endif } /* Get the current Python thread state. @@ -62,8 +72,14 @@ static inline PyThreadState* _PyRuntimeState_GetThreadState(_PyRuntimeState *run The caller must hold the GIL. See also PyThreadState_Get() and PyThreadState_GET(). */ -static inline PyThreadState *_PyThreadState_GET(void) { +static inline PyThreadState* +_PyThreadState_GET(void) +{ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + return _PyThreadState_GetTSS(); +#else return _PyRuntimeState_GetThreadState(&_PyRuntime); +#endif } /* Redefine PyThreadState_GET() as an alias to _PyThreadState_GET() */ diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 995fe231c32149..ebdc12b23a9ca6 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -14,12 +14,14 @@ extern "C" { /* ceval state */ struct _ceval_runtime_state { - int recursion_limit; - /* Request for dropping the GIL */ - _Py_atomic_int gil_drop_request; - /* Request for checking signals. */ + /* Request for checking signals. It is shared by all interpreters (see + bpo-40513). Any thread of any interpreter can receive a signal, but only + the main thread of the main interpreter can handle signals: see + _Py_ThreadCanHandleSignals(). */ _Py_atomic_int signals_pending; +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS struct _gil_runtime_state gil; +#endif }; /* GIL state */ diff --git a/Include/methodobject.h b/Include/methodobject.h index adb2d9e884fbb0..12e049b4043ba5 100644 --- a/Include/methodobject.h +++ b/Include/methodobject.h @@ -13,7 +13,8 @@ extern "C" { PyAPI_DATA(PyTypeObject) PyCFunction_Type; -#define PyCFunction_Check(op) Py_IS_TYPE(op, &PyCFunction_Type) +#define PyCFunction_CheckExact(op) Py_IS_TYPE(op, &PyCFunction_Type) +#define PyCFunction_Check(op) PyObject_TypeCheck(op, &PyCFunction_Type) typedef PyObject *(*PyCFunction)(PyObject *, PyObject *); typedef PyObject *(*_PyCFunctionFast) (PyObject *, PyObject *const *, Py_ssize_t); @@ -22,21 +23,13 @@ typedef PyObject *(*PyCFunctionWithKeywords)(PyObject *, PyObject *, typedef PyObject *(*_PyCFunctionFastWithKeywords) (PyObject *, PyObject *const *, Py_ssize_t, PyObject *); +typedef PyObject *(*PyCMethod)(PyObject *, PyTypeObject *, PyObject *const *, + size_t, PyObject *); + PyAPI_FUNC(PyCFunction) PyCFunction_GetFunction(PyObject *); PyAPI_FUNC(PyObject *) PyCFunction_GetSelf(PyObject *); PyAPI_FUNC(int) PyCFunction_GetFlags(PyObject *); -/* Macros for direct access to these values. 
Type checks are *not* - done, so use with care. */ -#ifndef Py_LIMITED_API -#define PyCFunction_GET_FUNCTION(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_meth) -#define PyCFunction_GET_SELF(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \ - NULL : ((PyCFunctionObject *)func) -> m_self) -#define PyCFunction_GET_FLAGS(func) \ - (((PyCFunctionObject *)func) -> m_ml -> ml_flags) -#endif Py_DEPRECATED(3.9) PyAPI_FUNC(PyObject *) PyCFunction_Call(PyObject *, PyObject *, PyObject *); struct PyMethodDef { @@ -52,6 +45,13 @@ typedef struct PyMethodDef PyMethodDef; PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, PyObject *); +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000 +#define PyCFunction_NewEx(ML, SELF, MOD) PyCMethod_New((ML), (SELF), (MOD), NULL) +PyAPI_FUNC(PyObject *) PyCMethod_New(PyMethodDef *, PyObject *, + PyObject *, PyTypeObject *); +#endif + + /* Flag passed to newmethodobject */ /* #define METH_OLDARGS 0x0000 -- unsupported now */ #define METH_VARARGS 0x0001 @@ -84,15 +84,24 @@ PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, #define METH_STACKLESS 0x0000 #endif +/* METH_METHOD means the function stores an + * additional reference to the class that defines it; + * both self and class are passed to it. + * It uses PyCMethodObject instead of PyCFunctionObject. + * May not be combined with METH_NOARGS, METH_O, METH_CLASS or METH_STATIC. + */ + +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000 +#define METH_METHOD 0x0200 +#endif + + #ifndef Py_LIMITED_API -typedef struct { - PyObject_HEAD - PyMethodDef *m_ml; /* Description of the C function to call */ - PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */ - PyObject *m_module; /* The __module__ attribute, can be anything */ - PyObject *m_weakreflist; /* List of weak references */ - vectorcallfunc vectorcall; -} PyCFunctionObject; + +#define Py_CPYTHON_METHODOBJECT_H +#include "cpython/methodobject.h" +#undef Py_CPYTHON_METHODOBJECT_H + #endif #ifdef __cplusplus diff --git a/Include/object.h b/Include/object.h index 6c30809124dea8..514d934196f571 100644 --- a/Include/object.h +++ b/Include/object.h @@ -213,6 +213,11 @@ PyAPI_FUNC(PyObject*) PyType_FromSpecWithBases(PyType_Spec*, PyObject*); #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03040000 PyAPI_FUNC(void*) PyType_GetSlot(PyTypeObject*, int); #endif +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03090000 +PyAPI_FUNC(PyObject*) PyType_FromModuleAndSpec(PyObject *, PyType_Spec *, PyObject *); +PyAPI_FUNC(PyObject *) PyType_GetModule(struct _typeobject *); +PyAPI_FUNC(void *) PyType_GetModuleState(struct _typeobject *); +#endif /* Generic type check */ PyAPI_FUNC(int) PyType_IsSubtype(PyTypeObject *, PyTypeObject *); diff --git a/Include/pydebug.h b/Include/pydebug.h index bd4aafe3b49f83..78bcb118be4659 100644 --- a/Include/pydebug.h +++ b/Include/pydebug.h @@ -5,8 +5,6 @@ extern "C" { #endif -/* These global variable are defined in pylifecycle.c */ -/* XXX (ncoghlan): move these declarations to pylifecycle.h? 
*/ PyAPI_DATA(int) Py_DebugFlag; PyAPI_DATA(int) Py_VerboseFlag; PyAPI_DATA(int) Py_QuietFlag; diff --git a/Include/pyhash.h b/Include/pyhash.h index 2f398589cee7ef..4437b870332bde 100644 --- a/Include/pyhash.h +++ b/Include/pyhash.h @@ -9,6 +9,8 @@ extern "C" { #ifndef Py_LIMITED_API PyAPI_FUNC(Py_hash_t) _Py_HashDouble(double); PyAPI_FUNC(Py_hash_t) _Py_HashPointer(const void*); +// Similar to _Py_HashPointer(), but don't replace -1 with -2 +PyAPI_FUNC(Py_hash_t) _Py_HashPointerRaw(const void*); PyAPI_FUNC(Py_hash_t) _Py_HashBytes(const void*, Py_ssize_t); #endif diff --git a/Include/pystate.h b/Include/pystate.h index 34cad02c3a930d..bae440778b261a 100644 --- a/Include/pystate.h +++ b/Include/pystate.h @@ -18,7 +18,7 @@ struct _is; /* struct _ts is defined in cpython/pystate.h */ typedef struct _ts PyThreadState; -/* struct _is is defined in internal/pycore_pystate.h */ +/* struct _is is defined in internal/pycore_interp.h */ typedef struct _is PyInterpreterState; PyAPI_FUNC(PyInterpreterState *) PyInterpreterState_New(void); diff --git a/Lib/ast.py b/Lib/ast.py index 5c68c4a66e1dd0..7a43581c0e6ce6 100644 --- a/Lib/ast.py +++ b/Lib/ast.py @@ -62,11 +62,12 @@ def literal_eval(node_or_string): node_or_string = parse(node_or_string, mode='eval') if isinstance(node_or_string, Expression): node_or_string = node_or_string.body + def _raise_malformed_node(node): + raise ValueError(f'malformed node or string: {node!r}') def _convert_num(node): - if isinstance(node, Constant): - if type(node.value) in (int, float, complex): - return node.value - raise ValueError('malformed node or string: ' + repr(node)) + if not isinstance(node, Constant) or type(node.value) not in (int, float, complex): + _raise_malformed_node(node) + return node.value def _convert_signed_num(node): if isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)): operand = _convert_num(node.operand) @@ -88,6 +89,8 @@ def _convert(node): node.func.id == 'set' and node.args == node.keywords == []): return set() elif isinstance(node, Dict): + if len(node.keys) != len(node.values): + _raise_malformed_node(node) return dict(zip(map(_convert, node.keys), map(_convert, node.values))) elif isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)): diff --git a/Lib/codeop.py b/Lib/codeop.py index 082285f94fe847..835e68c09ba272 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -112,7 +112,8 @@ def compile_command(source, filename="", symbol="single"): source -- the source string; may contain \n characters filename -- optional filename from which source was read; default "" - symbol -- optional grammar start symbol; "single" (default) or "eval" + symbol -- optional grammar start symbol; "single" (default), "exec" + or "eval" Return value / exceptions raised: diff --git a/Lib/compileall.py b/Lib/compileall.py index abe6cffce59c5f..fe7f450c55e1c5 100644 --- a/Lib/compileall.py +++ b/Lib/compileall.py @@ -15,6 +15,7 @@ import importlib.util import py_compile import struct +import filecmp from functools import partial from pathlib import Path @@ -47,7 +48,7 @@ def _walk_dir(dir, maxlevels, quiet=0): def compile_dir(dir, maxlevels=None, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, workers=1, invalidation_mode=None, *, stripdir=None, - prependdir=None, limit_sl_dest=None): + prependdir=None, limit_sl_dest=None, hardlink_dupes=False): """Byte-compile all modules in the given directory tree. 
Arguments (only dir is required): @@ -70,6 +71,7 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False, after stripdir limit_sl_dest: ignore symlinks if they are pointing outside of the defined path + hardlink_dupes: hardlink duplicated pyc files """ ProcessPoolExecutor = None if ddir is not None and (stripdir is not None or prependdir is not None): @@ -104,7 +106,8 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False, invalidation_mode=invalidation_mode, stripdir=stripdir, prependdir=prependdir, - limit_sl_dest=limit_sl_dest), + limit_sl_dest=limit_sl_dest, + hardlink_dupes=hardlink_dupes), files) success = min(results, default=True) else: @@ -112,14 +115,15 @@ def compile_dir(dir, maxlevels=None, ddir=None, force=False, if not compile_file(file, ddir, force, rx, quiet, legacy, optimize, invalidation_mode, stripdir=stripdir, prependdir=prependdir, - limit_sl_dest=limit_sl_dest): + limit_sl_dest=limit_sl_dest, + hardlink_dupes=hardlink_dupes): success = False return success def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1, invalidation_mode=None, *, stripdir=None, prependdir=None, - limit_sl_dest=None): + limit_sl_dest=None, hardlink_dupes=False): """Byte-compile one file. Arguments (only fullname is required): @@ -140,6 +144,7 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, after stripdir limit_sl_dest: ignore symlinks if they are pointing outside of the defined path. + hardlink_dupes: hardlink duplicated pyc files """ if ddir is not None and (stripdir is not None or prependdir is not None): @@ -176,6 +181,14 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, if isinstance(optimize, int): optimize = [optimize] + # Use set() to remove duplicates. + # Use sorted() to create pyc files in a deterministic order. 
+ optimize = sorted(set(optimize)) + + if hardlink_dupes and len(optimize) < 2: + raise ValueError("Hardlinking of duplicated bytecode makes sense " + "only for more than one optimization level") + if rx is not None: mo = rx.search(fullname) if mo: @@ -220,10 +233,16 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, if not quiet: print('Compiling {!r}...'.format(fullname)) try: - for opt_level, cfile in opt_cfiles.items(): + for index, opt_level in enumerate(optimize): + cfile = opt_cfiles[opt_level] ok = py_compile.compile(fullname, cfile, dfile, True, optimize=opt_level, invalidation_mode=invalidation_mode) + if index > 0 and hardlink_dupes: + previous_cfile = opt_cfiles[optimize[index - 1]] + if filecmp.cmp(cfile, previous_cfile, shallow=False): + os.unlink(cfile) + os.link(previous_cfile, cfile) except py_compile.PyCompileError as err: success = False if quiet >= 2: @@ -352,6 +371,9 @@ def main(): 'Python interpreter itself (specified by -O).')) parser.add_argument('-e', metavar='DIR', dest='limit_sl_dest', help='Ignore symlinks pointing outsite of the DIR') + parser.add_argument('--hardlink-dupes', action='store_true', + dest='hardlink_dupes', + help='Hardlink duplicated pyc files') args = parser.parse_args() compile_dests = args.compile_dest @@ -371,6 +393,10 @@ def main(): if args.opt_levels is None: args.opt_levels = [-1] + if len(args.opt_levels) == 1 and args.hardlink_dupes: + parser.error(("Hardlinking of duplicated bytecode makes sense " + "only for more than one optimization level.")) + if args.ddir is not None and ( args.stripdir is not None or args.prependdir is not None ): @@ -404,7 +430,8 @@ def main(): stripdir=args.stripdir, prependdir=args.prependdir, optimize=args.opt_levels, - limit_sl_dest=args.limit_sl_dest): + limit_sl_dest=args.limit_sl_dest, + hardlink_dupes=args.hardlink_dupes): success = False else: if not compile_dir(dest, maxlevels, args.ddir, @@ -414,7 +441,8 @@ def main(): stripdir=args.stripdir, prependdir=args.prependdir, optimize=args.opt_levels, - limit_sl_dest=args.limit_sl_dest): + limit_sl_dest=args.limit_sl_dest, + hardlink_dupes=args.hardlink_dupes): success = False return success else: diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py index 1b939cbd5db2bf..5d2e69e3e6a8f6 100644 --- a/Lib/distutils/tests/__init__.py +++ b/Lib/distutils/tests/__init__.py @@ -15,6 +15,7 @@ import os import sys import unittest +import warnings from test.support import run_unittest @@ -22,6 +23,7 @@ def test_suite(): + old_filters = warnings.filters[:] suite = unittest.TestSuite() for fn in os.listdir(here): if fn.startswith("test") and fn.endswith(".py"): @@ -29,6 +31,10 @@ def test_suite(): __import__(modname) module = sys.modules[modname] suite.addTest(module.test_suite()) + # bpo-40055: Save/restore warnings filters to leave them unchanged. + # Importing tests imports docutils which imports pkg_resources which adds a + # warnings filter. + warnings.filters[:] = old_filters return suite diff --git a/Lib/email/contentmanager.py b/Lib/email/contentmanager.py index b904ded94c92ef..2b4b8757f46f62 100644 --- a/Lib/email/contentmanager.py +++ b/Lib/email/contentmanager.py @@ -146,13 +146,13 @@ def embedded_body(lines): return linesep.join(lines) + linesep def normal_body(lines): return b'\n'.join(lines) + b'\n' if cte==None: # Use heuristics to decide on the "best" encoding. 
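As a rough usage sketch for the hardlink_dupes option added to compileall above (the directory name is illustrative, and hard links require filesystem support):

    import compileall

    # Compile for three optimization levels and hardlink byte-identical .pyc files.
    compileall.compile_dir('myproject/', quiet=True,
                           optimize=[0, 1, 2], hardlink_dupes=True)

    # Command-line form added by the same change:
    #   python -m compileall -o 0 -o 1 -o 2 --hardlink-dupes myproject/
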
- try: - return '7bit', normal_body(lines).decode('ascii') - except UnicodeDecodeError: - pass - if (policy.cte_type == '8bit' and - max(len(x) for x in lines) <= policy.max_line_length): - return '8bit', normal_body(lines).decode('ascii', 'surrogateescape') + if max(len(x) for x in lines) <= policy.max_line_length: + try: + return '7bit', normal_body(lines).decode('ascii') + except UnicodeDecodeError: + pass + if policy.cte_type == '8bit': + return '8bit', normal_body(lines).decode('ascii', 'surrogateescape') sniff = embedded_body(lines[:10]) sniff_qp = quoprimime.body_encode(sniff.decode('latin-1'), policy.max_line_length) diff --git a/Lib/fnmatch.py b/Lib/fnmatch.py index b98e6413295e1c..0eb1802bdb53c5 100644 --- a/Lib/fnmatch.py +++ b/Lib/fnmatch.py @@ -16,6 +16,12 @@ __all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] +# Build a thread-safe incrementing counter to help create unique regexp group +# names across calls. +from itertools import count +_nextgroupnum = count().__next__ +del count + def fnmatch(name, pat): """Test whether FILENAME matches PATTERN. @@ -77,15 +83,19 @@ def translate(pat): There is no way to quote meta-characters. """ + STAR = object() + res = [] + add = res.append i, n = 0, len(pat) - res = '' while i < n: c = pat[i] i = i+1 if c == '*': - res = res + '.*' + # compress consecutive `*` into one + if (not res) or res[-1] is not STAR: + add(STAR) elif c == '?': - res = res + '.' + add('.') elif c == '[': j = i if j < n and pat[j] == '!': @@ -95,7 +105,7 @@ def translate(pat): while j < n and pat[j] != ']': j = j+1 if j >= n: - res = res + '\\[' + add('\\[') else: stuff = pat[i:j] if '--' not in stuff: @@ -122,7 +132,52 @@ def translate(pat): stuff = '^' + stuff[1:] elif stuff[0] in ('^', '['): stuff = '\\' + stuff - res = '%s[%s]' % (res, stuff) + add(f'[{stuff}]') + else: + add(re.escape(c)) + assert i == n + + # Deal with STARs. + inp = res + res = [] + add = res.append + i, n = 0, len(inp) + # Fixed pieces at the start? + while i < n and inp[i] is not STAR: + add(inp[i]) + i += 1 + # Now deal with STAR fixed STAR fixed ... + # For an interior `STAR fixed` pairing, we want to do a minimal + # .*? match followed by `fixed`, with no possibility of backtracking. + # We can't spell that directly, but can trick it into working by matching + # .*?fixed + # in a lookahead assertion, save the matched part in a group, then + # consume that group via a backreference. If the overall match fails, + # the lookahead assertion won't try alternatives. So the translation is: + # (?=(?P.*?fixed))(?P=name) + # Group names are created as needed: g0, g1, g2, ... + # The numbers are obtained from _nextgroupnum() to ensure they're unique + # across calls and across threads. This is because people rely on the + # undocumented ability to join multiple translate() results together via + # "|" to build large regexps matching "one of many" shell patterns. 
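To make the lookahead/backreference trick described above concrete, this is roughly what translate() produces for a pattern with one interior fixed part once this change is in place (the group number comes from the shared counter, so it varies between calls):

    import fnmatch, re

    pat = fnmatch.translate('*a*')
    # Roughly: r'(?s:(?=(?P<g0>.*?a))(?P=g0).*)\Z'   (group name may be g1, g2, ...)

    assert re.match(pat, 'xxx-a-yyy')
    assert re.match(pat, 'a')
    assert not re.match(pat, 'xyz')
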
+ while i < n: + assert inp[i] is STAR + i += 1 + if i == n: + add(".*") + break + assert inp[i] is not STAR + fixed = [] + while i < n and inp[i] is not STAR: + fixed.append(inp[i]) + i += 1 + fixed = "".join(fixed) + if i == n: + add(".*") + add(fixed) else: - res = res + re.escape(c) - return r'(?s:%s)\Z' % res + groupnum = _nextgroupnum() + add(f"(?=(?P.*?{fixed}))(?P=g{groupnum})") + assert i == n + res = "".join(res) + return fr'(?s:{res})\Z' diff --git a/Lib/functools.py b/Lib/functools.py index f05b106b62c007..87c7d87438998b 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -10,7 +10,7 @@ # See C source code for _functools credits/copyright __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', - 'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', + 'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce', 'TopologicalSorter', 'CycleError', 'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod', 'cached_property'] @@ -888,6 +888,15 @@ def cache_clear(): pass +################################################################################ +### cache -- simplified access to the infinity cache +################################################################################ + +def cache(user_function, /): + 'Simple lightweight unbounded cache. Sometimes called "memoize".' + return lru_cache(maxsize=None)(user_function) + + ################################################################################ ### singledispatch() - single-dispatch generic function decorator ################################################################################ diff --git a/Lib/gettext.py b/Lib/gettext.py index b98f501884b75a..77b67aef4204c9 100644 --- a/Lib/gettext.py +++ b/Lib/gettext.py @@ -46,7 +46,6 @@ # find this format documented anywhere. 
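The functools hunk above adds cache() as a thin wrapper around lru_cache(maxsize=None); a minimal usage sketch:

    from functools import cache

    @cache
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(15)            # runs in linear time thanks to memoization
    fib.cache_info()   # CacheInfo(hits=..., misses=16, maxsize=None, currsize=16)
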
-import locale import os import re import sys @@ -210,6 +209,7 @@ def func(n): def _expand_lang(loc): + import locale loc = locale.normalize(loc) COMPONENT_CODESET = 1 << 0 COMPONENT_TERRITORY = 1 << 1 @@ -278,6 +278,7 @@ def lgettext(self, message): import warnings warnings.warn('lgettext() is deprecated, use gettext() instead', DeprecationWarning, 2) + import locale if self._fallback: with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'.*\blgettext\b.*', @@ -299,6 +300,7 @@ def lngettext(self, msgid1, msgid2, n): import warnings warnings.warn('lngettext() is deprecated, use ngettext() instead', DeprecationWarning, 2) + import locale if self._fallback: with warnings.catch_warnings(): warnings.filterwarnings('ignore', r'.*\blngettext\b.*', @@ -462,6 +464,7 @@ def lgettext(self, message): import warnings warnings.warn('lgettext() is deprecated, use gettext() instead', DeprecationWarning, 2) + import locale missing = object() tmsg = self._catalog.get(message, missing) if tmsg is missing: @@ -476,6 +479,7 @@ def lngettext(self, msgid1, msgid2, n): import warnings warnings.warn('lngettext() is deprecated, use ngettext() instead', DeprecationWarning, 2) + import locale try: tmsg = self._catalog[(msgid1, self.plural(n))] except KeyError: @@ -668,6 +672,7 @@ def ldgettext(domain, message): import warnings warnings.warn('ldgettext() is deprecated, use dgettext() instead', DeprecationWarning, 2) + import locale codeset = _localecodesets.get(domain) try: with warnings.catch_warnings(): @@ -695,6 +700,7 @@ def ldngettext(domain, msgid1, msgid2, n): import warnings warnings.warn('ldngettext() is deprecated, use dngettext() instead', DeprecationWarning, 2) + import locale codeset = _localecodesets.get(domain) try: with warnings.catch_warnings(): diff --git a/Lib/idlelib/zzdummy.py b/Lib/idlelib/zzdummy.py index 8084499646653d..3c4b1d23b0d379 100644 --- a/Lib/idlelib/zzdummy.py +++ b/Lib/idlelib/zzdummy.py @@ -28,7 +28,7 @@ def z_in_event(self, event): text = self.text text.undo_block_start() for line in range(1, text.index('end')): - text.insert('%d.0', ztest) + text.insert('%d.0', ztext) text.undo_block_stop() return "break" diff --git a/Lib/importlib/_common.py b/Lib/importlib/_common.py new file mode 100644 index 00000000000000..ba7cbac3c9bfda --- /dev/null +++ b/Lib/importlib/_common.py @@ -0,0 +1,72 @@ +import os +import pathlib +import zipfile +import tempfile +import functools +import contextlib + + +def from_package(package): + """ + Return a Traversable object for the given package. + + """ + spec = package.__spec__ + return from_traversable_resources(spec) or fallback_resources(spec) + + +def from_traversable_resources(spec): + """ + If the spec.loader implements TraversableResources, + directly or implicitly, it will have a ``files()`` method. + """ + with contextlib.suppress(AttributeError): + return spec.loader.files() + + +def fallback_resources(spec): + package_directory = pathlib.Path(spec.origin).parent + try: + archive_path = spec.loader.archive + rel_path = package_directory.relative_to(archive_path) + return zipfile.Path(archive_path, str(rel_path) + '/') + except Exception: + pass + return package_directory + + +@contextlib.contextmanager +def _tempfile(reader, suffix=''): + # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' + # blocks due to the need to close the temporary file to work on Windows + # properly. 
+ fd, raw_path = tempfile.mkstemp(suffix=suffix) + try: + os.write(fd, reader()) + os.close(fd) + yield pathlib.Path(raw_path) + finally: + try: + os.remove(raw_path) + except FileNotFoundError: + pass + + +@functools.singledispatch +@contextlib.contextmanager +def as_file(path): + """ + Given a Traversable object, return that object as a + path on the local file system in a context manager. + """ + with _tempfile(path.read_bytes, suffix=path.name) as local: + yield local + + +@as_file.register(pathlib.Path) +@contextlib.contextmanager +def _(path): + """ + Degenerate behavior for pathlib.Path objects. + """ + yield path diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index b1b5ccce4bd35d..b8a9bb1a21ef77 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -14,6 +14,7 @@ _frozen_importlib_external = _bootstrap_external import abc import warnings +from typing import Protocol, runtime_checkable def _register(abstract_cls, *classes): @@ -386,3 +387,88 @@ def contents(self): _register(ResourceReader, machinery.SourceFileLoader) + + +@runtime_checkable +class Traversable(Protocol): + """ + An object with a subset of pathlib.Path methods suitable for + traversing directories and opening files. + """ + + @abc.abstractmethod + def iterdir(self): + """ + Yield Traversable objects in self + """ + + @abc.abstractmethod + def read_bytes(self): + """ + Read contents of self as bytes + """ + + @abc.abstractmethod + def read_text(self, encoding=None): + """ + Read contents of self as bytes + """ + + @abc.abstractmethod + def is_dir(self): + """ + Return True if self is a dir + """ + + @abc.abstractmethod + def is_file(self): + """ + Return True if self is a file + """ + + @abc.abstractmethod + def joinpath(self, child): + """ + Return Traversable child in self + """ + + @abc.abstractmethod + def __truediv__(self, child): + """ + Return Traversable child in self + """ + + @abc.abstractmethod + def open(self, mode='r', *args, **kwargs): + """ + mode may be 'r' or 'rb' to open as text or binary. Return a handle + suitable for reading (same as pathlib.Path.open). + + When opening as text, accepts encoding parameters such as those + accepted by io.TextIOWrapper. + """ + + @abc.abstractproperty + def name(self): + # type: () -> str + """ + The base name of this object without any parent references. + """ + + +class TraversableResources(ResourceReader): + @abc.abstractmethod + def files(self): + """Return a Traversable object for the loaded package.""" + + def open_resource(self, resource): + return self.files().joinpath(resource).open('rb') + + def resource_path(self, resource): + raise FileNotFoundError(resource) + + def is_resource(self, path): + return self.files().joinpath(path).isfile() + + def contents(self): + return (item.name for item in self.files().iterdir()) diff --git a/Lib/importlib/resources.py b/Lib/importlib/resources.py index f51886557466c7..b803a01c91d652 100644 --- a/Lib/importlib/resources.py +++ b/Lib/importlib/resources.py @@ -1,14 +1,15 @@ import os -import tempfile from . import abc as resources_abc +from . 
import _common +from ._common import as_file from contextlib import contextmanager, suppress from importlib import import_module from importlib.abc import ResourceLoader from io import BytesIO, TextIOWrapper from pathlib import Path from types import ModuleType -from typing import Iterable, Iterator, Optional, Union # noqa: F401 +from typing import ContextManager, Iterable, Optional, Union from typing import cast from typing.io import BinaryIO, TextIO @@ -16,7 +17,9 @@ __all__ = [ 'Package', 'Resource', + 'as_file', 'contents', + 'files', 'is_resource', 'open_binary', 'open_text', @@ -30,24 +33,23 @@ Resource = Union[str, os.PathLike] +def _resolve(name) -> ModuleType: + """If name is a string, resolve to a module.""" + if hasattr(name, '__spec__'): + return name + return import_module(name) + + def _get_package(package) -> ModuleType: """Take a package name or module object and return the module. - If a name, the module is imported. If the passed or imported module + If a name, the module is imported. If the resolved module object is not a package, raise an exception. """ - if hasattr(package, '__spec__'): - if package.__spec__.submodule_search_locations is None: - raise TypeError('{!r} is not a package'.format( - package.__spec__.name)) - else: - return package - else: - module = import_module(package) - if module.__spec__.submodule_search_locations is None: - raise TypeError('{!r} is not a package'.format(package)) - else: - return module + module = _resolve(package) + if module.__spec__.submodule_search_locations is None: + raise TypeError('{!r} is not a package'.format(package)) + return module def _normalize_path(path) -> str: @@ -58,8 +60,7 @@ def _normalize_path(path) -> str: parent, file_name = os.path.split(path) if parent: raise ValueError('{!r} must be only a file name'.format(path)) - else: - return file_name + return file_name def _get_resource_reader( @@ -88,8 +89,8 @@ def open_binary(package: Package, resource: Resource) -> BinaryIO: reader = _get_resource_reader(package) if reader is not None: return reader.open_resource(resource) - _check_location(package) - absolute_package_path = os.path.abspath(package.__spec__.origin) + absolute_package_path = os.path.abspath( + package.__spec__.origin or 'non-existent file') package_path = os.path.dirname(absolute_package_path) full_path = os.path.join(package_path, resource) try: @@ -108,8 +109,7 @@ def open_binary(package: Package, resource: Resource) -> BinaryIO: message = '{!r} resource not found in {!r}'.format( resource, package_name) raise FileNotFoundError(message) - else: - return BytesIO(data) + return BytesIO(data) def open_text(package: Package, @@ -117,39 +117,12 @@ def open_text(package: Package, encoding: str = 'utf-8', errors: str = 'strict') -> TextIO: """Return a file-like object opened for text reading of the resource.""" - resource = _normalize_path(resource) - package = _get_package(package) - reader = _get_resource_reader(package) - if reader is not None: - return TextIOWrapper(reader.open_resource(resource), encoding, errors) - _check_location(package) - absolute_package_path = os.path.abspath(package.__spec__.origin) - package_path = os.path.dirname(absolute_package_path) - full_path = os.path.join(package_path, resource) - try: - return open(full_path, mode='r', encoding=encoding, errors=errors) - except OSError: - # Just assume the loader is a resource loader; all the relevant - # importlib.machinery loaders are and an AttributeError for - # get_data() will make it clear what is needed from the loader. 
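Because the Traversable ABC added to importlib.abc above is a runtime-checkable Protocol, isinstance() only checks that the listed methods and attributes exist; for example, with this change in place a plain pathlib.Path already satisfies it (the new test_files test relies on the same property):

    import pathlib
    from importlib.abc import Traversable

    # Structural check: pathlib.Path provides iterdir, read_text, joinpath, etc.
    assert isinstance(pathlib.Path('.'), Traversable)
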
- loader = cast(ResourceLoader, package.__spec__.loader) - data = None - if hasattr(package.__spec__.loader, 'get_data'): - with suppress(OSError): - data = loader.get_data(full_path) - if data is None: - package_name = package.__spec__.name - message = '{!r} resource not found in {!r}'.format( - resource, package_name) - raise FileNotFoundError(message) - else: - return TextIOWrapper(BytesIO(data), encoding, errors) + return TextIOWrapper( + open_binary(package, resource), encoding=encoding, errors=errors) def read_binary(package: Package, resource: Resource) -> bytes: """Return the binary contents of the resource.""" - resource = _normalize_path(resource) - package = _get_package(package) with open_binary(package, resource) as fp: return fp.read() @@ -163,14 +136,20 @@ def read_text(package: Package, The decoding-related arguments have the same semantics as those of bytes.decode(). """ - resource = _normalize_path(resource) - package = _get_package(package) with open_text(package, resource, encoding, errors) as fp: return fp.read() -@contextmanager -def path(package: Package, resource: Resource) -> Iterator[Path]: +def files(package: Package) -> resources_abc.Traversable: + """ + Get a Traversable resource from a package + """ + return _common.from_package(_get_package(package)) + + +def path( + package: Package, resource: Resource, + ) -> 'ContextManager[Path]': """A context manager providing a file path object to the resource. If the resource does not already exist on its own on the file system, @@ -179,39 +158,23 @@ def path(package: Package, resource: Resource) -> Iterator[Path]: raised if the file was deleted prior to the context manager exiting). """ - resource = _normalize_path(resource) - package = _get_package(package) - reader = _get_resource_reader(package) - if reader is not None: - try: - yield Path(reader.resource_path(resource)) - return - except FileNotFoundError: - pass - else: - _check_location(package) - # Fall-through for both the lack of resource_path() *and* if - # resource_path() raises FileNotFoundError. - package_directory = Path(package.__spec__.origin).parent - file_path = package_directory / resource - if file_path.exists(): - yield file_path - else: - with open_binary(package, resource) as fp: - data = fp.read() - # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try' - # blocks due to the need to close the temporary file to work on - # Windows properly. 
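The new files() entry point above returns a Traversable for a package, and as_file() materializes a resource on disk only while it is needed (extracting to a temporary file for zipped packages). A sketch, with hypothetical package and file names:

    from importlib import resources

    # Traverse package data without assuming it lives on the file system.
    text = resources.files('mypkg').joinpath('data.txt').read_text(encoding='utf-8')

    # Get a concrete pathlib.Path for APIs that insist on a real file.
    with resources.as_file(resources.files('mypkg') / 'data.txt') as path:
        print(path.read_bytes())
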
- fd, raw_path = tempfile.mkstemp() - try: - os.write(fd, data) - os.close(fd) - yield Path(raw_path) - finally: - try: - os.remove(raw_path) - except FileNotFoundError: - pass + reader = _get_resource_reader(_get_package(package)) + return ( + _path_from_reader(reader, resource) + if reader else + _common.as_file(files(package).joinpath(_normalize_path(resource))) + ) + + +@contextmanager +def _path_from_reader(reader, resource): + norm_resource = _normalize_path(resource) + with suppress(FileNotFoundError): + yield Path(reader.resource_path(norm_resource)) + return + opener_reader = reader.open_resource(norm_resource) + with _common._tempfile(opener_reader.read, suffix=norm_resource) as res: + yield res def is_resource(package: Package, name: str) -> bool: @@ -224,17 +187,10 @@ def is_resource(package: Package, name: str) -> bool: reader = _get_resource_reader(package) if reader is not None: return reader.is_resource(name) - try: - package_contents = set(contents(package)) - except (NotADirectoryError, FileNotFoundError): - return False + package_contents = set(contents(package)) if name not in package_contents: return False - # Just because the given file_name lives as an entry in the package's - # contents doesn't necessarily mean it's a resource. Directories are not - # resources, so let's try to find out if it's a directory or not. - path = Path(package.__spec__.origin).parent / name - return path.is_file() + return (_common.from_package(package) / name).is_file() def contents(package: Package) -> Iterable[str]: @@ -249,10 +205,11 @@ def contents(package: Package) -> Iterable[str]: if reader is not None: return reader.contents() # Is the package a namespace package? By definition, namespace packages - # cannot have resources. We could use _check_location() and catch the - # exception, but that's extra work, so just inline the check. - elif package.__spec__.origin is None or not package.__spec__.has_location: + # cannot have resources. + namespace = ( + package.__spec__.origin is None or + package.__spec__.origin == 'namespace' + ) + if namespace or not package.__spec__.has_location: return () - else: - package_directory = Path(package.__spec__.origin).parent - return os.listdir(package_directory) + return list(item.name for item in _common.from_package(package).iterdir()) diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 439f2418174686..6e5a754c2acf1e 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -12,7 +12,6 @@ import functools -import types IPV4LENGTH = 32 IPV6LENGTH = 128 @@ -1125,8 +1124,6 @@ def is_loopback(self): return (self.network_address.is_loopback and self.broadcast_address.is_loopback) - __class_getitem__ = classmethod(types.GenericAlias) - class _BaseV4: """Base IPv4 object. 
@@ -1446,8 +1443,6 @@ def with_hostmask(self): return '%s/%s' % (self._string_from_ip_int(self._ip), self.hostmask) - __class_getitem__ = classmethod(types.GenericAlias) - class IPv4Network(_BaseV4, _BaseNetwork): @@ -2156,8 +2151,6 @@ def is_unspecified(self): def is_loopback(self): return self._ip == 1 and self.network.is_loopback - __class_getitem__ = classmethod(types.GenericAlias) - class IPv6Network(_BaseV6, _BaseNetwork): diff --git a/Lib/linecache.py b/Lib/linecache.py index ddd0abf2cf01d9..fa5dbd09eab869 100644 --- a/Lib/linecache.py +++ b/Lib/linecache.py @@ -71,10 +71,10 @@ def checkcache(filename=None): try: stat = os.stat(fullname) except OSError: - del cache[filename] + cache.pop(filename, None) continue if size != stat.st_size or mtime != stat.st_mtime: - del cache[filename] + cache.pop(filename, None) def updatecache(filename, module_globals=None): @@ -84,7 +84,7 @@ def updatecache(filename, module_globals=None): if filename in cache: if len(cache[filename]) != 1: - del cache[filename] + cache.pop(filename, None) if not filename or (filename.startswith('<') and filename.endswith('>')): return [] diff --git a/Lib/platform.py b/Lib/platform.py index 049c2c6ef25a1b..e9f50ab622d316 100755 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -798,9 +798,10 @@ def __iter__(self): ) def __getitem__(self, key): - if key == 5: - return self.processor - return super().__getitem__(key) + return tuple(iter(self))[key] + + def __len__(self): + return len(tuple(iter(self))) _uname_cache = None diff --git a/Lib/random.py b/Lib/random.py index f2c4f39fb6079d..75f70d5d699ed9 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -331,7 +331,7 @@ def shuffle(self, x, random=None): j = _int(random() * (i+1)) x[i], x[j] = x[j], x[i] - def sample(self, population, k): + def sample(self, population, k, *, counts=None): """Chooses k unique random elements from a population sequence or set. Returns a new list containing elements from the population while @@ -344,9 +344,21 @@ def sample(self, population, k): population contains repeats, then each occurrence is a possible selection in the sample. - To choose a sample in a range of integers, use range as an argument. - This is especially fast and space efficient for sampling from a - large population: sample(range(10000000), 60) + Repeated elements can be specified one at a time or with the optional + counts parameter. For example: + + sample(['red', 'blue'], counts=[4, 2], k=5) + + is equivalent to: + + sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5) + + To choose a sample from a range of integers, use range() for the + population argument. This is especially fast and space efficient + for sampling from a large population: + + sample(range(10000000), 60) + """ # Sampling without replacement entails tracking either potential @@ -379,8 +391,20 @@ def sample(self, population, k): population = tuple(population) if not isinstance(population, _Sequence): raise TypeError("Population must be a sequence. 
For dicts or sets, use sorted(d).") - randbelow = self._randbelow n = len(population) + if counts is not None: + cum_counts = list(_accumulate(counts)) + if len(cum_counts) != n: + raise ValueError('The number of counts does not match the population') + total = cum_counts.pop() + if not isinstance(total, int): + raise TypeError('Counts must be integers') + if total <= 0: + raise ValueError('Total of counts must be greater than zero') + selections = sample(range(total), k=k) + bisect = _bisect + return [population[bisect(cum_counts, s)] for s in selections] + randbelow = self._randbelow if not 0 <= k <= n: raise ValueError("Sample larger than population or is negative") result = [None] * k diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 9063b3d2d7b744..6b71adac4e4a6b 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -597,7 +597,7 @@ def test_empty_yield_from(self): empty_yield_from.body[0].body[0].value.value = None with self.assertRaises(ValueError) as cm: compile(empty_yield_from, "", "exec") - self.assertIn("field value is required", str(cm.exception)) + self.assertIn("field 'value' is required", str(cm.exception)) @support.cpython_only def test_issue31592(self): @@ -965,6 +965,12 @@ def test_literal_eval_complex(self): self.assertRaises(ValueError, ast.literal_eval, '3+(0+6j)') self.assertRaises(ValueError, ast.literal_eval, '-(3+6j)') + def test_literal_eval_malformed_dict_nodes(self): + malformed = ast.Dict(keys=[ast.Constant(1), ast.Constant(2)], values=[ast.Constant(3)]) + self.assertRaises(ValueError, ast.literal_eval, malformed) + malformed = ast.Dict(keys=[ast.Constant(1)], values=[ast.Constant(2), ast.Constant(3)]) + self.assertRaises(ValueError, ast.literal_eval, malformed) + def test_bad_integer(self): # issue13436: Bad error message with invalid numeric values body = [ast.ImportFrom(module='time', diff --git a/Lib/test/test_asyncio/test_tasks.py b/Lib/test/test_asyncio/test_tasks.py index 68f3b8cce9f65d..6eb6b46ec8af75 100644 --- a/Lib/test/test_asyncio/test_tasks.py +++ b/Lib/test/test_asyncio/test_tasks.py @@ -466,6 +466,33 @@ async def inner2(): t = outer() self.assertEqual(self.loop.run_until_complete(t), 1042) + def test_exception_chaining_after_await(self): + # Test that when awaiting on a task when an exception is already + # active, if the task raises an exception it will be chained + # with the original. 
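The random.sample() change above adds a counts parameter for sampling without replacement from a weighted population; following the new docstring, these two calls are equivalent:

    import random

    random.sample(['red', 'blue'], counts=[4, 2], k=5)
    random.sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5)
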
+ loop = asyncio.new_event_loop() + self.set_event_loop(loop) + + async def raise_error(): + raise ValueError + + async def run(): + try: + raise KeyError(3) + except Exception as exc: + task = self.new_task(loop, raise_error()) + try: + await task + except Exception as exc: + self.assertEqual(type(exc), ValueError) + chained = exc.__context__ + self.assertEqual((type(chained), chained.args), + (KeyError, (3,))) + + task = self.new_task(loop, run()) + loop.run_until_complete(task) + loop.close() + def test_cancel(self): def gen(): diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index f9578d3afa81f3..5c7526aa7ec29a 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -13,6 +13,8 @@ import time import unittest import weakref +import importlib.machinery +import importlib.util from test import support from test.support import MISSING_C_DOCSTRINGS from test.support.script_helper import assert_python_failure, assert_python_ok @@ -774,5 +776,76 @@ class PyMemDefaultTests(PyMemDebugTests): PYTHONMALLOC = '' +class Test_ModuleStateAccess(unittest.TestCase): + """Test access to module start (PEP 573)""" + + # The C part of the tests lives in _testmultiphase, in a module called + # _testmultiphase_meth_state_access. + # This module has multi-phase initialization, unlike _testcapi. + + def setUp(self): + fullname = '_testmultiphase_meth_state_access' # XXX + origin = importlib.util.find_spec('_testmultiphase').origin + loader = importlib.machinery.ExtensionFileLoader(fullname, origin) + spec = importlib.util.spec_from_loader(fullname, loader) + module = importlib.util.module_from_spec(spec) + loader.exec_module(module) + self.module = module + + def test_subclass_get_module(self): + """PyType_GetModule for defining_class""" + class StateAccessType_Subclass(self.module.StateAccessType): + pass + + instance = StateAccessType_Subclass() + self.assertIs(instance.get_defining_module(), self.module) + + def test_subclass_get_module_with_super(self): + class StateAccessType_Subclass(self.module.StateAccessType): + def get_defining_module(self): + return super().get_defining_module() + + instance = StateAccessType_Subclass() + self.assertIs(instance.get_defining_module(), self.module) + + def test_state_access(self): + """Checks methods defined with and without argument clinic + + This tests a no-arg method (get_count) and a method with + both a positional and keyword argument. 
+ """ + + a = self.module.StateAccessType() + b = self.module.StateAccessType() + + methods = { + 'clinic': a.increment_count_clinic, + 'noclinic': a.increment_count_noclinic, + } + + for name, increment_count in methods.items(): + with self.subTest(name): + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 0) + + increment_count() + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 1) + + increment_count(3) + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 4) + + increment_count(-2, twice=True) + self.assertEqual(a.get_count(), b.get_count()) + self.assertEqual(a.get_count(), 0) + + with self.assertRaises(TypeError): + increment_count(thrice=3) + + with self.assertRaises(TypeError): + increment_count(1, 2, 3) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index ee96473322dba0..724402533038d4 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -756,6 +756,17 @@ def test_argv0_normalization(self): self.assertEqual(proc.returncode, 0, proc) self.assertEqual(proc.stdout.strip(), b'0') + def test_parsing_error(self): + args = [sys.executable, '-I', '--unknown-option'] + proc = subprocess.run(args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True) + err_msg = "unknown option --unknown-option\nusage: " + self.assertTrue(proc.stderr.startswith(err_msg), proc.stderr) + self.assertNotEqual(proc.returncode, 0) + + @unittest.skipIf(interpreter_requires_environment(), 'Cannot run -I tests when PYTHON env vars are required.') class IgnoreEnvironmentTest(unittest.TestCase): diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index 1fc9500738f352..15fca7b8a5191e 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -633,7 +633,7 @@ def test_syntaxerror_multi_line_fstring(self): stderr.splitlines()[-3:], [ b' foo"""', - b' ^', + b' ^', b'SyntaxError: f-string: empty expression not allowed', ], ) @@ -648,7 +648,7 @@ def test_syntaxerror_invalid_escape_sequence_multi_line(self): self.assertEqual( stderr.splitlines()[-3:], [ b' foo = """\\q"""', - b' ^', + b' ^', b'SyntaxError: invalid escape sequence \\q' ], ) diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 1f27830ae50b84..0c5e362feea0ca 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -288,6 +288,15 @@ def test_invalid(self): ai("[i for i in range(10)] = (1, 2, 3)") + def test_invalid_exec(self): + ai = self.assertInvalid + ai("raise = 4", symbol="exec") + ai('def a-b', symbol='exec') + ai('await?', symbol='exec') + ai('=!=', symbol='exec') + ai('a await raise b', symbol='exec') + ai('a await raise b?+1', symbol='exec') + def test_filename(self): self.assertEqual(compile_command("a = 1\n", "abc").co_filename, compile("a = 1\n", "abc", 'single').co_filename) diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py index 72678945089f28..b4061b79357b87 100644 --- a/Lib/test/test_compileall.py +++ b/Lib/test/test_compileall.py @@ -1,16 +1,19 @@ -import sys import compileall +import contextlib +import filecmp import importlib.util -import test.test_importlib.util +import io +import itertools import os import pathlib import py_compile import shutil import struct +import sys import tempfile +import test.test_importlib.util import time import unittest -import io from unittest import mock, skipUnless try: @@ -26,6 +29,24 @@ from .test_py_compile import 
SourceDateEpochTestMeta +def get_pyc(script, opt): + if not opt: + # Replace None and 0 with '' + opt = '' + return importlib.util.cache_from_source(script, optimization=opt) + + +def get_pycs(script): + return [get_pyc(script, opt) for opt in (0, 1, 2)] + + +def is_hardlink(filename1, filename2): + """Returns True if two files have the same inode (hardlink)""" + inode1 = os.stat(filename1).st_ino + inode2 = os.stat(filename2).st_ino + return inode1 == inode2 + + class CompileallTestsBase: def setUp(self): @@ -825,6 +846,32 @@ def test_ignore_symlink_destination(self): self.assertTrue(os.path.isfile(allowed_bc)) self.assertFalse(os.path.isfile(prohibited_bc)) + def test_hardlink_bad_args(self): + # Bad arguments combination, hardlink deduplication make sense + # only for more than one optimization level + self.assertRunNotOK(self.directory, "-o 1", "--hardlink-dupes") + + def test_hardlink(self): + # 'a = 0' code produces the same bytecode for the 3 optimization + # levels. All three .pyc files must have the same inode (hardlinks). + # + # If deduplication is disabled, all pyc files must have different + # inodes. + for dedup in (True, False): + with tempfile.TemporaryDirectory() as path: + with self.subTest(dedup=dedup): + script = script_helper.make_script(path, "script", "a = 0") + pycs = get_pycs(script) + + args = ["-q", "-o 0", "-o 1", "-o 2"] + if dedup: + args.append("--hardlink-dupes") + self.assertRunOK(path, *args) + + self.assertEqual(is_hardlink(pycs[0], pycs[1]), dedup) + self.assertEqual(is_hardlink(pycs[1], pycs[2]), dedup) + self.assertEqual(is_hardlink(pycs[0], pycs[2]), dedup) + class CommandLineTestsWithSourceEpoch(CommandLineTestsBase, unittest.TestCase, @@ -841,5 +888,176 @@ class CommandLineTestsNoSourceEpoch(CommandLineTestsBase, +class HardlinkDedupTestsBase: + # Test hardlink_dupes parameter of compileall.compile_dir() + + def setUp(self): + self.path = None + + @contextlib.contextmanager + def temporary_directory(self): + with tempfile.TemporaryDirectory() as path: + self.path = path + yield path + self.path = None + + def make_script(self, code, name="script"): + return script_helper.make_script(self.path, name, code) + + def compile_dir(self, *, dedup=True, optimize=(0, 1, 2), force=False): + compileall.compile_dir(self.path, quiet=True, optimize=optimize, + hardlink_dupes=dedup, force=force) + + def test_bad_args(self): + # Bad arguments combination, hardlink deduplication make sense + # only for more than one optimization level + with self.temporary_directory(): + self.make_script("pass") + with self.assertRaises(ValueError): + compileall.compile_dir(self.path, quiet=True, optimize=0, + hardlink_dupes=True) + with self.assertRaises(ValueError): + # same optimization level specified twice: + # compile_dir() removes duplicates + compileall.compile_dir(self.path, quiet=True, optimize=[0, 0], + hardlink_dupes=True) + + def create_code(self, docstring=False, assertion=False): + lines = [] + if docstring: + lines.append("'module docstring'") + lines.append('x = 1') + if assertion: + lines.append("assert x == 1") + return '\n'.join(lines) + + def iter_codes(self): + for docstring in (False, True): + for assertion in (False, True): + code = self.create_code(docstring=docstring, assertion=assertion) + yield (code, docstring, assertion) + + def test_disabled(self): + # Deduplication disabled, no hardlinks + for code, docstring, assertion in self.iter_codes(): + with self.subTest(docstring=docstring, assertion=assertion): + with self.temporary_directory(): + script = 
self.make_script(code) + pycs = get_pycs(script) + self.compile_dir(dedup=False) + self.assertFalse(is_hardlink(pycs[0], pycs[1])) + self.assertFalse(is_hardlink(pycs[0], pycs[2])) + self.assertFalse(is_hardlink(pycs[1], pycs[2])) + + def check_hardlinks(self, script, docstring=False, assertion=False): + pycs = get_pycs(script) + self.assertEqual(is_hardlink(pycs[0], pycs[1]), + not assertion) + self.assertEqual(is_hardlink(pycs[0], pycs[2]), + not assertion and not docstring) + self.assertEqual(is_hardlink(pycs[1], pycs[2]), + not docstring) + + def test_hardlink(self): + # Test deduplication on all combinations + for code, docstring, assertion in self.iter_codes(): + with self.subTest(docstring=docstring, assertion=assertion): + with self.temporary_directory(): + script = self.make_script(code) + self.compile_dir() + self.check_hardlinks(script, docstring, assertion) + + def test_only_two_levels(self): + # Don't build the 3 optimization levels, but only 2 + for opts in ((0, 1), (1, 2), (0, 2)): + with self.subTest(opts=opts): + with self.temporary_directory(): + # code with no dostring and no assertion: + # same bytecode for all optimization levels + script = self.make_script(self.create_code()) + self.compile_dir(optimize=opts) + pyc1 = get_pyc(script, opts[0]) + pyc2 = get_pyc(script, opts[1]) + self.assertTrue(is_hardlink(pyc1, pyc2)) + + def test_duplicated_levels(self): + # compile_dir() must not fail if optimize contains duplicated + # optimization levels and/or if optimization levels are not sorted. + with self.temporary_directory(): + # code with no dostring and no assertion: + # same bytecode for all optimization levels + script = self.make_script(self.create_code()) + self.compile_dir(optimize=[1, 0, 1, 0]) + pyc1 = get_pyc(script, 0) + pyc2 = get_pyc(script, 1) + self.assertTrue(is_hardlink(pyc1, pyc2)) + + def test_recompilation(self): + # Test compile_dir() when pyc files already exists and the script + # content changed + with self.temporary_directory(): + script = self.make_script("a = 0") + self.compile_dir() + # All three levels have the same inode + self.check_hardlinks(script) + + pycs = get_pycs(script) + inode = os.stat(pycs[0]).st_ino + + # Change of the module content + script = self.make_script("print(0)") + + # Recompilation without -o 1 + self.compile_dir(optimize=[0, 2], force=True) + + # opt-1.pyc should have the same inode as before and others should not + self.assertEqual(inode, os.stat(pycs[1]).st_ino) + self.assertTrue(is_hardlink(pycs[0], pycs[2])) + self.assertNotEqual(inode, os.stat(pycs[2]).st_ino) + # opt-1.pyc and opt-2.pyc have different content + self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True)) + + def test_import(self): + # Test that import updates a single pyc file when pyc files already + # exists and the script content changed + with self.temporary_directory(): + script = self.make_script(self.create_code(), name="module") + self.compile_dir() + # All three levels have the same inode + self.check_hardlinks(script) + + pycs = get_pycs(script) + inode = os.stat(pycs[0]).st_ino + + # Change of the module content + script = self.make_script("print(0)", name="module") + + # Import the module in Python with -O (optimization level 1) + script_helper.assert_python_ok( + "-O", "-c", "import module", __isolated=False, PYTHONPATH=self.path + ) + + # Only opt-1.pyc is changed + self.assertEqual(inode, os.stat(pycs[0]).st_ino) + self.assertEqual(inode, os.stat(pycs[2]).st_ino) + self.assertFalse(is_hardlink(pycs[1], pycs[2])) + # opt-1.pyc and 
opt-2.pyc have different content + self.assertFalse(filecmp.cmp(pycs[1], pycs[2], shallow=True)) + + +class HardlinkDedupTestsWithSourceEpoch(HardlinkDedupTestsBase, + unittest.TestCase, + metaclass=SourceDateEpochTestMeta, + source_date_epoch=True): + pass + + +class HardlinkDedupTestsNoSourceEpoch(HardlinkDedupTestsBase, + unittest.TestCase, + metaclass=SourceDateEpochTestMeta, + source_date_epoch=False): + pass + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_dictcomps.py b/Lib/test/test_dictcomps.py index 16aa651b93c46b..472e3dfa0d8a0a 100644 --- a/Lib/test/test_dictcomps.py +++ b/Lib/test/test_dictcomps.py @@ -77,7 +77,7 @@ def test_illegal_assignment(self): compile("{x: y for y, x in ((1, 2), (3, 4))} = 5", "", "exec") - with self.assertRaisesRegex(SyntaxError, "cannot assign"): + with self.assertRaisesRegex(SyntaxError, "illegal expression"): compile("{x: y for y, x in ((1, 2), (3, 4))} += 5", "", "exec") diff --git a/Lib/test/test_email/test_contentmanager.py b/Lib/test/test_email/test_contentmanager.py index 169058eac83da3..64dca2d017e629 100644 --- a/Lib/test/test_email/test_contentmanager.py +++ b/Lib/test/test_email/test_contentmanager.py @@ -329,6 +329,21 @@ def test_set_text_charset_latin_1(self): self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content) self.assertEqual(m.get_content(), content) + def test_set_text_plain_long_line_heuristics(self): + m = self._make_message() + content = ("Simple but long message that is over 78 characters" + " long to force transfer encoding.\n") + raw_data_manager.set_content(m, content) + self.assertEqual(str(m), textwrap.dedent("""\ + Content-Type: text/plain; charset="utf-8" + Content-Transfer-Encoding: quoted-printable + + Simple but long message that is over 78 characters long to = + force transfer encoding. 
+ """)) + self.assertEqual(m.get_payload(decode=True).decode('utf-8'), content) + self.assertEqual(m.get_content(), content) + def test_set_text_short_line_minimal_non_ascii_heuristics(self): m = self._make_message() content = "et là il est monté sur moi et il commence à m'éto.\n" diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py index 9ef8eb1187486f..bebad3106119ec 100644 --- a/Lib/test/test_eof.py +++ b/Lib/test/test_eof.py @@ -26,6 +26,15 @@ def test_EOFS(self): else: raise support.TestFailed + def test_eof_with_line_continuation(self): + expect = "unexpected EOF while parsing (, line 1)" + try: + compile('"\\xhh" \\', '', 'exec', dont_inherit=True) + except SyntaxError as msg: + self.assertEqual(str(msg), expect) + else: + raise support.TestFailed + def test_line_continuation_EOF(self): """A continuation at the end of input must be an error; bpo2180.""" expect = 'unexpected EOF while parsing (, line 1)' diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index d83b73ab340c35..efd77fdbaabe1b 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -228,6 +228,8 @@ def bar(): def baz(): '''quux''' """, 9, 20) + check("pass\npass\npass\n(1+)\npass\npass\npass", 4, 4) + check("(1+)", 1, 4) # Errors thrown by symtable.c check('x = [(yield i) for i in range(3)]', 1, 5) @@ -242,16 +244,13 @@ def baz(): check('from __future__ import doesnt_exist', 1, 1) check('from __future__ import braces', 1, 1) check('x=1\nfrom __future__ import division', 2, 1) + check('foo(1=2)', 1, 5) + check('def f():\n x, y: int', 2, 3) + check('[*x for x in xs]', 1, 2) + check('foo(x for x in range(10), 100)', 1, 5) + check('(yield i) = 2', 1, 1 if support.use_old_parser() else 2) check('def f(*):\n pass', 1, 7 if support.use_old_parser() else 8) - - @support.skip_if_new_parser("Pegen column offsets might be different") - def testSyntaxErrorOffsetCustom(self): - self.check('for 1 in []: pass', 1, 5) - self.check('[*x for x in xs]', 1, 2) - self.check('def f():\n x, y: int', 2, 3) - self.check('(yield i) = 2', 1, 1) - self.check('foo(x for x in range(10), 100)', 1, 5) - self.check('foo(1=2)', 1, 5) + check('for 1 in []: pass', 1, 5 if support.use_old_parser() else 7) @cpython_only def testSettingException(self): diff --git a/Lib/test/test_fnmatch.py b/Lib/test/test_fnmatch.py index 55f9f0d3a5425a..10668e4f6103aa 100644 --- a/Lib/test/test_fnmatch.py +++ b/Lib/test/test_fnmatch.py @@ -45,6 +45,13 @@ def test_fnmatch(self): check('\nfoo', 'foo*', False) check('\n', '*') + def test_slow_fnmatch(self): + check = self.check_match + check('a' * 50, '*a*a*a*a*a*a*a*a*a*a') + # The next "takes forever" if the regexp translation is + # straightforward. See bpo-40480. 
+ check('a' * 50 + 'b', '*a*a*a*a*a*a*a*a*a*a', False) + def test_mix_bytes_str(self): self.assertRaises(TypeError, fnmatch, 'test', b'*') self.assertRaises(TypeError, fnmatch, b'test', '*') @@ -99,6 +106,7 @@ def test_warnings(self): class TranslateTestCase(unittest.TestCase): def test_translate(self): + import re self.assertEqual(translate('*'), r'(?s:.*)\Z') self.assertEqual(translate('?'), r'(?s:.)\Z') self.assertEqual(translate('a?b*'), r'(?s:a.b.*)\Z') @@ -107,7 +115,34 @@ def test_translate(self): self.assertEqual(translate('[!x]'), r'(?s:[^x])\Z') self.assertEqual(translate('[^x]'), r'(?s:[\^x])\Z') self.assertEqual(translate('[x'), r'(?s:\[x)\Z') - + # from the docs + self.assertEqual(translate('*.txt'), r'(?s:.*\.txt)\Z') + # squash consecutive stars + self.assertEqual(translate('*********'), r'(?s:.*)\Z') + self.assertEqual(translate('A*********'), r'(?s:A.*)\Z') + self.assertEqual(translate('*********A'), r'(?s:.*A)\Z') + self.assertEqual(translate('A*********?[?]?'), r'(?s:A.*.[?].)\Z') + # fancy translation to prevent exponential-time match failure + t = translate('**a*a****a') + digits = re.findall(r'\d+', t) + self.assertEqual(len(digits), 4) + self.assertEqual(digits[0], digits[1]) + self.assertEqual(digits[2], digits[3]) + g1 = f"g{digits[0]}" # e.g., group name "g4" + g2 = f"g{digits[2]}" # e.g., group name "g5" + self.assertEqual(t, + fr'(?s:(?=(?P<{g1}>.*?a))(?P={g1})(?=(?P<{g2}>.*?a))(?P={g2}).*a)\Z') + # and try pasting multiple translate results - it's an undocumented + # feature that this works; all the pain of generating unique group + # names across calls exists to support this + r1 = translate('**a**a**a*') + r2 = translate('**b**b**b*') + r3 = translate('*c*c*c*') + fatre = "|".join([r1, r2, r3]) + self.assertTrue(re.match(fatre, 'abaccad')) + self.assertTrue(re.match(fatre, 'abxbcab')) + self.assertTrue(re.match(fatre, 'cbabcaxc')) + self.assertFalse(re.match(fatre, 'dabccbad')) class FilterTestCase(unittest.TestCase): diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index ac5aa9a76efe7c..e0bb5b56b2614f 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -583,7 +583,7 @@ def test_missing_expression(self): ]) # Different error message is raised for other whitespace characters. - self.assertAllRaise(SyntaxError, 'invalid character in identifier', + self.assertAllRaise(SyntaxError, r"invalid non-printable character U\+00A0", ["f'''{\xa0}'''", "\xa0", ]) diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 9503f4086b1cb9..e122fe0b333402 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -14,6 +14,8 @@ import unittest import unittest.mock import os +import weakref +import gc from weakref import proxy import contextlib @@ -1430,6 +1432,25 @@ def check_order_with_hash_seed(seed): self.assertEqual(run1, run2) +class TestCache: + # This tests that the pass-through is working as designed. + # The underlying functionality is tested in TestLRU. 
+ + def test_cache(self): + @self.module.cache + def fib(n): + if n < 2: + return n + return fib(n-1) + fib(n-2) + self.assertEqual([fib(n) for n in range(16)], + [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]) + self.assertEqual(fib.cache_info(), + self.module._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16)) + fib.cache_clear() + self.assertEqual(fib.cache_info(), + self.module._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0)) + + class TestLRU: def test_lru(self): @@ -1938,6 +1959,35 @@ def f(): return 1 self.assertEqual(f.cache_parameters(), {'maxsize': 1000, "typed": True}) + def test_lru_cache_weakrefable(self): + @self.module.lru_cache + def test_function(x): + return x + + class A: + @self.module.lru_cache + def test_method(self, x): + return (self, x) + + @staticmethod + @self.module.lru_cache + def test_staticmethod(x): + return (self, x) + + refs = [weakref.ref(test_function), + weakref.ref(A.test_method), + weakref.ref(A.test_staticmethod)] + + for ref in refs: + self.assertIsNotNone(ref()) + + del A + del test_function + gc.collect() + + for ref in refs: + self.assertIsNone(ref()) + @py_functools.lru_cache() def py_cached_func(x, y): diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 5824ecd7c37e88..348ae15aa6532b 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -318,7 +318,7 @@ def g(): class GeneratorThrowTest(unittest.TestCase): - def test_exception_context_set(self): + def test_exception_context_with_yield(self): def f(): try: raise KeyError('a') @@ -332,6 +332,23 @@ def f(): context = cm.exception.__context__ self.assertEqual((type(context), context.args), (KeyError, ('a',))) + def test_exception_context_with_yield_from(self): + def f(): + yield + + def g(): + try: + raise KeyError('a') + except Exception: + yield from f() + + gen = g() + gen.send(None) + with self.assertRaises(ValueError) as cm: + gen.throw(ValueError) + context = cm.exception.__context__ + self.assertEqual((type(context), context.args), (KeyError, ('a',))) + def test_throw_after_none_exc_type(self): def g(): try: @@ -342,9 +359,6 @@ def g(): try: yield except Exception: - # Without the `gi_exc_state.exc_type != Py_None` in - # _gen_throw(), this line was causing a crash ("Segmentation - # fault (core dumped)") on e.g. Fedora 32. raise RuntimeError gen = g() @@ -1907,7 +1921,7 @@ def printsolution(self, x): >>> def f(): (yield bar) += y Traceback (most recent call last): ... 
-SyntaxError: cannot assign to yield expression +SyntaxError: 'yield expression' is an illegal expression for augmented assignment Now check some throw() conditions: diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py index 024b2f6ed6636b..4f3798e8f87d8f 100644 --- a/Lib/test/test_genericalias.py +++ b/Lib/test/test_genericalias.py @@ -17,8 +17,6 @@ from difflib import SequenceMatcher from filecmp import dircmp from fileinput import FileInput -from mmap import mmap -from ipaddress import IPv4Network, IPv4Interface, IPv6Network, IPv6Interface from itertools import chain from http.cookies import Morsel from multiprocessing.managers import ValueProxy @@ -49,7 +47,6 @@ class BaseTest(unittest.TestCase): def test_subscriptable(self): for t in (type, tuple, list, dict, set, frozenset, enumerate, - mmap, defaultdict, deque, SequenceMatcher, dircmp, @@ -74,7 +71,6 @@ def test_subscriptable(self): Sequence, MutableSequence, MappingProxyType, AsyncGeneratorType, DirEntry, - IPv4Network, IPv4Interface, IPv6Network, IPv6Interface, chain, TemporaryDirectory, SpooledTemporaryFile, Queue, SimpleQueue, diff --git a/Lib/test/test_genexps.py b/Lib/test/test_genexps.py index 86e4e195f55ec5..5c1a209b0e9908 100644 --- a/Lib/test/test_genexps.py +++ b/Lib/test/test_genexps.py @@ -158,7 +158,7 @@ >>> (y for y in (1,2)) += 10 Traceback (most recent call last): ... - SyntaxError: cannot assign to generator expression + SyntaxError: 'generator expression' is an illegal expression for augmented assignment ########### Tests borrowed from or inspired by test_generators.py ############ diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index 922a5166ec12f7..e1a402e2b463b1 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -1,7 +1,7 @@ # Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. 
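Several test updates above track the more specific SyntaxError wording now reported for invalid augmented-assignment targets; a small sketch of the message exercised by the updated doctests:

    try:
        compile("(y for y in (1, 2)) += 10", "<example>", "exec")
    except SyntaxError as exc:
        print(exc.msg)
        # "'generator expression' is an illegal expression for augmented assignment"
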
-from test.support import check_syntax_error, check_syntax_warning +from test.support import check_syntax_error, check_syntax_warning, use_old_parser import inspect import unittest import sys @@ -801,6 +801,23 @@ def test_del_stmt(self): del abc del x, y, (z, xyz) + x, y, z = "xyz" + del x + del y, + del (z) + del () + + a, b, c, d, e, f, g = "abcdefg" + del a, (b, c), (d, (e, f)) + + a, b, c, d, e, f, g = "abcdefg" + del a, [b, c], (d, [e, f]) + + abcd = list("abcd") + del abcd[1:2] + + compile("del a, (b[0].c, (d.e, f.g[1:2])), [h.i.j], ()", "", "exec") + def test_pass_stmt(self): # 'pass' pass @@ -1262,7 +1279,7 @@ def __getitem__(self, i): def test_try(self): ### try_stmt: 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite] ### | 'try' ':' suite 'finally' ':' suite - ### except_clause: 'except' [expr ['as' expr]] + ### except_clause: 'except' [expr ['as' NAME]] try: 1/0 except ZeroDivisionError: @@ -1280,6 +1297,9 @@ def test_try(self): except (EOFError, TypeError, ZeroDivisionError) as msg: pass try: pass finally: pass + with self.assertRaises(SyntaxError): + compile("try:\n pass\nexcept Exception as a.b:\n pass", "?", "exec") + compile("try:\n pass\nexcept Exception as a[b]:\n pass", "?", "exec") def test_suite(self): # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT @@ -1694,6 +1714,70 @@ def __exit__(self, *args): with manager() as x, manager(): pass + if not use_old_parser(): + test_cases = [ + """if 1: + with ( + manager() + ): + pass + """, + """if 1: + with ( + manager() as x + ): + pass + """, + """if 1: + with ( + manager() as (x, y), + manager() as z, + ): + pass + """, + """if 1: + with ( + manager(), + manager() + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() as y + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() as y, + manager() as z, + ): + pass + """, + """if 1: + with ( + manager() as x, + manager() as y, + manager(), + ): + pass + """, + ] + for case in test_cases: + with self.subTest(case=case): + compile(case, "", "exec") + + def test_if_else_expr(self): # Test ifelse expressions in various cases def _checkeval(msg, ret): diff --git a/Lib/test/test_importlib/test_files.py b/Lib/test/test_importlib/test_files.py new file mode 100644 index 00000000000000..fa7af82bf0c28b --- /dev/null +++ b/Lib/test/test_importlib/test_files.py @@ -0,0 +1,39 @@ +import typing +import unittest + +from importlib import resources +from importlib.abc import Traversable +from . import data01 +from . 
import util + + +class FilesTests: + def test_read_bytes(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_bytes() + assert actual == b'Hello, UTF-8 world!\n' + + def test_read_text(self): + files = resources.files(self.data) + actual = files.joinpath('utf-8.file').read_text() + assert actual == 'Hello, UTF-8 world!\n' + + @unittest.skipUnless( + hasattr(typing, 'runtime_checkable'), + "Only suitable when typing supports runtime_checkable", + ) + def test_traversable(self): + assert isinstance(resources.files(self.data), Traversable) + + +class OpenDiskTests(FilesTests, unittest.TestCase): + def setUp(self): + self.data = data01 + + +class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/Lib/test/test_importlib/test_path.py b/Lib/test/test_importlib/test_path.py index 2d3dcda7ed2e79..c4e7285411322c 100644 --- a/Lib/test/test_importlib/test_path.py +++ b/Lib/test/test_importlib/test_path.py @@ -17,6 +17,7 @@ def test_reading(self): # Test also implicitly verifies the returned object is a pathlib.Path # instance. with resources.path(self.data, 'utf-8.file') as path: + self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) # pathlib.Path.read_text() was introduced in Python 3.5. with path.open('r', encoding='utf-8') as file: text = file.read() diff --git a/Lib/test/test_json/test_recursion.py b/Lib/test/test_json/test_recursion.py index 877dc448b14c15..543c62839b2cdd 100644 --- a/Lib/test/test_json/test_recursion.py +++ b/Lib/test/test_json/test_recursion.py @@ -52,7 +52,7 @@ def default(self, o): return [JSONTestObject] else: return 'JSONTestObject' - return pyjson.JSONEncoder.default(o) + return self.json.JSONEncoder.default(o) enc = RecursiveJSONEncoder() self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"') diff --git a/Lib/test/test_peg_generator/test_pegen.py b/Lib/test/test_peg_generator/test_pegen.py index 0a2a6d4ae16019..30e1b675643b23 100644 --- a/Lib/test/test_peg_generator/test_pegen.py +++ b/Lib/test/test_peg_generator/test_pegen.py @@ -540,6 +540,33 @@ def test_missing_start(self) -> None: with self.assertRaises(GrammarError): parser_class = make_parser(grammar) + def test_invalid_rule_name(self) -> None: + grammar = """ + start: _a b + _a: 'a' + b: 'b' + """ + with self.assertRaisesRegex(GrammarError, "cannot start with underscore: '_a'"): + parser_class = make_parser(grammar) + + def test_invalid_variable_name(self) -> None: + grammar = """ + start: a b + a: _x='a' + b: 'b' + """ + with self.assertRaisesRegex(GrammarError, "cannot start with underscore: '_x'"): + parser_class = make_parser(grammar) + + def test_invalid_variable_name_in_temporal_rule(self) -> None: + grammar = """ + start: a b + a: (_x='a' | 'b') | 'c' + b: 'b' + """ + with self.assertRaisesRegex(GrammarError, "cannot start with underscore: '_x'"): + parser_class = make_parser(grammar) + class TestGrammarVisitor: class Visitor(GrammarVisitor): diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index d6939fdbf618a6..9614e45799dd8c 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -35,6 +35,9 @@ ('attribute_simple', 'a.b'), ('attributes_subscript', 'a.b[0]'), ('augmented_assignment', 'x += 42'), + ('augmented_assignment_attribute', 'a.b.c += 42'), + ('augmented_assignment_paren', '(x) += 42'), + ('augmented_assignment_paren_subscript', '(x[0]) -= 42'), ('binop_add', '1 + 1'), ('binop_add_multiple', '1 + 1 + 1 + 1'), ('binop_all', 
'1 + 2 * 5 + 3 ** 2 - -3'), @@ -547,6 +550,11 @@ def f(*a, b): with a as (x, y): pass '''), + ('with_list_target', + ''' + with a as [x, y]: + pass + '''), ('yield', 'yield'), ('yield_expr', 'yield a'), ('yield_from', 'yield from a'), @@ -560,6 +568,9 @@ def f(*a, b): ("annotation_tuple", "(a,): int"), ("annotation_tuple_without_paren", "a,: int"), ("assignment_keyword", "a = if"), + ("augmented_assignment_list", "[a, b] += 1"), + ("augmented_assignment_tuple", "a, b += 1"), + ("augmented_assignment_tuple_paren", "(a, b) += (1, 2)"), ("comprehension_lambda", "(a for a in lambda: b)"), ("comprehension_else", "(a for a in b if c else d"), ("del_call", "del a()"), @@ -589,6 +600,20 @@ def f(): a """), ("not_terminated_string", "a = 'example"), + ("try_except_attribute_target", + """ + try: + pass + except Exception as a.b: + pass + """), + ("try_except_subscript_target", + """ + try: + pass + except Exception as a[0]: + pass + """), ] FAIL_SPECIALIZED_MESSAGE_CASES = [ @@ -600,7 +625,7 @@ def f(): ("(a, b): int", "only single target (not tuple) can be annotated"), ("[a, b]: int", "only single target (not list) can be annotated"), ("a(): int", "illegal target for annotation"), - ("1 += 1", "cannot assign to literal"), + ("1 += 1", "'literal' is an illegal expression for augmented assignment"), ("pass\n pass", "unexpected indent"), ("def f():\npass", "expected an indented block"), ("def f(*): pass", "named arguments must follow bare *"), @@ -609,6 +634,9 @@ def f(): ("lambda *: pass", "named arguments must follow bare *"), ("lambda *,: pass", "named arguments must follow bare *"), ("lambda *, **a: pass", "named arguments must follow bare *"), + ("f(g()=2", "expression cannot contain assignment, perhaps you meant \"==\"?"), + ("f(a, b, *c, d.e=2", "expression cannot contain assignment, perhaps you meant \"==\"?"), + ("f(*a, **b, c=0, d[1]=3)", "expression cannot contain assignment, perhaps you meant \"==\"?"), ] GOOD_BUT_FAIL_TEST_CASES = [ diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index 7664b38a720a7e..a5c35dff79b8bc 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -154,11 +154,26 @@ def test_uname(self): res = platform.uname() self.assertTrue(any(res)) self.assertEqual(res[0], res.system) + self.assertEqual(res[-6], res.system) self.assertEqual(res[1], res.node) + self.assertEqual(res[-5], res.node) self.assertEqual(res[2], res.release) + self.assertEqual(res[-4], res.release) self.assertEqual(res[3], res.version) + self.assertEqual(res[-3], res.version) self.assertEqual(res[4], res.machine) + self.assertEqual(res[-2], res.machine) self.assertEqual(res[5], res.processor) + self.assertEqual(res[-1], res.processor) + self.assertEqual(len(res), 6) + + def test_uname_cast_to_tuple(self): + res = platform.uname() + expected = ( + res.system, res.node, res.release, res.version, res.machine, + res.processor, + ) + self.assertEqual(tuple(res), expected) @unittest.skipIf(sys.platform in ['win32', 'OpenVMS'], "uname -p not used") def test_uname_processor(self): diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py index bb95ca0884a516..a3710f4aa48a68 100644 --- a/Lib/test/test_random.py +++ b/Lib/test/test_random.py @@ -9,7 +9,7 @@ from math import log, exp, pi, fsum, sin, factorial from test import support from fractions import Fraction - +from collections import Counter class TestBasicOps: # Superclass with tests common to all generators. 
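Reviewer note: the hunk below exercises the new counts parameter of random.sample(). A minimal usage sketch of the documented equivalence with sampling from a repeated-element population, which is what test_sample_counts_equivalence verifies (the values here are illustrative):

    import random

    colors = ['red', 'green', 'blue']
    counts = [3, 2, 1]

    random.seed(12345)
    with_counts = random.sample(colors, counts=counts, k=4)

    # The same selections come from the expanded population
    # ['red', 'red', 'red', 'green', 'green', 'blue'] under the same seed.
    expanded = [c for c, n in zip(colors, counts) for _ in range(n)]
    random.seed(12345)
    assert random.sample(expanded, k=4) == with_counts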
@@ -161,6 +161,77 @@ def test_sample_on_sets(self): population = {10, 20, 30, 40, 50, 60, 70} self.gen.sample(population, k=5) + def test_sample_with_counts(self): + sample = self.gen.sample + + # General case + colors = ['red', 'green', 'blue', 'orange', 'black', 'brown', 'amber'] + counts = [500, 200, 20, 10, 5, 0, 1 ] + k = 700 + summary = Counter(sample(colors, counts=counts, k=k)) + self.assertEqual(sum(summary.values()), k) + for color, weight in zip(colors, counts): + self.assertLessEqual(summary[color], weight) + self.assertNotIn('brown', summary) + + # Case that exhausts the population + k = sum(counts) + summary = Counter(sample(colors, counts=counts, k=k)) + self.assertEqual(sum(summary.values()), k) + for color, weight in zip(colors, counts): + self.assertLessEqual(summary[color], weight) + self.assertNotIn('brown', summary) + + # Case with population size of 1 + summary = Counter(sample(['x'], counts=[10], k=8)) + self.assertEqual(summary, Counter(x=8)) + + # Case with all counts equal. + nc = len(colors) + summary = Counter(sample(colors, counts=[10]*nc, k=10*nc)) + self.assertEqual(summary, Counter(10*colors)) + + # Test error handling + with self.assertRaises(TypeError): + sample(['red', 'green', 'blue'], counts=10, k=10) # counts not iterable + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[-3, -7, -8], k=2) # counts are negative + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[0, 0, 0], k=2) # counts are zero + with self.assertRaises(ValueError): + sample(['red', 'green'], counts=[10, 10], k=21) # population too small + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[1, 2], k=2) # too few counts + with self.assertRaises(ValueError): + sample(['red', 'green', 'blue'], counts=[1, 2, 3, 4], k=2) # too many counts + + def test_sample_counts_equivalence(self): + # Test the documented strong equivalence to a sample with repeated elements. + # We run this test on random.Random() which makes deterministic selections + # for a given seed value. 
+ sample = random.sample + seed = random.seed + + colors = ['red', 'green', 'blue', 'orange', 'black', 'amber'] + counts = [500, 200, 20, 10, 5, 1 ] + k = 700 + seed(8675309) + s1 = sample(colors, counts=counts, k=k) + seed(8675309) + expanded = [color for (color, count) in zip(colors, counts) for i in range(count)] + self.assertEqual(len(expanded), sum(counts)) + s2 = sample(expanded, k=k) + self.assertEqual(s1, s2) + + pop = 'abcdefghi' + counts = [10, 9, 8, 7, 6, 5, 4, 3, 2] + seed(8675309) + s1 = ''.join(sample(pop, counts=counts, k=30)) + expanded = ''.join([letter for (letter, count) in zip(pop, counts) for i in range(count)]) + seed(8675309) + s2 = ''.join(sample(expanded, k=30)) + self.assertEqual(s1, s2) + def test_choices(self): choices = self.gen.choices data = ['red', 'green', 'blue', 'yellow'] diff --git a/Lib/test/test_source_encoding.py b/Lib/test/test_source_encoding.py index a0bd741c36ac29..5ca43461d9940d 100644 --- a/Lib/test/test_source_encoding.py +++ b/Lib/test/test_source_encoding.py @@ -57,6 +57,9 @@ def test_issue7820(self): # one byte in common with the UTF-16-LE BOM self.assertRaises(SyntaxError, eval, b'\xff\x20') + # one byte in common with the UTF-8 BOM + self.assertRaises(SyntaxError, eval, b'\xef\x20') + # two bytes in common with the UTF-8 BOM self.assertRaises(SyntaxError, eval, b'\xef\xbb\x20') diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 0e46a7119f0efc..5c3b1fdd8b110d 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -1004,6 +1004,10 @@ def test_nan(self): x = statistics._convert(nan, type(nan)) self.assertTrue(_nan_equal(x, nan)) + def test_invalid_input_type(self): + with self.assertRaises(TypeError): + statistics._convert(None, float) + class FailNegTest(unittest.TestCase): """Test _fail_neg private function.""" @@ -1033,6 +1037,50 @@ def test_error_msg(self): self.assertEqual(errmsg, msg) +class FindLteqTest(unittest.TestCase): + # Test _find_lteq private function. + + def test_invalid_input_values(self): + for a, x in [ + ([], 1), + ([1, 2], 3), + ([1, 3], 2) + ]: + with self.subTest(a=a, x=x): + with self.assertRaises(ValueError): + statistics._find_lteq(a, x) + + def test_locate_successfully(self): + for a, x, expected_i in [ + ([1, 1, 1, 2, 3], 1, 0), + ([0, 1, 1, 1, 2, 3], 1, 1), + ([1, 2, 3, 3, 3], 3, 2) + ]: + with self.subTest(a=a, x=x): + self.assertEqual(expected_i, statistics._find_lteq(a, x)) + + +class FindRteqTest(unittest.TestCase): + # Test _find_rteq private function. 
+ + def test_invalid_input_values(self): + for a, l, x in [ + ([1], 2, 1), + ([1, 3], 0, 2) + ]: + with self.assertRaises(ValueError): + statistics._find_rteq(a, l, x) + + def test_locate_successfully(self): + for a, l, x, expected_i in [ + ([1, 1, 1, 2, 3], 0, 1, 2), + ([0, 1, 1, 1, 2, 3], 0, 1, 3), + ([1, 2, 3, 3, 3], 0, 3, 4) + ]: + with self.subTest(a=a, l=l, x=x): + self.assertEqual(expected_i, statistics._find_rteq(a, l, x)) + + # === Tests for public functions === class UnivariateCommonMixin: @@ -1476,6 +1524,18 @@ def test_negative_error(self): with self.subTest(values=values): self.assertRaises(exc, self.func, values) + def test_invalid_type_error(self): + # Test error is raised when input contains invalid type(s) + for data in [ + ['3.14'], # single string + ['1', '2', '3'], # multiple strings + [1, '2', 3, '4', 5], # mixed strings and valid integers + [2.3, 3.4, 4.5, '5.6'] # only one string and valid floats + ]: + with self.subTest(data=data): + with self.assertRaises(TypeError): + self.func(data) + def test_ints(self): # Test harmonic mean with ints. data = [2, 4, 4, 8, 16, 16] diff --git a/Lib/test/test_string_literals.py b/Lib/test/test_string_literals.py index 5b5477d14d467d..9565ee2485afd1 100644 --- a/Lib/test/test_string_literals.py +++ b/Lib/test/test_string_literals.py @@ -118,8 +118,7 @@ def test_eval_str_invalid_escape(self): eval("'''\n\\z'''") self.assertEqual(len(w), 1) self.assertEqual(w[0].filename, '') - if use_old_parser(): - self.assertEqual(w[0].lineno, 1) + self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: warnings.simplefilter('error', category=DeprecationWarning) @@ -128,8 +127,8 @@ def test_eval_str_invalid_escape(self): exc = cm.exception self.assertEqual(w, []) self.assertEqual(exc.filename, '') - if use_old_parser(): - self.assertEqual(exc.lineno, 1) + self.assertEqual(exc.lineno, 1) + self.assertEqual(exc.offset, 1) def test_eval_str_raw(self): self.assertEqual(eval(""" r'x' """), 'x') diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 0c0fc48e0d3de1..60c7d9fd3868e8 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -63,10 +63,9 @@ Traceback (most recent call last): SyntaxError: cannot assign to function call -# Pegen does not support this yet -# >>> del f() -# Traceback (most recent call last): -# SyntaxError: cannot delete function call +>>> del f() +Traceback (most recent call last): +SyntaxError: cannot delete function call >>> a + 1 = 2 Traceback (most recent call last): @@ -101,35 +100,54 @@ This test just checks a couple of cases rather than enumerating all of them. 
-# All of the following also produce different error messages with pegen -# >>> (a, "b", c) = (1, 2, 3) -# Traceback (most recent call last): -# SyntaxError: cannot assign to literal +>>> (a, "b", c) = (1, 2, 3) +Traceback (most recent call last): +SyntaxError: cannot assign to literal -# >>> (a, True, c) = (1, 2, 3) -# Traceback (most recent call last): -# SyntaxError: cannot assign to True +>>> (a, True, c) = (1, 2, 3) +Traceback (most recent call last): +SyntaxError: cannot assign to True >>> (a, __debug__, c) = (1, 2, 3) Traceback (most recent call last): SyntaxError: cannot assign to __debug__ -# >>> (a, *True, c) = (1, 2, 3) -# Traceback (most recent call last): -# SyntaxError: cannot assign to True +>>> (a, *True, c) = (1, 2, 3) +Traceback (most recent call last): +SyntaxError: cannot assign to True >>> (a, *__debug__, c) = (1, 2, 3) Traceback (most recent call last): SyntaxError: cannot assign to __debug__ -# >>> [a, b, c + 1] = [1, 2, 3] -# Traceback (most recent call last): -# SyntaxError: cannot assign to operator +>>> [a, b, c + 1] = [1, 2, 3] +Traceback (most recent call last): +SyntaxError: cannot assign to operator + +>>> [a, b[1], c + 1] = [1, 2, 3] +Traceback (most recent call last): +SyntaxError: cannot assign to operator + +>>> [a, b.c.d, c + 1] = [1, 2, 3] +Traceback (most recent call last): +SyntaxError: cannot assign to operator >>> a if 1 else b = 1 Traceback (most recent call last): SyntaxError: cannot assign to conditional expression +>>> a, b += 1, 2 +Traceback (most recent call last): +SyntaxError: 'tuple' is an illegal expression for augmented assignment + +>>> (a, b) += 1, 2 +Traceback (most recent call last): +SyntaxError: 'tuple' is an illegal expression for augmented assignment + +>>> [a, b] += 1, 2 +Traceback (most recent call last): +SyntaxError: 'list' is an illegal expression for augmented assignment + From compiler_complex_args(): >>> def f(None=1): @@ -335,16 +353,16 @@ >>> (x for x in x) += 1 Traceback (most recent call last): -SyntaxError: cannot assign to generator expression +SyntaxError: 'generator expression' is an illegal expression for augmented assignment >>> None += 1 Traceback (most recent call last): -SyntaxError: cannot assign to None +SyntaxError: 'None' is an illegal expression for augmented assignment >>> __debug__ += 1 Traceback (most recent call last): SyntaxError: cannot assign to __debug__ >>> f() += 1 Traceback (most recent call last): -SyntaxError: cannot assign to function call +SyntaxError: 'function call' is an illegal expression for augmented assignment Test continue in finally in weird combinations. 
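Reviewer note: the doctests above adopt the PEG parser's more specific messages for illegal augmented-assignment and delete targets. A short sketch of how to observe them at compile time, assuming the interpreter is running the new parser:

    for src in ("f() += 1", "a, b += 1, 2", "del f()"):
        try:
            compile(src, "<example>", "exec")
        except SyntaxError as exc:
            print(src, "->", exc.msg)
    # f() += 1      -> 'function call' is an illegal expression for augmented assignment
    # a, b += 1, 2  -> 'tuple' is an illegal expression for augmented assignment
    # del f()       -> cannot delete function call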
@@ -665,7 +683,7 @@ def _check_error(self, code, errtext, self.fail("SyntaxError is not a %s" % subclass.__name__) mo = re.search(errtext, str(err)) if mo is None: - self.fail("SyntaxError did not contain '%r'" % (errtext,)) + self.fail("SyntaxError did not contain %r" % (errtext,)) self.assertEqual(err.filename, filename) if lineno is not None: self.assertEqual(err.lineno, lineno) @@ -677,10 +695,37 @@ def _check_error(self, code, errtext, def test_assign_call(self): self._check_error("f() = 1", "assign") - @support.skip_if_new_parser("Pegen does not produce a specialized error " - "message yet") + @unittest.skipIf(support.use_old_parser(), "The old parser cannot generate these error messages") def test_assign_del(self): - self._check_error("del f()", "delete") + self._check_error("del (,)", "invalid syntax") + self._check_error("del 1", "delete literal") + self._check_error("del (1, 2)", "delete literal") + self._check_error("del None", "delete None") + self._check_error("del *x", "delete starred") + self._check_error("del (*x)", "delete starred") + self._check_error("del (*x,)", "delete starred") + self._check_error("del [*x,]", "delete starred") + self._check_error("del f()", "delete function call") + self._check_error("del f(a, b)", "delete function call") + self._check_error("del o.f()", "delete function call") + self._check_error("del a[0]()", "delete function call") + self._check_error("del x, f()", "delete function call") + self._check_error("del f(), x", "delete function call") + self._check_error("del [a, b, ((c), (d,), e.f())]", "delete function call") + self._check_error("del (a if True else b)", "delete conditional") + self._check_error("del +a", "delete operator") + self._check_error("del a, +b", "delete operator") + self._check_error("del a + b", "delete operator") + self._check_error("del (a + b, c)", "delete operator") + self._check_error("del (c[0], a + b)", "delete operator") + self._check_error("del a.b.c + 2", "delete operator") + self._check_error("del a.b.c[0] + 2", "delete operator") + self._check_error("del (a, b, (c, d.e.f + 2))", "delete operator") + self._check_error("del [a, b, (c, d.e.f[0] + 2)]", "delete operator") + self._check_error("del (a := 5)", "delete named expression") + # We don't have a special message for this, but make sure we don't + # report "cannot delete name" + self._check_error("del a += b", "invalid syntax") def test_global_param_err_first(self): source = """if 1: diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 91a645b460ec02..33b34593a0af97 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1322,7 +1322,7 @@ def delx(self): del self.__x '3P' # PyMappingMethods '10P' # PySequenceMethods '2P' # PyBufferProcs - '4P') + '5P') class newstyleclass(object): pass # Separate block for PyDictKeysObject with 8 keys and 5 entries check(newstyleclass, s + calcsize("2nP2n0P") + 8 + 5*calcsize("n2P")) diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index 7361d091cfbbef..f9a5f2fc53e1e9 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -58,13 +58,13 @@ def test_caret(self): SyntaxError) self.assertIn("^", err[2]) # third line has caret self.assertEqual(err[2].count('\n'), 1) # and no additional newline - self.assertEqual(err[1].find("+"), err[2].find("^")) # in the right place + self.assertEqual(err[1].find("+") + 1, err[2].find("^")) # in the right place err = self.get_exception_format(self.syntax_error_with_caret_non_ascii, SyntaxError) self.assertIn("^", err[2]) # third line 
has caret self.assertEqual(err[2].count('\n'), 1) # and no additional newline - self.assertEqual(err[1].find("+"), err[2].find("^")) # in the right place + self.assertEqual(err[1].find("+") + 1, err[2].find("^")) # in the right place def test_nocaret(self): exc = SyntaxError("error", ("x.py", 23, None, "bad syntax")) @@ -78,14 +78,13 @@ def test_bad_indentation(self): self.assertEqual(len(err), 4) self.assertEqual(err[1].strip(), "print(2)") self.assertIn("^", err[2]) - self.assertEqual(err[1].find(")"), err[2].find("^")) + self.assertEqual(err[1].find(")") + 1, err[2].find("^")) + # No caret for "unexpected indent" err = self.get_exception_format(self.syntax_error_bad_indentation2, IndentationError) - self.assertEqual(len(err), 4) + self.assertEqual(len(err), 3) self.assertEqual(err[1].strip(), "print(2)") - self.assertIn("^", err[2]) - self.assertEqual(err[1].find("p"), err[2].find("^")) def test_base_exception(self): # Test that exceptions derived from BaseException are formatted right @@ -656,7 +655,7 @@ def outer_raise(): self.assertIn('inner_raise() # Marker', blocks[2]) self.check_zero_div(blocks[2]) - @support.skip_if_new_parser("Pegen is arguably better here, so no need to fix this") + @unittest.skipIf(support.use_old_parser(), "Pegen is arguably better here, so no need to fix this") def test_syntax_error_offset_at_eol(self): # See #10186. def e(): @@ -666,7 +665,7 @@ def e(): def e(): exec("x = 5 | 4 |") msg = self.get_report(e).splitlines() - self.assertEqual(msg[-2], ' ^') + self.assertEqual(msg[-2], ' ^') def test_message_none(self): # A message that looks like "None" should not be treated specially @@ -679,6 +678,25 @@ def test_message_none(self): err = self.get_report(Exception('')) self.assertIn('Exception\n', err) + def test_syntax_error_various_offsets(self): + for offset in range(-5, 10): + for add in [0, 2]: + text = " "*add + "text%d" % offset + expected = [' File "file.py", line 1'] + if offset < 1: + expected.append(" %s" % text.lstrip()) + elif offset <= 6: + expected.append(" %s" % text.lstrip()) + expected.append(" %s^" % (" "*(offset-1))) + else: + expected.append(" %s" % text.lstrip()) + expected.append(" %s^" % (" "*5)) + expected.append("SyntaxError: msg") + expected.append("") + err = self.get_report(SyntaxError("msg", ("file.py", 1, offset+add, text))) + exp = "\n".join(expected) + self.assertEqual(exp, err) + class PyExcReportingTests(BaseExceptionReportingTests, unittest.TestCase): # diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py index f42238762ddcca..28ebfb6e603e36 100644 --- a/Lib/test/test_types.py +++ b/Lib/test/test_types.py @@ -627,6 +627,7 @@ def test_methods(self): '__iter__', '__len__', '__or__', + '__reversed__', '__ror__', 'copy', 'get', @@ -768,6 +769,14 @@ def test_iterators(self): self.assertEqual(set(view.values()), set(values)) self.assertEqual(set(view.items()), set(items)) + def test_reversed(self): + d = {'a': 1, 'b': 2, 'foo': 0, 'c': 3, 'd': 4} + mp = self.mappingproxy(d) + del d['foo'] + r = reversed(mp) + self.assertEqual(list(r), list('dcba')) + self.assertRaises(StopIteration, next, r) + def test_copy(self): original = {'key1': 27, 'key2': 51, 'key3': 93} view = self.mappingproxy(original) diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 21bc7c81f2a30e..f429e883b59538 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -10,7 +10,7 @@ from typing import TypeVar, AnyStr from typing import T, KT, VT # Not in __all__. 
from typing import Union, Optional, Literal -from typing import Tuple, List, MutableMapping +from typing import Tuple, List, Dict, MutableMapping from typing import Callable from typing import Generic, ClassVar, Final, final, Protocol from typing import cast, runtime_checkable @@ -3173,6 +3173,17 @@ def test_frozenset(self): def test_dict(self): self.assertIsSubclass(dict, typing.Dict) + def test_dict_subscribe(self): + K = TypeVar('K') + V = TypeVar('V') + self.assertEqual(Dict[K, V][str, int], Dict[str, int]) + self.assertEqual(Dict[K, int][str], Dict[str, int]) + self.assertEqual(Dict[str, V][int], Dict[str, int]) + self.assertEqual(Dict[K, List[V]][str, int], Dict[str, List[int]]) + self.assertEqual(Dict[K, List[int]][str], Dict[str, List[int]]) + self.assertEqual(Dict[K, list[V]][str, int], Dict[str, list[int]]) + self.assertEqual(Dict[K, list[int]][str], Dict[str, list[int]]) + def test_no_list_instantiation(self): with self.assertRaises(TypeError): typing.List() diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 28398896467898..2ee4e64d635303 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -720,6 +720,13 @@ def test_isidentifier(self): self.assertFalse("©".isidentifier()) self.assertFalse("0".isidentifier()) + @support.cpython_only + def test_isidentifier_legacy(self): + import _testcapi + u = '𝖀𝖓𝖎𝖈𝖔𝖉𝖊' + self.assertTrue(u.isidentifier()) + self.assertTrue(_testcapi.unicode_legacy_string(u).isidentifier()) + def test_isprintable(self): self.assertTrue("".isprintable()) self.assertTrue(" ".isprintable()) diff --git a/Lib/test/test_unicode_identifiers.py b/Lib/test/test_unicode_identifiers.py index 07332c4631903e..5b9ced5d1cb837 100644 --- a/Lib/test/test_unicode_identifiers.py +++ b/Lib/test/test_unicode_identifiers.py @@ -20,9 +20,11 @@ def test_non_bmp_normalized(self): def test_invalid(self): try: from test import badsyntax_3131 - except SyntaxError as s: - self.assertEqual(str(s), - "invalid character in identifier (badsyntax_3131.py, line 2)") + except SyntaxError as err: + self.assertEqual(str(err), + "invalid character '€' (U+20AC) (badsyntax_3131.py, line 2)") + self.assertEqual(err.lineno, 2) + self.assertEqual(err.offset, 1) else: self.fail("expected exception didn't occur") diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py index ac166ced38afbc..b1c92427dd270b 100644 --- a/Lib/test/test_uuid.py +++ b/Lib/test/test_uuid.py @@ -852,17 +852,6 @@ def test_netstat_getnode(self): node = self.uuid._netstat_getnode() self.check_node(node, 'netstat') - @unittest.skipUnless(os.name == 'nt', 'requires Windows') - def test_ipconfig_getnode(self): - node = self.uuid._ipconfig_getnode() - self.check_node(node, 'ipconfig') - - @unittest.skipUnless(importable('win32wnet'), 'requires win32wnet') - @unittest.skipUnless(importable('netbios'), 'requires netbios') - def test_netbios_getnode(self): - node = self.uuid._netbios_getnode() - self.check_node(node) - def test_random_getnode(self): node = self.uuid._random_getnode() # The multicast bit, i.e. 
the least significant bit of first octet, @@ -874,6 +863,13 @@ def test_random_getnode(self): node2 = self.uuid._random_getnode() self.assertNotEqual(node2, node, '%012x' % node) +class TestInternalsWithoutExtModule(BaseTestInternals, unittest.TestCase): + uuid = py_uuid + +@unittest.skipUnless(c_uuid, 'requires the C _uuid module') +class TestInternalsWithExtModule(BaseTestInternals, unittest.TestCase): + uuid = c_uuid + @unittest.skipUnless(os.name == 'posix', 'requires Posix') def test_unix_getnode(self): if not importable('_uuid') and not importable('ctypes'): @@ -885,19 +881,10 @@ def test_unix_getnode(self): self.check_node(node, 'unix') @unittest.skipUnless(os.name == 'nt', 'requires Windows') - @unittest.skipUnless(importable('ctypes'), 'requires ctypes') def test_windll_getnode(self): node = self.uuid._windll_getnode() self.check_node(node) -class TestInternalsWithoutExtModule(BaseTestInternals, unittest.TestCase): - uuid = py_uuid - -@unittest.skipUnless(c_uuid, 'requires the C _uuid module') -class TestInternalsWithExtModule(BaseTestInternals, unittest.TestCase): - uuid = c_uuid - - if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_weakref.py b/Lib/test/test_weakref.py index 563507fee3d7ea..56a42f055d0b54 100644 --- a/Lib/test/test_weakref.py +++ b/Lib/test/test_weakref.py @@ -411,6 +411,26 @@ def __iter__(self): # can be killed in the middle of the call "blech" in p + def test_proxy_reversed(self): + class MyObj: + def __len__(self): + return 3 + def __reversed__(self): + return iter('cba') + + obj = MyObj() + self.assertEqual("".join(reversed(weakref.proxy(obj))), "cba") + + def test_proxy_hash(self): + cool_hash = 299_792_458 + + class MyObj: + def __hash__(self): + return cool_hash + + obj = MyObj() + self.assertEqual(hash(weakref.proxy(obj)), cool_hash) + def test_getweakrefcount(self): o = C() ref1 = weakref.ref(o) diff --git a/Lib/traceback.py b/Lib/traceback.py index bf34bbab8a1629..a19e38718b1205 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -569,23 +569,30 @@ def format_exception_only(self): if not issubclass(self.exc_type, SyntaxError): yield _format_final_exc_line(stype, self._str) - return + else: + yield from self._format_syntax_error(stype) - # It was a syntax error; show exactly where the problem was found. + def _format_syntax_error(self, stype): + """Format SyntaxError exceptions (internal helper).""" + # Show exactly where the problem was found. filename = self.filename or "" lineno = str(self.lineno) or '?' 
yield ' File "{}", line {}\n'.format(filename, lineno) - badline = self.text - offset = self.offset - if badline is not None: - yield ' {}\n'.format(badline.strip()) - if offset is not None: - caretspace = badline.rstrip('\n') - offset = min(len(caretspace), offset) - 1 - caretspace = caretspace[:offset].lstrip() + text = self.text + if text is not None: + # text = " foo\n" + # rtext = " foo" + # ltext = "foo" + rtext = text.rstrip('\n') + ltext = rtext.lstrip(' \n\f') + spaces = len(rtext) - len(ltext) + yield ' {}\n'.format(ltext) + # Convert 1-based column offset to 0-based index into stripped text + caret = (self.offset or 0) - 1 - spaces + if caret >= 0: # non-space whitespace (likes tabs) must be kept for alignment - caretspace = ((c.isspace() and c or ' ') for c in caretspace) + caretspace = ((c if c.isspace() else ' ') for c in ltext[:caret]) yield ' {}^\n'.format(''.join(caretspace)) msg = self.msg or "" yield "{}: {}\n".format(stype, msg) diff --git a/Lib/typing.py b/Lib/typing.py index f3cd280a09e271..f94996daebd6ed 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -181,42 +181,18 @@ def _collect_type_vars(types): for t in types: if isinstance(t, TypeVar) and t not in tvars: tvars.append(t) - if ((isinstance(t, _GenericAlias) and not t._special) - or isinstance(t, GenericAlias)): + if isinstance(t, (_GenericAlias, GenericAlias)): tvars.extend([t for t in t.__parameters__ if t not in tvars]) return tuple(tvars) -def _subs_tvars(tp, tvars, subs): - """Substitute type variables 'tvars' with substitutions 'subs'. - These two must have the same length. - """ - if not isinstance(tp, (_GenericAlias, GenericAlias)): - return tp - new_args = list(tp.__args__) - for a, arg in enumerate(tp.__args__): - if isinstance(arg, TypeVar): - for i, tvar in enumerate(tvars): - if arg == tvar: - new_args[a] = subs[i] - else: - new_args[a] = _subs_tvars(arg, tvars, subs) - if tp.__origin__ is Union: - return Union[tuple(new_args)] - if isinstance(tp, GenericAlias): - return GenericAlias(tp.__origin__, tuple(new_args)) - else: - return tp.copy_with(tuple(new_args)) - - -def _check_generic(cls, parameters): +def _check_generic(cls, parameters, elen): """Check correct count for parameters of a generic cls (internal helper). This gives a nice error message in case of count mismatch. """ - if not cls.__parameters__: + if not elen: raise TypeError(f"{cls} is not a generic class") alen = len(parameters) - elen = len(cls.__parameters__) if alen != elen: raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" f" actual {alen}, expected {elen}") @@ -229,7 +205,7 @@ def _remove_dups_flatten(parameters): # Flatten out Union[Union[...], ...]. 
params = [] for p in parameters: - if isinstance(p, _GenericAlias) and p.__origin__ is Union: + if isinstance(p, _UnionGenericAlias): params.extend(p.__args__) elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union: params.extend(p[1:]) @@ -274,18 +250,14 @@ def _eval_type(t, globalns, localns): """ if isinstance(t, ForwardRef): return t._evaluate(globalns, localns) - if isinstance(t, _GenericAlias): - ev_args = tuple(_eval_type(a, globalns, localns) for a in t.__args__) - if ev_args == t.__args__: - return t - res = t.copy_with(ev_args) - res._special = t._special - return res - if isinstance(t, GenericAlias): + if isinstance(t, (_GenericAlias, GenericAlias)): ev_args = tuple(_eval_type(a, globalns, localns) for a in t.__args__) if ev_args == t.__args__: return t - return GenericAlias(t.__origin__, ev_args) + if isinstance(t, GenericAlias): + return GenericAlias(t.__origin__, ev_args) + else: + return t.copy_with(ev_args) return t @@ -300,6 +272,7 @@ def __init_subclass__(self, /, *args, **kwds): class _Immutable: """Mixin to indicate that object should not be copied.""" + __slots__ = () def __copy__(self): return self @@ -446,7 +419,7 @@ def Union(self, parameters): parameters = _remove_dups_flatten(parameters) if len(parameters) == 1: return parameters[0] - return _GenericAlias(self, parameters) + return _UnionGenericAlias(self, parameters) @_SpecialForm def Optional(self, parameters): @@ -579,7 +552,7 @@ def longest(x: A, y: A) -> A: """ __slots__ = ('__name__', '__bound__', '__constraints__', - '__covariant__', '__contravariant__') + '__covariant__', '__contravariant__', '__dict__') def __init__(self, name, *constraints, bound=None, covariant=False, contravariant=False): @@ -618,34 +591,10 @@ def __reduce__(self): return self.__name__ -# Special typing constructs Union, Optional, Generic, Callable and Tuple -# use three special attributes for internal bookkeeping of generic types: -# * __parameters__ is a tuple of unique free type parameters of a generic -# type, for example, Dict[T, T].__parameters__ == (T,); -# * __origin__ keeps a reference to a type that was subscripted, -# e.g., Union[T, int].__origin__ == Union, or the non-generic version of -# the type. -# * __args__ is a tuple of all arguments used in subscripting, -# e.g., Dict[T, int].__args__ == (T, int). - - -# Mapping from non-generic type names that have a generic alias in typing -# but with a different name. -_normalize_alias = {'list': 'List', - 'tuple': 'Tuple', - 'dict': 'Dict', - 'set': 'Set', - 'frozenset': 'FrozenSet', - 'deque': 'Deque', - 'defaultdict': 'DefaultDict', - 'type': 'Type', - 'Set': 'AbstractSet'} - def _is_dunder(attr): return attr.startswith('__') and attr.endswith('__') - -class _GenericAlias(_Final, _root=True): +class _BaseGenericAlias(_Final, _root=True): """The central part of internal API. This represents a generic version of type 'origin' with type arguments 'params'. @@ -654,25 +603,87 @@ class _GenericAlias(_Final, _root=True): have 'name' always set. If 'inst' is False, then the alias can't be instantiated, this is used by e.g. typing.List and typing.Dict. """ - def __init__(self, origin, params, *, inst=True, special=False, name=None): + def __init__(self, origin, *, inst=True, name=None): self._inst = inst - self._special = special - if special and name is None: - orig_name = origin.__name__ - name = _normalize_alias.get(orig_name, orig_name) self._name = name + self.__origin__ = origin + self.__slots__ = None # This is not documented. 
+ + def __call__(self, *args, **kwargs): + if not self._inst: + raise TypeError(f"Type {self._name} cannot be instantiated; " + f"use {self.__origin__.__name__}() instead") + result = self.__origin__(*args, **kwargs) + try: + result.__orig_class__ = self + except AttributeError: + pass + return result + + def __mro_entries__(self, bases): + res = [] + if self.__origin__ not in bases: + res.append(self.__origin__) + i = bases.index(self) + for b in bases[i+1:]: + if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic): + break + else: + res.append(Generic) + return tuple(res) + + def __getattr__(self, attr): + # We are careful for copy and pickle. + # Also for simplicity we just don't relay all dunder names + if '__origin__' in self.__dict__ and not _is_dunder(attr): + return getattr(self.__origin__, attr) + raise AttributeError(attr) + + def __setattr__(self, attr, val): + if _is_dunder(attr) or attr in ('_name', '_inst', '_nparams'): + super().__setattr__(attr, val) + else: + setattr(self.__origin__, attr, val) + + def __instancecheck__(self, obj): + return self.__subclasscheck__(type(obj)) + + def __subclasscheck__(self, cls): + raise TypeError("Subscripted generics cannot be used with" + " class and instance checks") + + +# Special typing constructs Union, Optional, Generic, Callable and Tuple +# use three special attributes for internal bookkeeping of generic types: +# * __parameters__ is a tuple of unique free type parameters of a generic +# type, for example, Dict[T, T].__parameters__ == (T,); +# * __origin__ keeps a reference to a type that was subscripted, +# e.g., Union[T, int].__origin__ == Union, or the non-generic version of +# the type. +# * __args__ is a tuple of all arguments used in subscripting, +# e.g., Dict[T, int].__args__ == (T, int). + + +class _GenericAlias(_BaseGenericAlias, _root=True): + def __init__(self, origin, params, *, inst=True, name=None): + super().__init__(origin, inst=inst, name=name) if not isinstance(params, tuple): params = (params,) - self.__origin__ = origin self.__args__ = tuple(... if a is _TypingEllipsis else () if a is _TypingEmpty else a for a in params) self.__parameters__ = _collect_type_vars(params) - self.__slots__ = None # This is not documented. if not name: self.__module__ = origin.__module__ - if special: - self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}' + + def __eq__(self, other): + if not isinstance(other, _GenericAlias): + return NotImplemented + return (self.__origin__ == other.__origin__ + and self.__args__ == other.__args__) + + def __hash__(self): + return hash((self.__origin__, self.__args__)) @_tp_cache def __getitem__(self, params): @@ -683,132 +694,119 @@ def __getitem__(self, params): params = (params,) msg = "Parameters to generic types must be types." params = tuple(_type_check(p, msg) for p in params) - _check_generic(self, params) - return _subs_tvars(self, self.__parameters__, params) + _check_generic(self, params, len(self.__parameters__)) + + subst = dict(zip(self.__parameters__, params)) + new_args = [] + for arg in self.__args__: + if isinstance(arg, TypeVar): + arg = subst[arg] + elif isinstance(arg, (_GenericAlias, GenericAlias)): + subparams = arg.__parameters__ + if subparams: + subargs = tuple(subst[x] for x in subparams) + arg = arg[subargs] + new_args.append(arg) + return self.copy_with(tuple(new_args)) def copy_with(self, params): - # We don't copy self._special. 
- return _GenericAlias(self.__origin__, params, name=self._name, inst=self._inst) + return self.__class__(self.__origin__, params, name=self._name, inst=self._inst) def __repr__(self): - if (self.__origin__ == Union and len(self.__args__) == 2 - and type(None) in self.__args__): - if self.__args__[0] is not type(None): - arg = self.__args__[0] - else: - arg = self.__args__[1] - return (f'typing.Optional[{_type_repr(arg)}]') - if (self._name != 'Callable' or - len(self.__args__) == 2 and self.__args__[0] is Ellipsis): - if self._name: - name = 'typing.' + self._name - else: - name = _type_repr(self.__origin__) - if not self._special: - args = f'[{", ".join([_type_repr(a) for a in self.__args__])}]' - else: - args = '' - return (f'{name}{args}') - if self._special: - return 'typing.Callable' - return (f'typing.Callable' - f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], ' - f'{_type_repr(self.__args__[-1])}]') - - def __eq__(self, other): - if not isinstance(other, _GenericAlias): - return NotImplemented - if self.__origin__ != other.__origin__: - return False - if self.__origin__ is Union and other.__origin__ is Union: - return frozenset(self.__args__) == frozenset(other.__args__) - return self.__args__ == other.__args__ - - def __hash__(self): - if self.__origin__ is Union: - return hash((Union, frozenset(self.__args__))) - return hash((self.__origin__, self.__args__)) + if self._name: + name = 'typing.' + self._name + else: + name = _type_repr(self.__origin__) + args = ", ".join([_type_repr(a) for a in self.__args__]) + return f'{name}[{args}]' - def __call__(self, *args, **kwargs): - if not self._inst: - raise TypeError(f"Type {self._name} cannot be instantiated; " - f"use {self._name.lower()}() instead") - result = self.__origin__(*args, **kwargs) - try: - result.__orig_class__ = self - except AttributeError: - pass - return result + def __reduce__(self): + if self._name: + origin = globals()[self._name] + else: + origin = self.__origin__ + args = tuple(self.__args__) + if len(args) == 1 and not isinstance(args[0], tuple): + args, = args + return operator.getitem, (origin, args) def __mro_entries__(self, bases): if self._name: # generic version of an ABC or built-in class - res = [] - if self.__origin__ not in bases: - res.append(self.__origin__) - i = bases.index(self) - if not any(isinstance(b, _GenericAlias) or issubclass(b, Generic) - for b in bases[i+1:]): - res.append(Generic) - return tuple(res) + return super().__mro_entries__(bases) if self.__origin__ is Generic: if Protocol in bases: return () i = bases.index(self) for b in bases[i+1:]: - if isinstance(b, _GenericAlias) and b is not self: + if isinstance(b, _BaseGenericAlias) and b is not self: return () return (self.__origin__,) - def __getattr__(self, attr): - # We are careful for copy and pickle. - # Also for simplicity we just don't relay all dunder names - if '__origin__' in self.__dict__ and not _is_dunder(attr): - return getattr(self.__origin__, attr) - raise AttributeError(attr) - def __setattr__(self, attr, val): - if _is_dunder(attr) or attr in ('_name', '_inst', '_special'): - super().__setattr__(attr, val) +# _nparams is the number of accepted parameters, e.g. 0 for Hashable, +# 1 for List and 2 for Dict. It may be -1 if variable number of +# parameters are accepted (needs custom __getitem__). 
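Reviewer note: _nparams (described in the comment above) feeds the count check that _SpecialGenericAlias.__getitem__ performs via _check_generic. A small sketch of the user-visible effect, assuming the typing module from this branch:

    from typing import Dict, List, Tuple

    Dict[str, int]            # Dict is created with nparams=2
    Tuple[int, str, bytes]    # Tuple uses nparams=-1 and a custom __getitem__, so any count is accepted
    try:
        List[int, str]        # List is created with nparams=1
    except TypeError as exc:
        print(exc)            # Too many parameters for typing.List; actual 2, expected 1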
+ +class _SpecialGenericAlias(_BaseGenericAlias, _root=True): + def __init__(self, origin, nparams, *, inst=True, name=None): + if name is None: + name = origin.__name__ + super().__init__(origin, inst=inst, name=name) + self._nparams = nparams + if origin.__module__ == 'builtins': + self.__doc__ = f'A generic version of {origin.__qualname__}.' else: - setattr(self.__origin__, attr, val) + self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.' - def __instancecheck__(self, obj): - return self.__subclasscheck__(type(obj)) + @_tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + msg = "Parameters to generic types must be types." + params = tuple(_type_check(p, msg) for p in params) + _check_generic(self, params, self._nparams) + return self.copy_with(params) + + def copy_with(self, params): + return _GenericAlias(self.__origin__, params, + name=self._name, inst=self._inst) + + def __repr__(self): + return 'typing.' + self._name def __subclasscheck__(self, cls): - if self._special: - if not isinstance(cls, _GenericAlias): - return issubclass(cls, self.__origin__) - if cls._special: - return issubclass(cls.__origin__, self.__origin__) - raise TypeError("Subscripted generics cannot be used with" - " class and instance checks") + if isinstance(cls, _SpecialGenericAlias): + return issubclass(cls.__origin__, self.__origin__) + if not isinstance(cls, _GenericAlias): + return issubclass(cls, self.__origin__) + return super().__subclasscheck__(cls) def __reduce__(self): - if self._special: - return self._name + return self._name - if self._name: - origin = globals()[self._name] - else: - origin = self.__origin__ - if (origin is Callable and - not (len(self.__args__) == 2 and self.__args__[0] is Ellipsis)): - args = list(self.__args__[:-1]), self.__args__[-1] - else: - args = tuple(self.__args__) - if len(args) == 1 and not isinstance(args[0], tuple): - args, = args - return operator.getitem, (origin, args) +class _CallableGenericAlias(_GenericAlias, _root=True): + def __repr__(self): + assert self._name == 'Callable' + if len(self.__args__) == 2 and self.__args__[0] is Ellipsis: + return super().__repr__() + return (f'typing.Callable' + f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], ' + f'{_type_repr(self.__args__[-1])}]') + + def __reduce__(self): + args = self.__args__ + if not (len(args) == 2 and args[0] is ...): + args = list(args[:-1]), args[-1] + return operator.getitem, (Callable, args) + + +class _CallableType(_SpecialGenericAlias, _root=True): + def copy_with(self, params): + return _CallableGenericAlias(self.__origin__, params, + name=self._name, inst=self._inst) -class _VariadicGenericAlias(_GenericAlias, _root=True): - """Same as _GenericAlias above but for variadic aliases. Currently, - this is used only by special internal aliases: Tuple and Callable. - """ def __getitem__(self, params): - if self._name != 'Callable' or not self._special: - return self.__getitem_inner__(params) if not isinstance(params, tuple) or len(params) != 2: raise TypeError("Callable must be used as " "Callable[[arg, ...], result].") @@ -824,29 +822,53 @@ def __getitem__(self, params): @_tp_cache def __getitem_inner__(self, params): - if self.__origin__ is tuple and self._special: - if params == (): - return self.copy_with((_TypingEmpty,)) - if not isinstance(params, tuple): - params = (params,) - if len(params) == 2 and params[1] is ...: - msg = "Tuple[t, ...]: t must be a type." 
- p = _type_check(params[0], msg) - return self.copy_with((p, _TypingEllipsis)) - msg = "Tuple[t0, t1, ...]: each t must be a type." - params = tuple(_type_check(p, msg) for p in params) - return self.copy_with(params) - if self.__origin__ is collections.abc.Callable and self._special: - args, result = params - msg = "Callable[args, result]: result must be a type." - result = _type_check(result, msg) - if args is Ellipsis: - return self.copy_with((_TypingEllipsis, result)) - msg = "Callable[[arg, ...], result]: each arg must be a type." - args = tuple(_type_check(arg, msg) for arg in args) - params = args + (result,) - return self.copy_with(params) - return super().__getitem__(params) + args, result = params + msg = "Callable[args, result]: result must be a type." + result = _type_check(result, msg) + if args is Ellipsis: + return self.copy_with((_TypingEllipsis, result)) + msg = "Callable[[arg, ...], result]: each arg must be a type." + args = tuple(_type_check(arg, msg) for arg in args) + params = args + (result,) + return self.copy_with(params) + + +class _TupleType(_SpecialGenericAlias, _root=True): + @_tp_cache + def __getitem__(self, params): + if params == (): + return self.copy_with((_TypingEmpty,)) + if not isinstance(params, tuple): + params = (params,) + if len(params) == 2 and params[1] is ...: + msg = "Tuple[t, ...]: t must be a type." + p = _type_check(params[0], msg) + return self.copy_with((p, _TypingEllipsis)) + msg = "Tuple[t0, t1, ...]: each t must be a type." + params = tuple(_type_check(p, msg) for p in params) + return self.copy_with(params) + + +class _UnionGenericAlias(_GenericAlias, _root=True): + def copy_with(self, params): + return Union[params] + + def __eq__(self, other): + if not isinstance(other, _UnionGenericAlias): + return NotImplemented + return set(self.__args__) == set(other.__args__) + + def __hash__(self): + return hash(frozenset(self.__args__)) + + def __repr__(self): + args = self.__args__ + if len(args) == 2: + if args[0] is type(None): + return f'typing.Optional[{_type_repr(args[1])}]' + elif args[1] is type(None): + return f'typing.Optional[{_type_repr(args[0])}]' + return super().__repr__() class Generic: @@ -901,7 +923,7 @@ def __class_getitem__(cls, params): f"Parameters to {cls.__name__}[...] must all be unique") else: # Subscripting a regular Generic subclass. 
- _check_generic(cls, params) + _check_generic(cls, params, len(cls.__parameters__)) return _GenericAlias(cls, params) def __init_subclass__(cls, *args, **kwargs): @@ -1162,9 +1184,8 @@ def __reduce__(self): def __eq__(self, other): if not isinstance(other, _AnnotatedAlias): return NotImplemented - if self.__origin__ != other.__origin__: - return False - return self.__metadata__ == other.__metadata__ + return (self.__origin__ == other.__origin__ + and self.__metadata__ == other.__metadata__) def __hash__(self): return hash((self.__origin__, self.__metadata__)) @@ -1380,9 +1401,7 @@ def _strip_annotations(t): stripped_args = tuple(_strip_annotations(a) for a in t.__args__) if stripped_args == t.__args__: return t - res = t.copy_with(stripped_args) - res._special = t._special - return res + return t.copy_with(stripped_args) if isinstance(t, GenericAlias): stripped_args = tuple(_strip_annotations(a) for a in t.__args__) if stripped_args == t.__args__: @@ -1407,7 +1426,7 @@ def get_origin(tp): """ if isinstance(tp, _AnnotatedAlias): return Annotated - if isinstance(tp, (_GenericAlias, GenericAlias)): + if isinstance(tp, (_BaseGenericAlias, GenericAlias)): return tp.__origin__ if tp is Generic: return Generic @@ -1427,7 +1446,7 @@ def get_args(tp): """ if isinstance(tp, _AnnotatedAlias): return (tp.__origin__,) + tp.__metadata__ - if isinstance(tp, _GenericAlias) and not tp._special: + if isinstance(tp, _GenericAlias): res = tp.__args__ if tp.__origin__ is collections.abc.Callable and res[0] is not Ellipsis: res = (list(res[:-1]), res[-1]) @@ -1561,21 +1580,20 @@ class Other(Leaf): # Error reported by type checker # Various ABCs mimicking those in collections.abc. -def _alias(origin, params, inst=True): - return _GenericAlias(origin, params, special=True, inst=inst) - -Hashable = _alias(collections.abc.Hashable, ()) # Not generic. -Awaitable = _alias(collections.abc.Awaitable, T_co) -Coroutine = _alias(collections.abc.Coroutine, (T_co, T_contra, V_co)) -AsyncIterable = _alias(collections.abc.AsyncIterable, T_co) -AsyncIterator = _alias(collections.abc.AsyncIterator, T_co) -Iterable = _alias(collections.abc.Iterable, T_co) -Iterator = _alias(collections.abc.Iterator, T_co) -Reversible = _alias(collections.abc.Reversible, T_co) -Sized = _alias(collections.abc.Sized, ()) # Not generic. -Container = _alias(collections.abc.Container, T_co) -Collection = _alias(collections.abc.Collection, T_co) -Callable = _VariadicGenericAlias(collections.abc.Callable, (), special=True) +_alias = _SpecialGenericAlias + +Hashable = _alias(collections.abc.Hashable, 0) # Not generic. +Awaitable = _alias(collections.abc.Awaitable, 1) +Coroutine = _alias(collections.abc.Coroutine, 3) +AsyncIterable = _alias(collections.abc.AsyncIterable, 1) +AsyncIterator = _alias(collections.abc.AsyncIterator, 1) +Iterable = _alias(collections.abc.Iterable, 1) +Iterator = _alias(collections.abc.Iterator, 1) +Reversible = _alias(collections.abc.Reversible, 1) +Sized = _alias(collections.abc.Sized, 0) # Not generic. +Container = _alias(collections.abc.Container, 1) +Collection = _alias(collections.abc.Collection, 1) +Callable = _CallableType(collections.abc.Callable, 2) Callable.__doc__ = \ """Callable type; Callable[[int], str] is a function of (int) -> str. @@ -1586,15 +1604,16 @@ def _alias(origin, params, inst=True): There is no syntax to indicate optional or keyword arguments, such function types are rarely used as callback types. 
""" -AbstractSet = _alias(collections.abc.Set, T_co) -MutableSet = _alias(collections.abc.MutableSet, T) +AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet') +MutableSet = _alias(collections.abc.MutableSet, 1) # NOTE: Mapping is only covariant in the value type. -Mapping = _alias(collections.abc.Mapping, (KT, VT_co)) -MutableMapping = _alias(collections.abc.MutableMapping, (KT, VT)) -Sequence = _alias(collections.abc.Sequence, T_co) -MutableSequence = _alias(collections.abc.MutableSequence, T) -ByteString = _alias(collections.abc.ByteString, ()) # Not generic -Tuple = _VariadicGenericAlias(tuple, (), inst=False, special=True) +Mapping = _alias(collections.abc.Mapping, 2) +MutableMapping = _alias(collections.abc.MutableMapping, 2) +Sequence = _alias(collections.abc.Sequence, 1) +MutableSequence = _alias(collections.abc.MutableSequence, 1) +ByteString = _alias(collections.abc.ByteString, 0) # Not generic +# Tuple accepts variable number of parameters. +Tuple = _TupleType(tuple, -1, inst=False, name='Tuple') Tuple.__doc__ = \ """Tuple type; Tuple[X, Y] is the cross-product type of X and Y. @@ -1604,24 +1623,24 @@ def _alias(origin, params, inst=True): To specify a variable-length tuple of homogeneous type, use Tuple[T, ...]. """ -List = _alias(list, T, inst=False) -Deque = _alias(collections.deque, T) -Set = _alias(set, T, inst=False) -FrozenSet = _alias(frozenset, T_co, inst=False) -MappingView = _alias(collections.abc.MappingView, T_co) -KeysView = _alias(collections.abc.KeysView, KT) -ItemsView = _alias(collections.abc.ItemsView, (KT, VT_co)) -ValuesView = _alias(collections.abc.ValuesView, VT_co) -ContextManager = _alias(contextlib.AbstractContextManager, T_co) -AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, T_co) -Dict = _alias(dict, (KT, VT), inst=False) -DefaultDict = _alias(collections.defaultdict, (KT, VT)) -OrderedDict = _alias(collections.OrderedDict, (KT, VT)) -Counter = _alias(collections.Counter, T) -ChainMap = _alias(collections.ChainMap, (KT, VT)) -Generator = _alias(collections.abc.Generator, (T_co, T_contra, V_co)) -AsyncGenerator = _alias(collections.abc.AsyncGenerator, (T_co, T_contra)) -Type = _alias(type, CT_co, inst=False) +List = _alias(list, 1, inst=False, name='List') +Deque = _alias(collections.deque, 1, name='Deque') +Set = _alias(set, 1, inst=False, name='Set') +FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet') +MappingView = _alias(collections.abc.MappingView, 1) +KeysView = _alias(collections.abc.KeysView, 1) +ItemsView = _alias(collections.abc.ItemsView, 2) +ValuesView = _alias(collections.abc.ValuesView, 1) +ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager') +AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager') +Dict = _alias(dict, 2, inst=False, name='Dict') +DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict') +OrderedDict = _alias(collections.OrderedDict, 2) +Counter = _alias(collections.Counter, 1) +ChainMap = _alias(collections.ChainMap, 2) +Generator = _alias(collections.abc.Generator, 3) +AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2) +Type = _alias(type, 1, inst=False, name='Type') Type.__doc__ = \ """A special construct usable to annotate class objects. 
@@ -2115,8 +2134,8 @@ class io: io.__name__ = __name__ + '.io' sys.modules[io.__name__] = io -Pattern = _alias(stdlib_re.Pattern, AnyStr) -Match = _alias(stdlib_re.Match, AnyStr) +Pattern = _alias(stdlib_re.Pattern, 1) +Match = _alias(stdlib_re.Match, 1) class re: """Wrapper namespace for re type aliases.""" diff --git a/Lib/uuid.py b/Lib/uuid.py index 2799c75ba6a1ad..9ddce813fc4692 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -555,178 +555,44 @@ def _netstat_getnode(): return _find_mac_under_heading('netstat', '-ian', b'Address') def _ipconfig_getnode(): - """Get the hardware address on Windows by running ipconfig.exe.""" - import os, re, subprocess - first_local_mac = None - dirs = ['', r'c:\windows\system32', r'c:\winnt\system32'] - try: - import ctypes - buffer = ctypes.create_string_buffer(300) - ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300) - dirs.insert(0, buffer.value.decode('mbcs')) - except: - pass - for dir in dirs: - try: - proc = subprocess.Popen([os.path.join(dir, 'ipconfig'), '/all'], - stdout=subprocess.PIPE, - encoding="oem") - except OSError: - continue - with proc: - for line in proc.stdout: - value = line.split(':')[-1].strip().lower() - if re.fullmatch('(?:[0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value): - mac = int(value.replace('-', ''), 16) - if _is_universal(mac): - return mac - first_local_mac = first_local_mac or mac - return first_local_mac or None + """[DEPRECATED] Get the hardware address on Windows.""" + # bpo-40501: UuidCreateSequential() is now the only supported approach + return _windll_getnode() def _netbios_getnode(): - """Get the hardware address on Windows using NetBIOS calls. - See http://support.microsoft.com/kb/118623 for details.""" - import win32wnet, netbios - first_local_mac = None - ncb = netbios.NCB() - ncb.Command = netbios.NCBENUM - ncb.Buffer = adapters = netbios.LANA_ENUM() - adapters._pack() - if win32wnet.Netbios(ncb) != 0: - return None - adapters._unpack() - for i in range(adapters.length): - ncb.Reset() - ncb.Command = netbios.NCBRESET - ncb.Lana_num = ord(adapters.lana[i]) - if win32wnet.Netbios(ncb) != 0: - continue - ncb.Reset() - ncb.Command = netbios.NCBASTAT - ncb.Lana_num = ord(adapters.lana[i]) - ncb.Callname = '*'.ljust(16) - ncb.Buffer = status = netbios.ADAPTER_STATUS() - if win32wnet.Netbios(ncb) != 0: - continue - status._unpack() - bytes = status.adapter_address[:6] - if len(bytes) != 6: - continue - mac = int.from_bytes(bytes, 'big') - if _is_universal(mac): - return mac - first_local_mac = first_local_mac or mac - return first_local_mac or None + """[DEPRECATED] Get the hardware address on Windows.""" + # bpo-40501: UuidCreateSequential() is now the only supported approach + return _windll_getnode() -_generate_time_safe = _UuidCreate = None -_has_uuid_generate_time_safe = None - # Import optional C extension at toplevel, to help disabling it when testing try: import _uuid + _generate_time_safe = getattr(_uuid, "generate_time_safe", None) + _UuidCreate = getattr(_uuid, "UuidCreate", None) + _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe except ImportError: _uuid = None + _generate_time_safe = None + _UuidCreate = None + _has_uuid_generate_time_safe = None def _load_system_functions(): - """ - Try to load platform-specific functions for generating uuids. 
- """ - global _generate_time_safe, _UuidCreate, _has_uuid_generate_time_safe - - if _has_uuid_generate_time_safe is not None: - return - - _has_uuid_generate_time_safe = False - - if sys.platform == "darwin" and int(os.uname().release.split('.')[0]) < 9: - # The uuid_generate_* functions are broken on MacOS X 10.5, as noted - # in issue #8621 the function generates the same sequence of values - # in the parent process and all children created using fork (unless - # those children use exec as well). - # - # Assume that the uuid_generate functions are broken from 10.5 onward, - # the test can be adjusted when a later version is fixed. - pass - elif _uuid is not None: - _generate_time_safe = _uuid.generate_time_safe - _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe - return - - try: - # If we couldn't find an extension module, try ctypes to find - # system routines for UUID generation. - # Thanks to Thomas Heller for ctypes and for his help with its use here. - import ctypes - import ctypes.util - - # The uuid_generate_* routines are provided by libuuid on at least - # Linux and FreeBSD, and provided by libc on Mac OS X. - _libnames = ['uuid'] - if not sys.platform.startswith('win'): - _libnames.append('c') - for libname in _libnames: - try: - lib = ctypes.CDLL(ctypes.util.find_library(libname)) - except Exception: # pragma: nocover - continue - # Try to find the safe variety first. - if hasattr(lib, 'uuid_generate_time_safe'): - _uuid_generate_time_safe = lib.uuid_generate_time_safe - # int uuid_generate_time_safe(uuid_t out); - def _generate_time_safe(): - _buffer = ctypes.create_string_buffer(16) - res = _uuid_generate_time_safe(_buffer) - return bytes(_buffer.raw), res - _has_uuid_generate_time_safe = True - break - - elif hasattr(lib, 'uuid_generate_time'): # pragma: nocover - _uuid_generate_time = lib.uuid_generate_time - # void uuid_generate_time(uuid_t out); - _uuid_generate_time.restype = None - def _generate_time_safe(): - _buffer = ctypes.create_string_buffer(16) - _uuid_generate_time(_buffer) - return bytes(_buffer.raw), None - break - - # On Windows prior to 2000, UuidCreate gives a UUID containing the - # hardware address. On Windows 2000 and later, UuidCreate makes a - # random UUID and UuidCreateSequential gives a UUID containing the - # hardware address. These routines are provided by the RPC runtime. - # NOTE: at least on Tim's WinXP Pro SP2 desktop box, while the last - # 6 bytes returned by UuidCreateSequential are fixed, they don't appear - # to bear any relationship to the MAC address of any network device - # on the box. 
- try: - lib = ctypes.windll.rpcrt4 - except: - lib = None - _UuidCreate = getattr(lib, 'UuidCreateSequential', - getattr(lib, 'UuidCreate', None)) - - except Exception as exc: - import warnings - warnings.warn(f"Could not find fallback ctypes uuid functions: {exc}", - ImportWarning) + """[DEPRECATED] Platform-specific functions loaded at import time""" def _unix_getnode(): - """Get the hardware address on Unix using the _uuid extension module - or ctypes.""" - _load_system_functions() - uuid_time, _ = _generate_time_safe() - return UUID(bytes=uuid_time).node + """Get the hardware address on Unix using the _uuid extension module.""" + if _generate_time_safe: + uuid_time, _ = _generate_time_safe() + return UUID(bytes=uuid_time).node def _windll_getnode(): - """Get the hardware address on Windows using ctypes.""" - import ctypes - _load_system_functions() - _buffer = ctypes.create_string_buffer(16) - if _UuidCreate(_buffer) == 0: - return UUID(bytes=bytes_(_buffer.raw)).node + """Get the hardware address on Windows using the _uuid extension module.""" + if _UuidCreate: + uuid_bytes = _UuidCreate() + return UUID(bytes_le=uuid_bytes).node def _random_getnode(): """Get a random node ID.""" @@ -755,7 +621,8 @@ def _random_getnode(): elif _DARWIN: _OS_GETTERS = [_ifconfig_getnode, _arp_getnode, _netstat_getnode] elif _WINDOWS: - _OS_GETTERS = [_netbios_getnode, _ipconfig_getnode] + # bpo-40201: _windll_getnode will always succeed, so these are not needed + _OS_GETTERS = [] elif _AIX: _OS_GETTERS = [_netstat_getnode] else: @@ -802,7 +669,6 @@ def uuid1(node=None, clock_seq=None): # When the system provides a version-1 UUID generator, use it (but don't # use UuidCreate here because its UUIDs don't conform to RFC 4122). - _load_system_functions() if _generate_time_safe is not None and node is clock_seq is None: uuid_time, safely_generated = _generate_time_safe() try: diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 1ef179a91a6f19..9c73bcfb44ae81 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -69,6 +69,14 @@ def get(using=None): # instead of "from webbrowser import *". def open(url, new=0, autoraise=True): + """Display url using the default browser. + + If possible, open url in a location determined by new. + - 0: the same browser window (the default). + - 1: a new browser window. + - 2: a new browser page ("tab"). + If possible, autoraise raises the window (the default) or not. + """ if _tryorder is None: with _lock: if _tryorder is None: @@ -80,9 +88,17 @@ def open(url, new=0, autoraise=True): return False def open_new(url): + """Open url in a new window of the default browser. + + If not possible, then open url in the only browser window. + """ return open(url, 1) def open_new_tab(url): + """Open url in a new page ("tab") of the default browser. + + If not possible, then the behavior becomes equivalent to open_new(). 
+ """ return open(url, 2) diff --git a/Makefile.pre.in b/Makefile.pre.in index 3cb8b84157f0ed..d545a9efb3cd99 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -351,6 +351,7 @@ PYTHON_OBJS= \ Python/getversion.o \ Python/graminit.o \ Python/hamt.o \ + Python/hashtable.o \ Python/import.o \ Python/importdl.o \ Python/initconfig.o \ @@ -1104,6 +1105,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/cpython/initconfig.h \ $(srcdir)/Include/cpython/interpreteridobject.h \ $(srcdir)/Include/cpython/listobject.h \ + $(srcdir)/Include/cpython/methodobject.h \ $(srcdir)/Include/cpython/object.h \ $(srcdir)/Include/cpython/objimpl.h \ $(srcdir)/Include/cpython/pyerrors.h \ @@ -1130,6 +1132,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_getopt.h \ $(srcdir)/Include/internal/pycore_gil.h \ $(srcdir)/Include/internal/pycore_hamt.h \ + $(srcdir)/Include/internal/pycore_hashtable.h \ $(srcdir)/Include/internal/pycore_import.h \ $(srcdir)/Include/internal/pycore_initconfig.h \ $(srcdir)/Include/internal/pycore_interp.h \ diff --git a/Misc/ACKS b/Misc/ACKS index f744de6b1f66d2..b479aa5d807f56 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -86,6 +86,7 @@ Marcin Bachry Alfonso Baciero Dwayne Bailey Stig Bakken +Lumír Balhar Aleksandr Balezin Greg Ball Lewis Ball diff --git a/Misc/NEWS.d/3.9.0a1.rst b/Misc/NEWS.d/3.9.0a1.rst index e5b4972b1922c4..fb74d3622263d4 100644 --- a/Misc/NEWS.d/3.9.0a1.rst +++ b/Misc/NEWS.d/3.9.0a1.rst @@ -3372,7 +3372,7 @@ markup and any values in the message. Patch by Paul Ganssle .. section: Library Removed methods Element.getchildren(), Element.getiterator() and -ElementTree.getiterator(). +ElementTree.getiterator() and the xml.etree.cElementTree module. .. diff --git a/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst b/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst new file mode 100644 index 00000000000000..ab9062c28f4bb9 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-05-05-15-39-11.bpo-40514.bZZmuS.rst @@ -0,0 +1,2 @@ +Add ``--with-experimental-isolated-subinterpreters`` build option to +``configure``: better isolate subinterpreters, experimental build mode. diff --git a/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst b/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst new file mode 100644 index 00000000000000..785ea323c316de --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-01-22-12-38-59.bpo-38787.HUH6hd.rst @@ -0,0 +1,2 @@ +Module C state is now accessible from C-defined heap type methods (:pep:`573`). +Patch by Marcel Plch and Petr Viktorin. diff --git a/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst b/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst new file mode 100644 index 00000000000000..d7f256a2a6b527 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-07-11-41-13.bpo-40545.51DzF1.rst @@ -0,0 +1,3 @@ +Declare ``_PyErr_GetTopmostException()`` with ``PyAPI_FUNC()`` to properly +export the function in the C API. The function remains private (``_Py``) +prefix. diff --git a/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst b/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst new file mode 100644 index 00000000000000..f80be666c1c200 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-10-16-39-08.bpo-38787.XzQ59O.rst @@ -0,0 +1,2 @@ +Add PyCFunction_CheckExact() macro for exact type checks now that we allow subtypes of PyCFunction, +as well as PyCMethod_CheckExact() and PyCMethod_Check() for the new PyCMethod subtype. 
diff --git a/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst b/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst new file mode 100644 index 00000000000000..a08c3da5660455 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-14-00-36-19.bpo-39465.3a5g-X.rst @@ -0,0 +1 @@ +Remove the ``_PyUnicode_ClearStaticStrings()`` function from the C API. diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst b/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst new file mode 100644 index 00000000000000..62446e35ae01ba --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2019-11-20-09-50-58.bpo-37986.o0lmA7.rst @@ -0,0 +1,2 @@ +Improve performance of :c:func:`PyLong_FromDouble` for values that fit into +:c:type:`long`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst new file mode 100644 index 00000000000000..14f05be59a1edd --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-20-36-15.bpo-40523.hKZVTB.rst @@ -0,0 +1,2 @@ +Add pass-throughs for :func:`hash` and :func:`reversed` to +:class:`weakref.proxy` objects. Patch by Pablo Galindo. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst new file mode 100644 index 00000000000000..197eae97c3d1ab --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-05-21-11-35.bpo-1635741.ggwD3C.rst @@ -0,0 +1 @@ +Port :mod:`errno` to multiphase initialization (:pep:`489`). diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst new file mode 100644 index 00000000000000..19b8888230c659 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-06-14-52-35.bpo-40527.gTNKuy.rst @@ -0,0 +1,2 @@ +Fix command line argument parsing: no longer write errors multiple times +into stderr. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst new file mode 100644 index 00000000000000..b0ea60234634c5 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-08-03-25-26.bpo-40502.e-VCyL.rst @@ -0,0 +1,2 @@ +Initialize ``n->n_col_offset``. +(Patch by Joannah Nanjekye) \ No newline at end of file diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst new file mode 100644 index 00000000000000..92a5e3ce632172 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-09-01-39-16.bpo-40566.wlcjW_.rst @@ -0,0 +1 @@ +Apply :pep:`573` to :mod:`abc`. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst new file mode 100644 index 00000000000000..7a9258ef0a938b --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-00-19-42.bpo-40585.yusknY.rst @@ -0,0 +1,2 @@ +Fixed a bug when using :func:`codeop.compile_command` that was causing +exceptions to be swallowed with the new parser. 
Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst new file mode 100644 index 00000000000000..5587d4f49ccf97 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-13-50-52.bpo-40593.yuOXj3.rst @@ -0,0 +1 @@ +Improved syntax errors for invalid characters in source code. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst new file mode 100644 index 00000000000000..1252db4dc9848d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-11-20-53-52.bpo-40596.dwOH_X.rst @@ -0,0 +1,2 @@ +Fixed :meth:`str.isidentifier` for non-canonicalized strings containing +non-BMP characters on Windows. diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst new file mode 100644 index 00000000000000..bda24719b12cb3 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-08-08-39-40.bpo-40561.ZMB_2i.rst @@ -0,0 +1 @@ +Provide docstrings for webbrowser open functions. diff --git a/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst b/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst new file mode 100644 index 00000000000000..4f349adff33460 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2020-05-08-20-18-55.bpo-34790.t6kW_1.rst @@ -0,0 +1 @@ +Add version of removal for explicit passing of coros to `asyncio.wait()`'s documentation \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst b/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst new file mode 100644 index 00000000000000..50f547f56c5208 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-14-09-54-35.bpo-40273.IN73Ks.rst @@ -0,0 +1 @@ +:class:`types.MappingProxyType` is now reversible. diff --git a/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst b/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst new file mode 100644 index 00000000000000..237bcf7f99b0f0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-04-27-00-51-40.bpo-39791.wv8Dxn.rst @@ -0,0 +1 @@ +Added ``files()`` function to importlib.resources with support for subdirectories in package data, matching backport in importlib_resources 1.5. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst new file mode 100644 index 00000000000000..81f9e937a2bff4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-02-14-24-48.bpo-40355.xTujaB.rst @@ -0,0 +1,2 @@ +Improve error reporting in :func:`ast.literal_eval` in the presence of malformed :class:`ast.Dict` +nodes instead of silently ignoring any non-conforming elements. Patch by Curtis Bucher. diff --git a/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst b/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst new file mode 100644 index 00000000000000..d3049b05a78b6c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-04-11-20-49.bpo-40495.TyTc2O.rst @@ -0,0 +1,2 @@ +:mod:`compileall` is now able to use hardlinks to prevent duplicates in a +case when ``.pyc`` files for different optimization levels have the same content. 
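Editor's note: among the NEWS entries above, bpo-40273 makes :class:`types.MappingProxyType` reversible by forwarding :func:`reversed` to the wrapped mapping. A minimal sketch::

    from types import MappingProxyType

    settings = {"host": "localhost", "port": 8080, "debug": True}
    view = MappingProxyType(settings)        # read-only view of the dict

    print(list(view))                        # ['host', 'port', 'debug']
    print(list(reversed(view)))              # ['debug', 'port', 'host']  (new in this release)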
diff --git a/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst b/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst new file mode 100644 index 00000000000000..d046b1422419d7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-04-21-21-43.bpo-40480.mjldWa.rst @@ -0,0 +1 @@ +``fnmatch.fnmatch()`` could take exponential time in the presence of multiple ``*`` pattern characters. This was repaired by generating more elaborate regular expressions to avoid futile backtracking. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst new file mode 100644 index 00000000000000..15846351f25bbe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-05-08-12-51.bpo-40559.112wwa.rst @@ -0,0 +1 @@ +Fix possible memory leak in the C implementation of :class:`asyncio.Task`. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst b/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst new file mode 100644 index 00000000000000..261a49e4329280 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-05-17-12-47.bpo-40504.EX6wPn.rst @@ -0,0 +1 @@ +:func:`functools.lru_cache` objects can now be the targets of weakrefs. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst b/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst new file mode 100644 index 00000000000000..a2e694ac1ad080 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-06-15-36-47.bpo-40541.LlYghL.rst @@ -0,0 +1 @@ +Added an optional *counts* parameter to random.sample(). diff --git a/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst b/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst new file mode 100644 index 00000000000000..873ff49c1eb00b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-07-20-11-51.bpo-40549.6FiRSV.rst @@ -0,0 +1,2 @@ +Convert posixmodule.c ("posix" or "nt" module) to the multiphase +initialization (PEP 489). diff --git a/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst b/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst new file mode 100644 index 00000000000000..46e806a2dc2222 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-07-21-22-04.bpo-40397.PVWFAn.rst @@ -0,0 +1,2 @@ +Removed attributes ``__args__`` and ``__parameters__`` from special generic +aliases like ``typing.List`` (not subscripted). diff --git a/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst b/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst new file mode 100644 index 00000000000000..476770f6974d2f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-09-15-38-25.bpo-40571.kOXZGC.rst @@ -0,0 +1,2 @@ +Added functools.cache() as a simpler, more discoverable way to access the +unbounded cache variant of lru_cache(maxsize=None). diff --git a/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst new file mode 100644 index 00000000000000..1b9fe609c25b71 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst @@ -0,0 +1 @@ +If text content lines are longer than policy.max_line_length, always use a content-encoding to make sure they are wrapped. 
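Editor's note: two of the library changes above are easiest to see side by side: bpo-40571 adds :func:`functools.cache` as a readable alias for ``lru_cache(maxsize=None)``, and bpo-40504 lets ``lru_cache`` wrappers be the target of weak references. A hedged sketch::

    import functools
    import weakref

    @functools.cache                 # unbounded cache, same as lru_cache(maxsize=None)
    def fib(n: int) -> int:
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    print(fib(30))                   # each distinct argument is computed only once
    print(fib.cache_info())          # hit/miss statistics, as with lru_cache

    ref = weakref.ref(fib)           # now allowed for the C implementation too
    print(ref() is fib)              # True while the wrapper is alive

The new *counts* parameter of :func:`random.sample` (bpo-40541) is a similarly small addition: ``random.sample(['red', 'blue'], counts=[4, 2], k=5)`` draws from the expanded population without materializing it.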
diff --git a/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst b/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst new file mode 100644 index 00000000000000..32cc8073d3f79c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-13-10-23-29.bpo-40612.gOIreM.rst @@ -0,0 +1,2 @@ +Fix edge cases in SyntaxError formatting. If the offset is <= 0, no caret is printed. +If the offset is > line length, the caret is printed pointing just after the last character. diff --git a/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst b/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst new file mode 100644 index 00000000000000..87ede982f29677 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-15-17-38-21.bpo-40479.yamSCh.rst @@ -0,0 +1 @@ +The :mod:`hashlib` now compiles with OpenSSL 3.0.0-alpha2. diff --git a/Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst b/Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst new file mode 100644 index 00000000000000..468c1ac9eee17c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst @@ -0,0 +1 @@ +Restored the deprecated :mod:`xml.etree.cElementTree` module. diff --git a/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst b/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst new file mode 100644 index 00000000000000..5ce22eb8a92eef --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-05-06-00-41-11.bpo-40501._61wv_.rst @@ -0,0 +1,2 @@ +:mod:`uuid` no longer uses :mod:`ctypes` to load :file:`libuuid` or +:file:`rpcrt4.dll` at runtime. diff --git a/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst new file mode 100644 index 00000000000000..edb01182c3a5ce --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst @@ -0,0 +1,3 @@ +distutils.tests now saves/restores warnings filters to leave them unchanged. +Importing tests imports docutils which imports pkg_resources which adds a +warnings filter. diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst new file mode 100644 index 00000000000000..b59035971d7b08 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst @@ -0,0 +1,2 @@ +Update multissltest helper to test with latest OpenSSL 1.0.2, 1.1.0, 1.1.1, +and 3.0.0-alpha. diff --git a/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst new file mode 100644 index 00000000000000..4dc1ff480df87a --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-05-01-20-57-57.bpo-40458.Eb0ueI.rst @@ -0,0 +1 @@ +Increase reserved stack space to prevent overflow crash on Windows. diff --git a/Modules/Setup b/Modules/Setup index 6bf142419de3d9..87e73bac78faec 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -132,7 +132,7 @@ faulthandler faulthandler.c # # bpo-35053: The module must be builtin since _Py_NewReference() # can call _PyTraceMalloc_NewReference(). 
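Editor's note: bpo-36543 above restores the deprecated ``xml.etree.cElementTree`` module as a thin wrapper around ``xml.etree.ElementTree``. A quick sketch of what user code observes, assuming the wrapper still warns on import as it did before its removal::

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        import xml.etree.cElementTree as cET    # deprecated alias

    import xml.etree.ElementTree as ET
    print(cET.Element is ET.Element)            # True: same implementation
    print(any(issubclass(w.category, DeprecationWarning) for w in caught))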
-_tracemalloc _tracemalloc.c hashtable.c +_tracemalloc _tracemalloc.c # PEG-based parser module -- slated to be *the* parser _peg_parser _peg_parser.c diff --git a/Modules/_abc.c b/Modules/_abc.c index 7c040ef80ba3da..434bc454175b56 100644 --- a/Modules/_abc.c +++ b/Modules/_abc.c @@ -21,16 +21,9 @@ _Py_IDENTIFIER(__subclasshook__); typedef struct { PyTypeObject *_abc_data_type; + unsigned long long abc_invalidation_counter; } _abcmodule_state; -/* A global counter that is incremented each time a class is - registered as a virtual subclass of anything. It forces the - negative cache to be cleared before its next use. - Note: this counter is private. Use `abc.get_cache_token()` for - external code. */ -// FIXME: PEP 573: Move abc_invalidation_counter into _abcmodule_state. -static unsigned long long abc_invalidation_counter = 0; - static inline _abcmodule_state* get_abc_state(PyObject *module) { @@ -81,14 +74,21 @@ static PyObject * abc_data_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { _abc_data *self = (_abc_data *) type->tp_alloc(type, 0); + _abcmodule_state *state = NULL; if (self == NULL) { return NULL; } + state = PyType_GetModuleState(type); + if (state == NULL) { + Py_DECREF(self); + return NULL; + } + self->_abc_registry = NULL; self->_abc_cache = NULL; self->_abc_negative_cache = NULL; - self->_abc_negative_cache_version = abc_invalidation_counter; + self->_abc_negative_cache_version = state->abc_invalidation_counter; return (PyObject *) self; } @@ -495,7 +495,7 @@ _abc__abc_register_impl(PyObject *module, PyObject *self, PyObject *subclass) Py_DECREF(impl); /* Invalidate negative cache */ - abc_invalidation_counter++; + get_abc_state(module)->abc_invalidation_counter++; Py_INCREF(subclass); return subclass; @@ -540,7 +540,7 @@ _abc__abc_instancecheck_impl(PyObject *module, PyObject *self, } subtype = (PyObject *)Py_TYPE(instance); if (subtype == subclass) { - if (impl->_abc_negative_cache_version == abc_invalidation_counter) { + if (impl->_abc_negative_cache_version == get_abc_state(module)->abc_invalidation_counter) { incache = _in_weak_set(impl->_abc_negative_cache, subclass); if (incache < 0) { goto end; @@ -612,6 +612,7 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self, } PyObject *ok, *subclasses = NULL, *result = NULL; + _abcmodule_state *state = NULL; Py_ssize_t pos; int incache; _abc_data *impl = _get_impl(module, self); @@ -629,15 +630,16 @@ _abc__abc_subclasscheck_impl(PyObject *module, PyObject *self, goto end; } + state = get_abc_state(module); /* 2. Check negative cache; may have to invalidate. */ - if (impl->_abc_negative_cache_version < abc_invalidation_counter) { + if (impl->_abc_negative_cache_version < state->abc_invalidation_counter) { /* Invalidate the negative cache. 
*/ if (impl->_abc_negative_cache != NULL && PySet_Clear(impl->_abc_negative_cache) < 0) { goto end; } - impl->_abc_negative_cache_version = abc_invalidation_counter; + impl->_abc_negative_cache_version = state->abc_invalidation_counter; } else { incache = _in_weak_set(impl->_abc_negative_cache, subclass); @@ -830,7 +832,8 @@ static PyObject * _abc_get_cache_token_impl(PyObject *module) /*[clinic end generated code: output=c7d87841e033dacc input=70413d1c423ad9f9]*/ { - return PyLong_FromUnsignedLongLong(abc_invalidation_counter); + _abcmodule_state *state = get_abc_state(module); + return PyLong_FromUnsignedLongLong(state->abc_invalidation_counter); } static struct PyMethodDef _abcmodule_methods[] = { @@ -849,7 +852,8 @@ static int _abcmodule_exec(PyObject *module) { _abcmodule_state *state = get_abc_state(module); - state->_abc_data_type = (PyTypeObject *)PyType_FromSpec(&_abc_data_type_spec); + state->abc_invalidation_counter = 0; + state->_abc_data_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, &_abc_data_type_spec, NULL); if (state->_abc_data_type == NULL) { return -1; } diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index a03a63119bab3d..cc211a8895a8e8 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -2638,6 +2638,10 @@ task_step_impl(TaskObj *task, PyObject *exc) coro = task->task_coro; if (coro == NULL) { PyErr_SetString(PyExc_RuntimeError, "uninitialized Task object"); + if (clear_exc) { + /* We created 'exc' during this call */ + Py_DECREF(exc); + } return NULL; } diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c index 08991fd54808fb..c70b0e2a19fadc 100644 --- a/Modules/_cursesmodule.c +++ b/Modules/_cursesmodule.c @@ -3814,7 +3814,7 @@ update_lines_cols(void) return 0; } /* PyId_LINES.object will be initialized here. 
*/ - if (PyDict_SetItem(ModDict, PyId_LINES.object, o)) { + if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_LINES), o)) { Py_DECREF(m); Py_DECREF(o); return 0; @@ -3830,7 +3830,7 @@ update_lines_cols(void) Py_DECREF(o); return 0; } - if (PyDict_SetItem(ModDict, PyId_COLS.object, o)) { + if (PyDict_SetItem(ModDict, _PyUnicode_FromId(&PyId_COLS), o)) { Py_DECREF(m); Py_DECREF(o); return 0; diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index fd4b4c268cc979..d158d3bae157b2 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -783,6 +783,7 @@ typedef struct lru_cache_object { Py_ssize_t misses; PyObject *cache_info_type; PyObject *dict; + PyObject *weakreflist; } lru_cache_object; static PyTypeObject lru_cache_type; @@ -1196,6 +1197,7 @@ lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw) Py_INCREF(cache_info_type); obj->cache_info_type = cache_info_type; obj->dict = NULL; + obj->weakreflist = NULL; return (PyObject *)obj; } @@ -1227,6 +1229,8 @@ lru_cache_dealloc(lru_cache_object *obj) lru_list_elem *list; /* bpo-31095: UnTrack is needed before calling any callbacks */ PyObject_GC_UnTrack(obj); + if (obj->weakreflist != NULL) + PyObject_ClearWeakRefs((PyObject*)obj); list = lru_cache_unlink_list(obj); Py_XDECREF(obj->cache); @@ -1384,7 +1388,8 @@ static PyTypeObject lru_cache_type = { (traverseproc)lru_cache_tp_traverse,/* tp_traverse */ (inquiry)lru_cache_tp_clear, /* tp_clear */ 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ + offsetof(lru_cache_object, weakreflist), + /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ lru_cache_methods, /* tp_methods */ diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c index 91834e5330f4bd..b7661b40d0a7ba 100644 --- a/Modules/_hashopenssl.c +++ b/Modules/_hashopenssl.c @@ -1109,19 +1109,25 @@ _hashlib.get_fips_mode -> int Determine the OpenSSL FIPS mode of operation. +For OpenSSL 3.0.0 and newer it returns the state of the default provider +in the default OSSL context. It's not quite the same as FIPS_mode() but good +enough for unittests. + Effectively any non-zero return value indicates FIPS mode; values other than 1 may have additional significance. - -See OpenSSL documentation for the FIPS_mode() function for details. [clinic start generated code]*/ static int _hashlib_get_fips_mode_impl(PyObject *module) -/*[clinic end generated code: output=87eece1bab4d3fa9 input=c2799c3132a36d6c]*/ +/*[clinic end generated code: output=87eece1bab4d3fa9 input=2db61538c41c6fef]*/ { + int result; +#if OPENSSL_VERSION_NUMBER >= 0x30000000L + result = EVP_default_properties_is_fips_enabled(NULL); +#else ERR_clear_error(); - int result = FIPS_mode(); + result = FIPS_mode(); if (result == 0) { // "If the library was built without support of the FIPS Object Module, // then the function will return 0 with an error code of @@ -1134,6 +1140,7 @@ _hashlib_get_fips_mode_impl(PyObject *module) } } return result; +#endif } #endif // !LIBRESSL_VERSION_NUMBER diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c index 1abc9ca6f206aa..f2c72ebd516589 100644 --- a/Modules/_io/textio.c +++ b/Modules/_io/textio.c @@ -1007,7 +1007,7 @@ io_check_errors(PyObject *errors) /* Avoid calling PyCodec_LookupError() before the codec registry is ready: before_PyUnicode_InitEncodings() is called. 
*/ - if (!interp->fs_codec.encoding) { + if (!interp->unicode.fs_codec.encoding) { return 0; } diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 1b7563cb20fc5d..5f217dcb8978e2 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -14,6 +14,7 @@ #include "Python.h" #include "pycore_byteswap.h" // _Py_bswap32() #include "pycore_initconfig.h" // _Py_GetConfigsAsDict() +#include "pycore_hashtable.h" // _Py_hashtable_new() #include "pycore_gc.h" // PyGC_Head @@ -62,10 +63,101 @@ test_bswap(PyObject *self, PyObject *Py_UNUSED(args)) } +#define TO_PTR(ch) ((void*)(uintptr_t)ch) +#define FROM_PTR(ptr) ((uintptr_t)ptr) +#define VALUE(key) (1 + ((int)(key) - 'a')) + +static Py_uhash_t +hash_char(const void *key) +{ + char ch = (char)FROM_PTR(key); + return ch; +} + + +static int +hashtable_cb(_Py_hashtable_t *table, + const void *key_ptr, const void *value_ptr, + void *user_data) +{ + int *count = (int *)user_data; + char key = (char)FROM_PTR(key_ptr); + int value = (int)FROM_PTR(value_ptr); + assert(value == VALUE(key)); + *count += 1; + return 0; +} + + +static PyObject* +test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) +{ + _Py_hashtable_t *table = _Py_hashtable_new(hash_char, + _Py_hashtable_compare_direct); + if (table == NULL) { + return PyErr_NoMemory(); + } + + // Using an newly allocated table must not crash + assert(table->nentries == 0); + assert(table->nbuckets > 0); + assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL); + + // Test _Py_hashtable_set() + char key; + for (key='a'; key <= 'z'; key++) { + int value = VALUE(key); + if (_Py_hashtable_set(table, TO_PTR(key), TO_PTR(value)) < 0) { + _Py_hashtable_destroy(table); + return PyErr_NoMemory(); + } + } + assert(table->nentries == 26); + assert(table->nbuckets > table->nentries); + + // Test _Py_hashtable_get_entry() + for (key='a'; key <= 'z'; key++) { + _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry(table, TO_PTR(key)); + assert(entry != NULL); + assert(entry->key = TO_PTR(key)); + assert(entry->value = TO_PTR(VALUE(key))); + } + + // Test _Py_hashtable_get() + for (key='a'; key <= 'z'; key++) { + void *value_ptr = _Py_hashtable_get(table, TO_PTR(key)); + assert((int)FROM_PTR(value_ptr) == VALUE(key)); + } + + // Test _Py_hashtable_steal() + key = 'p'; + void *value_ptr = _Py_hashtable_steal(table, TO_PTR(key)); + assert((int)FROM_PTR(value_ptr) == VALUE(key)); + assert(table->nentries == 25); + assert(_Py_hashtable_get_entry(table, TO_PTR(key)) == NULL); + + // Test _Py_hashtable_foreach() + int count = 0; + int res = _Py_hashtable_foreach(table, hashtable_cb, &count); + assert(res == 0); + assert(count == 25); + + // Test _Py_hashtable_clear() + _Py_hashtable_clear(table); + assert(table->nentries == 0); + assert(table->nbuckets > 0); + assert(_Py_hashtable_get(table, TO_PTR('x')) == NULL); + + _Py_hashtable_destroy(table); + Py_RETURN_NONE; +} + + static PyMethodDef TestMethods[] = { {"get_configs", get_configs, METH_NOARGS}, {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, {"test_bswap", test_bswap, METH_NOARGS}, + {"test_hashtable", test_hashtable, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/_testmultiphase.c b/Modules/_testmultiphase.c index eadc46fbf18675..d69ae628fa7a40 100644 --- a/Modules/_testmultiphase.c +++ b/Modules/_testmultiphase.c @@ -4,6 +4,19 @@ #include "Python.h" +/* State for testing module state access from methods */ + +typedef struct { + int counter; +} meth_state; + +/*[clinic input] +module _testmultiphase 
+ +class _testmultiphase.StateAccessType "StateAccessTypeObject *" "!StateAccessType" +[clinic start generated code]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=bab9f2fe3bd312ff]*/ + /* Example objects */ typedef struct { PyObject_HEAD @@ -14,6 +27,10 @@ typedef struct { PyObject *integer; } testmultiphase_state; +typedef struct { + PyObject_HEAD +} StateAccessTypeObject; + /* Example methods */ static int @@ -42,6 +59,7 @@ Example_demo(ExampleObject *self, PyObject *args) Py_RETURN_NONE; } +#include "clinic/_testmultiphase.c.h" static PyMethodDef Example_methods[] = { {"demo", (PyCFunction)Example_demo, METH_VARARGS, @@ -102,6 +120,150 @@ static PyType_Spec Example_Type_spec = { Example_Type_slots }; + +/*[clinic input] +_testmultiphase.StateAccessType.get_defining_module + + cls: defining_class + +Return the module of the defining class. +[clinic start generated code]*/ + +static PyObject * +_testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject *self, + PyTypeObject *cls) +/*[clinic end generated code: output=ba2a14284a5d0921 input=946149f91cf72c0d]*/ +{ + PyObject *retval; + retval = PyType_GetModule(cls); + if (retval == NULL) { + return NULL; + } + Py_INCREF(retval); + return retval; +} + +/*[clinic input] +_testmultiphase.StateAccessType.increment_count_clinic + + cls: defining_class + / + n: int = 1 + * + twice: bool = False + +Add 'n' from the module-state counter. + +Pass 'twice' to double that amount. + +This tests Argument Clinic support for defining_class. +[clinic start generated code]*/ + +static PyObject * +_testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObject *self, + PyTypeObject *cls, + int n, int twice) +/*[clinic end generated code: output=3b34f86bc5473204 input=551d482e1fe0b8f5]*/ +{ + meth_state *m_state = PyType_GetModuleState(cls); + if (twice) { + n *= 2; + } + m_state->counter += n; + + Py_RETURN_NONE; +} + +PyDoc_STRVAR(_StateAccessType_decrement_count__doc__, +"decrement_count($self, /, n=1, *, twice=None)\n" +"--\n" +"\n" +"Add 'n' from the module-state counter.\n" +"Pass 'twice' to double that amount.\n" +"(This is to test both positional and keyword arguments."); + +// Intentionally does not use Argument Clinic +static PyObject * +_StateAccessType_increment_count_noclinic(StateAccessTypeObject *self, + PyTypeObject *defining_class, + PyObject *const *args, + Py_ssize_t nargs, + PyObject *kwnames) +{ + if (!_PyArg_CheckPositional("StateAccessTypeObject.decrement_count", nargs, 0, 1)) { + return NULL; + } + long n = 1; + if (nargs) { + n = PyLong_AsLong(args[0]); + if (PyErr_Occurred()) { + return NULL; + } + } + if (kwnames && PyTuple_Check(kwnames)) { + if (PyTuple_GET_SIZE(kwnames) > 1 || + PyUnicode_CompareWithASCIIString( + PyTuple_GET_ITEM(kwnames, 0), + "twice" + )) { + PyErr_SetString( + PyExc_TypeError, + "decrement_count only takes 'twice' keyword argument" + ); + return NULL; + } + n *= 2; + } + meth_state *m_state = PyType_GetModuleState(defining_class); + m_state->counter += n; + + Py_RETURN_NONE; +} + +/*[clinic input] +_testmultiphase.StateAccessType.get_count + + cls: defining_class + +Return the value of the module-state counter. 
+[clinic start generated code]*/ + +static PyObject * +_testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self, + PyTypeObject *cls) +/*[clinic end generated code: output=64600f95b499a319 input=d5d181f12384849f]*/ +{ + meth_state *m_state = PyType_GetModuleState(cls); + return PyLong_FromLong(m_state->counter); +} + +static PyMethodDef StateAccessType_methods[] = { + _TESTMULTIPHASE_STATEACCESSTYPE_GET_DEFINING_MODULE_METHODDEF + _TESTMULTIPHASE_STATEACCESSTYPE_GET_COUNT_METHODDEF + _TESTMULTIPHASE_STATEACCESSTYPE_INCREMENT_COUNT_CLINIC_METHODDEF + { + "increment_count_noclinic", + (PyCFunction)(void(*)(void))_StateAccessType_increment_count_noclinic, + METH_METHOD|METH_FASTCALL|METH_KEYWORDS, + _StateAccessType_decrement_count__doc__ + }, + {NULL, NULL} /* sentinel */ +}; + +static PyType_Slot StateAccessType_Type_slots[] = { + {Py_tp_doc, "Type for testing per-module state access from methods."}, + {Py_tp_methods, StateAccessType_methods}, + {0, NULL} +}; + +static PyType_Spec StateAccessType_spec = { + "_testimportexec.StateAccessType", + sizeof(StateAccessTypeObject), + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_FINALIZE | Py_TPFLAGS_BASETYPE, + StateAccessType_Type_slots +}; + /* Function of two integers returning integer */ PyDoc_STRVAR(testexport_foo_doc, @@ -193,30 +355,39 @@ static int execfunc(PyObject *m) /* Add a custom type */ temp = PyType_FromSpec(&Example_Type_spec); - if (temp == NULL) + if (temp == NULL) { goto fail; - if (PyModule_AddObject(m, "Example", temp) != 0) + } + if (PyModule_AddObject(m, "Example", temp) != 0) { goto fail; + } + /* Add an exception type */ temp = PyErr_NewException("_testimportexec.error", NULL, NULL); - if (temp == NULL) + if (temp == NULL) { goto fail; - if (PyModule_AddObject(m, "error", temp) != 0) + } + if (PyModule_AddObject(m, "error", temp) != 0) { goto fail; + } /* Add Str */ temp = PyType_FromSpec(&Str_Type_spec); - if (temp == NULL) + if (temp == NULL) { goto fail; - if (PyModule_AddObject(m, "Str", temp) != 0) + } + if (PyModule_AddObject(m, "Str", temp) != 0) { goto fail; + } - if (PyModule_AddIntConstant(m, "int_const", 1969) != 0) + if (PyModule_AddIntConstant(m, "int_const", 1969) != 0) { goto fail; + } - if (PyModule_AddStringConstant(m, "str_const", "something different") != 0) + if (PyModule_AddStringConstant(m, "str_const", "something different") != 0) { goto fail; + } return 0; fail: @@ -620,6 +791,50 @@ PyInit__testmultiphase_exec_unreported_exception(PyObject *spec) return PyModuleDef_Init(&def_exec_unreported_exception); } +static int +meth_state_access_exec(PyObject *m) +{ + PyObject *temp; + meth_state *m_state; + + m_state = PyModule_GetState(m); + if (m_state == NULL) { + return -1; + } + + temp = PyType_FromModuleAndSpec(m, &StateAccessType_spec, NULL); + if (temp == NULL) { + return -1; + } + if (PyModule_AddObject(m, "StateAccessType", temp) != 0) { + return -1; + } + + + return 0; +} + +static PyModuleDef_Slot meth_state_access_slots[] = { + {Py_mod_exec, meth_state_access_exec}, + {0, NULL} +}; + +static PyModuleDef def_meth_state_access = { + PyModuleDef_HEAD_INIT, + .m_name = "_testmultiphase_meth_state_access", + .m_doc = PyDoc_STR("Module testing access" + " to state from methods."), + .m_size = sizeof(meth_state), + .m_slots = meth_state_access_slots, +}; + +PyMODINIT_FUNC +PyInit__testmultiphase_meth_state_access(PyObject *spec) +{ + return PyModuleDef_Init(&def_meth_state_access); +} + + /*** Helper for imp test ***/ static PyModuleDef imp_dummy_def = TEST_MODULE_DEF("imp_dummy", 
main_slots, testexport_methods); diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c index f530c5b0eb7b69..793c5e71548846 100644 --- a/Modules/_tkinter.c +++ b/Modules/_tkinter.c @@ -54,7 +54,7 @@ Copyright (C) 1994 Steen Lumholt. #if TK_HEX_VERSION >= 0x08050208 && TK_HEX_VERSION < 0x08060000 || \ TK_HEX_VERSION >= 0x08060200 -#define HAVE_LIBTOMMAMTH +#define HAVE_LIBTOMMATH #include #endif @@ -965,7 +965,7 @@ static PyType_Spec PyTclObject_Type_spec = { #define CHECK_STRING_LENGTH(s) #endif -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH static Tcl_Obj* asBignumObj(PyObject *value) { @@ -1045,7 +1045,7 @@ AsObj(PyObject *value) #endif /* If there is an overflow in the wideInt conversion, fall through to bignum handling. */ -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH return asBignumObj(value); #endif /* If there is no wideInt or bignum support, @@ -1167,7 +1167,7 @@ fromWideIntObj(TkappObject *tkapp, Tcl_Obj *value) return NULL; } -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH static PyObject* fromBignumObj(TkappObject *tkapp, Tcl_Obj *value) { @@ -1247,7 +1247,7 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) fall through to bignum handling. */ } -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH if (value->typePtr == tkapp->IntType || value->typePtr == tkapp->WideIntType || value->typePtr == tkapp->BignumType) { @@ -1300,7 +1300,7 @@ FromObj(TkappObject *tkapp, Tcl_Obj *value) } #endif -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH if (tkapp->BignumType == NULL && strcmp(value->typePtr->name, "bignum") == 0) { /* bignum type is not registered in Tcl */ @@ -2001,7 +2001,7 @@ _tkinter_tkapp_getint(TkappObject *self, PyObject *arg) Prefer bignum because Tcl_GetWideIntFromObj returns ambiguous result for value in ranges -2**64..-2**63-1 and 2**63..2**64-1 (on 32-bit platform). */ -#ifdef HAVE_LIBTOMMAMTH +#ifdef HAVE_LIBTOMMATH result = fromBignumObj(self, value); #else result = fromWideIntObj(self, value); diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index ea7e0127366ab0..4522d1afde9089 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -2,7 +2,7 @@ #include "pycore_gc.h" // PyGC_Head #include "pycore_pymem.h" // _Py_tracemalloc_config #include "pycore_traceback.h" -#include "hashtable.h" +#include "pycore_hashtable.h" #include "frameobject.h" // PyFrame_GetBack() #include "clinic/_tracemalloc.c.h" @@ -23,6 +23,9 @@ static void raw_free(void *ptr); # define TRACE_DEBUG #endif +#define TO_PTR(key) ((const void *)(uintptr_t)(key)) +#define FROM_PTR(key) ((uintptr_t)(key)) + /* Protected by the GIL */ static struct { PyMemAllocatorEx mem; @@ -47,16 +50,6 @@ static PyThread_type_lock tables_lock; #define DEFAULT_DOMAIN 0 -/* Pack the frame_t structure to reduce the memory footprint. */ -typedef struct -#ifdef __GNUC__ -__attribute__((packed)) -#endif -{ - uintptr_t ptr; - unsigned int domain; -} pointer_t; - /* Pack the frame_t structure to reduce the memory footprint on 64-bit architectures: 12 bytes instead of 16. */ typedef struct @@ -129,10 +122,14 @@ static traceback_t *tracemalloc_traceback = NULL; Protected by the GIL */ static _Py_hashtable_t *tracemalloc_tracebacks = NULL; -/* pointer (void*) => trace (trace_t). +/* pointer (void*) => trace (trace_t*). Protected by TABLES_LOCK(). */ static _Py_hashtable_t *tracemalloc_traces = NULL; +/* domain (unsigned int) => traces (_Py_hashtable_t). + Protected by TABLES_LOCK(). 
*/ +static _Py_hashtable_t *tracemalloc_domains = NULL; + #ifdef TRACE_DEBUG static void @@ -209,69 +206,44 @@ set_reentrant(int reentrant) static Py_uhash_t -hashtable_hash_pyobject(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_pyobject(const void *key) { - PyObject *obj; - - _Py_HASHTABLE_READ_KEY(ht, pkey, obj); + PyObject *obj = (PyObject *)key; return PyObject_Hash(obj); } static int -hashtable_compare_unicode(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) +hashtable_compare_unicode(const void *key1, const void *key2) { - PyObject *key1, *key2; - - _Py_HASHTABLE_READ_KEY(ht, pkey, key1); - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, key2); - - if (key1 != NULL && key2 != NULL) - return (PyUnicode_Compare(key1, key2) == 0); - else - return key1 == key2; + PyObject *obj1 = (PyObject *)key1; + PyObject *obj2 = (PyObject *)key2; + if (obj1 != NULL && obj2 != NULL) { + return (PyUnicode_Compare(obj1, obj2) == 0); + } + else { + return obj1 == obj2; + } } static Py_uhash_t -hashtable_hash_pointer_t(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_uint(const void *key_raw) { - pointer_t ptr; - Py_uhash_t hash; - - _Py_HASHTABLE_READ_KEY(ht, pkey, ptr); - - hash = (Py_uhash_t)_Py_HashPointer((void*)ptr.ptr); - hash ^= ptr.domain; - return hash; -} - - -static int -hashtable_compare_pointer_t(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) -{ - pointer_t ptr1, ptr2; - - _Py_HASHTABLE_READ_KEY(ht, pkey, ptr1); - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, ptr2); - - /* compare pointer before domain, because pointer is more likely to be - different */ - return (ptr1.ptr == ptr2.ptr && ptr1.domain == ptr2.domain); - + unsigned int key = (unsigned int)FROM_PTR(key_raw); + return (Py_uhash_t)key; } static _Py_hashtable_t * -hashtable_new(size_t key_size, size_t data_size, - _Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func) +hashtable_new(_Py_hashtable_hash_func hash_func, + _Py_hashtable_compare_func compare_func, + _Py_hashtable_destroy_func key_destroy_func, + _Py_hashtable_destroy_func value_destroy_func) { _Py_hashtable_allocator_t hashtable_alloc = {malloc, free}; - return _Py_hashtable_new_full(key_size, data_size, 0, - hash_func, compare_func, + return _Py_hashtable_new_full(hash_func, compare_func, + key_destroy_func, value_destroy_func, &hashtable_alloc); } @@ -290,39 +262,33 @@ raw_free(void *ptr) static Py_uhash_t -hashtable_hash_traceback(_Py_hashtable_t *ht, const void *pkey) +hashtable_hash_traceback(const void *key) { - traceback_t *traceback; - - _Py_HASHTABLE_READ_KEY(ht, pkey, traceback); + const traceback_t *traceback = (const traceback_t *)key; return traceback->hash; } static int -hashtable_compare_traceback(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) +hashtable_compare_traceback(const void *key1, const void *key2) { - traceback_t *traceback1, *traceback2; - const frame_t *frame1, *frame2; - int i; - - _Py_HASHTABLE_READ_KEY(ht, pkey, traceback1); - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, traceback2); + const traceback_t *traceback1 = (const traceback_t *)key1; + const traceback_t *traceback2 = (const traceback_t *)key2; - if (traceback1->nframe != traceback2->nframe) + if (traceback1->nframe != traceback2->nframe) { return 0; - - if (traceback1->total_nframe != traceback2->total_nframe) + } + if (traceback1->total_nframe != traceback2->total_nframe) { return 0; + } - for (i=0; i < traceback1->nframe; i++) { - frame1 = &traceback1->frames[i]; - frame2 = 
&traceback2->frames[i]; + for (int i=0; i < traceback1->nframe; i++) { + const frame_t *frame1 = &traceback1->frames[i]; + const frame_t *frame2 = &traceback2->frames[i]; - if (frame1->lineno != frame2->lineno) + if (frame1->lineno != frame2->lineno) { return 0; - + } if (frame1->filename != frame2->filename) { assert(PyUnicode_Compare(frame1->filename, frame2->filename) != 0); return 0; @@ -374,15 +340,15 @@ tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame) /* intern the filename */ _Py_hashtable_entry_t *entry; - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_filenames, filename); + entry = _Py_hashtable_get_entry(tracemalloc_filenames, filename); if (entry != NULL) { - _Py_HASHTABLE_ENTRY_READ_KEY(tracemalloc_filenames, entry, filename); + filename = (PyObject *)entry->key; } else { /* tracemalloc_filenames is responsible to keep a reference to the filename */ Py_INCREF(filename); - if (_Py_HASHTABLE_SET_NODATA(tracemalloc_filenames, filename) < 0) { + if (_Py_hashtable_set(tracemalloc_filenames, filename, NULL) < 0) { Py_DECREF(filename); #ifdef TRACE_DEBUG tracemalloc_error("failed to intern the filename"); @@ -469,9 +435,9 @@ traceback_new(void) traceback->hash = traceback_hash(traceback); /* intern the traceback */ - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_tracebacks, traceback); + entry = _Py_hashtable_get_entry(tracemalloc_tracebacks, traceback); if (entry != NULL) { - _Py_HASHTABLE_ENTRY_READ_KEY(tracemalloc_tracebacks, entry, traceback); + traceback = (traceback_t *)entry->key; } else { traceback_t *copy; @@ -488,7 +454,7 @@ traceback_new(void) } memcpy(copy, traceback, traceback_size); - if (_Py_HASHTABLE_SET_NODATA(tracemalloc_tracebacks, copy) < 0) { + if (_Py_hashtable_set(tracemalloc_tracebacks, copy, NULL) < 0) { raw_free(copy); #ifdef TRACE_DEBUG tracemalloc_error("failed to intern the traceback: putdata failed"); @@ -501,79 +467,54 @@ traceback_new(void) } -static int -tracemalloc_use_domain_cb(_Py_hashtable_t *old_traces, - _Py_hashtable_entry_t *entry, void *user_data) +static _Py_hashtable_t* +tracemalloc_create_traces_table(void) { - uintptr_t ptr; - pointer_t key; - _Py_hashtable_t *new_traces = (_Py_hashtable_t *)user_data; - const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(old_traces, entry); - - _Py_HASHTABLE_ENTRY_READ_KEY(old_traces, entry, ptr); - key.ptr = ptr; - key.domain = DEFAULT_DOMAIN; - - return _Py_hashtable_set(new_traces, - sizeof(key), &key, - old_traces->data_size, pdata); + return hashtable_new(_Py_hashtable_hash_ptr, + _Py_hashtable_compare_direct, + NULL, raw_free); } -/* Convert tracemalloc_traces from compact key (uintptr_t) to pointer_t key. - * Return 0 on success, -1 on error. 
*/ -static int -tracemalloc_use_domain(void) +static _Py_hashtable_t* +tracemalloc_create_domains_table(void) { - _Py_hashtable_t *new_traces = NULL; + return hashtable_new(hashtable_hash_uint, + _Py_hashtable_compare_direct, + NULL, + (_Py_hashtable_destroy_func)_Py_hashtable_destroy); +} - assert(!_Py_tracemalloc_config.use_domain); - new_traces = hashtable_new(sizeof(pointer_t), - sizeof(trace_t), - hashtable_hash_pointer_t, - hashtable_compare_pointer_t); - if (new_traces == NULL) { - return -1; +static _Py_hashtable_t* +tracemalloc_get_traces_table(unsigned int domain) +{ + if (domain == DEFAULT_DOMAIN) { + return tracemalloc_traces; } - - if (_Py_hashtable_foreach(tracemalloc_traces, tracemalloc_use_domain_cb, - new_traces) < 0) - { - _Py_hashtable_destroy(new_traces); - return -1; + else { + return _Py_hashtable_get(tracemalloc_domains, TO_PTR(domain)); } - - _Py_hashtable_destroy(tracemalloc_traces); - tracemalloc_traces = new_traces; - - _Py_tracemalloc_config.use_domain = 1; - - return 0; } static void tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr) { - trace_t trace; - int removed; - assert(_Py_tracemalloc_config.tracing); - if (_Py_tracemalloc_config.use_domain) { - pointer_t key = {ptr, domain}; - removed = _Py_HASHTABLE_POP(tracemalloc_traces, key, trace); - } - else { - removed = _Py_HASHTABLE_POP(tracemalloc_traces, ptr, trace); - } - if (!removed) { + _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); + if (!traces) { return; } - assert(tracemalloc_traced_memory >= trace.size); - tracemalloc_traced_memory -= trace.size; + trace_t *trace = _Py_hashtable_steal(traces, TO_PTR(ptr)); + if (!trace) { + return; + } + assert(tracemalloc_traced_memory >= trace->size); + tracemalloc_traced_memory -= trace->size; + raw_free(trace); } #define REMOVE_TRACE(ptr) \ @@ -584,63 +525,55 @@ static int tracemalloc_add_trace(unsigned int domain, uintptr_t ptr, size_t size) { - pointer_t key = {ptr, domain}; - traceback_t *traceback; - trace_t trace; - _Py_hashtable_entry_t* entry; - int res; - assert(_Py_tracemalloc_config.tracing); - traceback = traceback_new(); + traceback_t *traceback = traceback_new(); if (traceback == NULL) { return -1; } - if (!_Py_tracemalloc_config.use_domain && domain != DEFAULT_DOMAIN) { - /* first trace using a non-zero domain whereas traces use compact - (uintptr_t) keys: switch to pointer_t keys. 
*/ - if (tracemalloc_use_domain() < 0) { + _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); + if (traces == NULL) { + traces = tracemalloc_create_traces_table(); + if (traces == NULL) { return -1; } - } - if (_Py_tracemalloc_config.use_domain) { - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, key); - } - else { - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); + if (_Py_hashtable_set(tracemalloc_domains, TO_PTR(domain), traces) < 0) { + _Py_hashtable_destroy(traces); + return -1; + } } - if (entry != NULL) { + trace_t *trace = _Py_hashtable_get(traces, TO_PTR(ptr)); + if (trace != NULL) { /* the memory block is already tracked */ - _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace); - assert(tracemalloc_traced_memory >= trace.size); - tracemalloc_traced_memory -= trace.size; + assert(tracemalloc_traced_memory >= trace->size); + tracemalloc_traced_memory -= trace->size; - trace.size = size; - trace.traceback = traceback; - _Py_HASHTABLE_ENTRY_WRITE_DATA(tracemalloc_traces, entry, trace); + trace->size = size; + trace->traceback = traceback; } else { - trace.size = size; - trace.traceback = traceback; - - if (_Py_tracemalloc_config.use_domain) { - res = _Py_HASHTABLE_SET(tracemalloc_traces, key, trace); - } - else { - res = _Py_HASHTABLE_SET(tracemalloc_traces, ptr, trace); + trace = raw_malloc(sizeof(trace_t)); + if (trace == NULL) { + return -1; } + trace->size = size; + trace->traceback = traceback; + + int res = _Py_hashtable_set(traces, TO_PTR(ptr), trace); if (res != 0) { + raw_free(trace); return res; } } assert(tracemalloc_traced_memory <= SIZE_MAX - size); tracemalloc_traced_memory += size; - if (tracemalloc_traced_memory > tracemalloc_peak_traced_memory) + if (tracemalloc_traced_memory > tracemalloc_peak_traced_memory) { tracemalloc_peak_traced_memory = tracemalloc_traced_memory; + } return 0; } @@ -691,7 +624,7 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size) TABLES_LOCK(); /* tracemalloc_add_trace() updates the trace if there is already - a trace at address (domain, ptr2) */ + a trace at address ptr2 */ if (ptr2 != ptr) { REMOVE_TRACE(ptr); } @@ -895,27 +828,11 @@ tracemalloc_raw_realloc(void *ctx, void *ptr, size_t new_size) #endif /* TRACE_RAW_MALLOC */ -static int -tracemalloc_clear_filename(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, - void *user_data) +static void +tracemalloc_clear_filename(void *value) { - PyObject *filename; - - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, filename); + PyObject *filename = (PyObject *)value; Py_DECREF(filename); - return 0; -} - - -static int -traceback_free_traceback(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, - void *user_data) -{ - traceback_t *traceback; - - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, traceback); - raw_free(traceback); - return 0; } @@ -928,14 +845,13 @@ tracemalloc_clear_traces(void) TABLES_LOCK(); _Py_hashtable_clear(tracemalloc_traces); + _Py_hashtable_clear(tracemalloc_domains); tracemalloc_traced_memory = 0; tracemalloc_peak_traced_memory = 0; TABLES_UNLOCK(); - _Py_hashtable_foreach(tracemalloc_tracebacks, traceback_free_traceback, NULL); _Py_hashtable_clear(tracemalloc_tracebacks); - _Py_hashtable_foreach(tracemalloc_filenames, tracemalloc_clear_filename, NULL); _Py_hashtable_clear(tracemalloc_filenames); } @@ -975,29 +891,19 @@ tracemalloc_init(void) } #endif - tracemalloc_filenames = hashtable_new(sizeof(PyObject *), 0, - hashtable_hash_pyobject, - hashtable_compare_unicode); + tracemalloc_filenames = hashtable_new(hashtable_hash_pyobject, + 
hashtable_compare_unicode, + tracemalloc_clear_filename, NULL); - tracemalloc_tracebacks = hashtable_new(sizeof(traceback_t *), 0, - hashtable_hash_traceback, - hashtable_compare_traceback); + tracemalloc_tracebacks = hashtable_new(hashtable_hash_traceback, + hashtable_compare_traceback, + NULL, raw_free); - if (_Py_tracemalloc_config.use_domain) { - tracemalloc_traces = hashtable_new(sizeof(pointer_t), - sizeof(trace_t), - hashtable_hash_pointer_t, - hashtable_compare_pointer_t); - } - else { - tracemalloc_traces = hashtable_new(sizeof(uintptr_t), - sizeof(trace_t), - _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); - } + tracemalloc_traces = tracemalloc_create_traces_table(); + tracemalloc_domains = tracemalloc_create_domains_table(); if (tracemalloc_filenames == NULL || tracemalloc_tracebacks == NULL - || tracemalloc_traces == NULL) { + || tracemalloc_traces == NULL || tracemalloc_domains == NULL) { PyErr_NoMemory(); return -1; } @@ -1029,9 +935,10 @@ tracemalloc_deinit(void) tracemalloc_stop(); /* destroy hash tables */ + _Py_hashtable_destroy(tracemalloc_domains); + _Py_hashtable_destroy(tracemalloc_traces); _Py_hashtable_destroy(tracemalloc_tracebacks); _Py_hashtable_destroy(tracemalloc_filenames); - _Py_hashtable_destroy(tracemalloc_traces); #if defined(TRACE_RAW_MALLOC) if (tables_lock != NULL) { @@ -1198,11 +1105,11 @@ frame_to_pyobject(frame_t *frame) static PyObject* traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) { - int i; - PyObject *frames, *frame; + PyObject *frames; if (intern_table != NULL) { - if (_Py_HASHTABLE_GET(intern_table, traceback, frames)) { + frames = _Py_hashtable_get(intern_table, (const void *)traceback); + if (frames) { Py_INCREF(frames); return frames; } @@ -1212,8 +1119,8 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) if (frames == NULL) return NULL; - for (i=0; i < traceback->nframe; i++) { - frame = frame_to_pyobject(&traceback->frames[i]); + for (int i=0; i < traceback->nframe; i++) { + PyObject *frame = frame_to_pyobject(&traceback->frames[i]); if (frame == NULL) { Py_DECREF(frames); return NULL; @@ -1222,7 +1129,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) } if (intern_table != NULL) { - if (_Py_HASHTABLE_SET(intern_table, traceback, frames) < 0) { + if (_Py_hashtable_set(intern_table, traceback, frames) < 0) { Py_DECREF(frames); PyErr_NoMemory(); return NULL; @@ -1235,7 +1142,7 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) static PyObject* -trace_to_pyobject(unsigned int domain, trace_t *trace, +trace_to_pyobject(unsigned int domain, const trace_t *trace, _Py_hashtable_t *intern_tracebacks) { PyObject *trace_obj = NULL; @@ -1279,52 +1186,139 @@ trace_to_pyobject(unsigned int domain, trace_t *trace, typedef struct { _Py_hashtable_t *traces; + _Py_hashtable_t *domains; _Py_hashtable_t *tracebacks; PyObject *list; + unsigned int domain; } get_traces_t; + static int -tracemalloc_get_traces_fill(_Py_hashtable_t *traces, _Py_hashtable_entry_t *entry, - void *user_data) +tracemalloc_copy_trace(_Py_hashtable_t *traces, + const void *key, const void *value, + void *user_data) { - get_traces_t *get_traces = user_data; - unsigned int domain; - trace_t trace; - PyObject *tracemalloc_obj; - int res; + _Py_hashtable_t *traces2 = (_Py_hashtable_t *)user_data; + + trace_t *trace = (trace_t *)value; - if (_Py_tracemalloc_config.use_domain) { - pointer_t key; - _Py_HASHTABLE_ENTRY_READ_KEY(traces, entry, key); - domain = 
key.domain; + trace_t *trace2 = raw_malloc(sizeof(trace_t)); + if (traces2 == NULL) { + return -1; } - else { - domain = DEFAULT_DOMAIN; + *trace2 = *trace; + if (_Py_hashtable_set(traces2, key, trace2) < 0) { + raw_free(trace2); + return -1; } - _Py_HASHTABLE_ENTRY_READ_DATA(traces, entry, trace); + return 0; +} + + +static _Py_hashtable_t* +tracemalloc_copy_traces(_Py_hashtable_t *traces) +{ + _Py_hashtable_t *traces2 = tracemalloc_create_traces_table(); + if (traces2 == NULL) { + return NULL; + } + + int err = _Py_hashtable_foreach(traces, + tracemalloc_copy_trace, + traces2); + if (err) { + _Py_hashtable_destroy(traces2); + return NULL; + } + return traces2; +} + + +static int +tracemalloc_copy_domain(_Py_hashtable_t *domains, + const void *key, const void *value, + void *user_data) +{ + _Py_hashtable_t *domains2 = (_Py_hashtable_t *)user_data; + + unsigned int domain = (unsigned int)FROM_PTR(key); + _Py_hashtable_t *traces = (_Py_hashtable_t *)value; + + _Py_hashtable_t *traces2 = tracemalloc_copy_traces(traces); + if (_Py_hashtable_set(domains2, TO_PTR(domain), traces2) < 0) { + _Py_hashtable_destroy(traces2); + return -1; + } + return 0; +} + + +static _Py_hashtable_t* +tracemalloc_copy_domains(_Py_hashtable_t *domains) +{ + _Py_hashtable_t *domains2 = tracemalloc_create_domains_table(); + if (domains2 == NULL) { + return NULL; + } + + int err = _Py_hashtable_foreach(domains, + tracemalloc_copy_domain, + domains2); + if (err) { + _Py_hashtable_destroy(domains2); + return NULL; + } + return domains2; +} - tracemalloc_obj = trace_to_pyobject(domain, &trace, get_traces->tracebacks); - if (tracemalloc_obj == NULL) + +static int +tracemalloc_get_traces_fill(_Py_hashtable_t *traces, + const void *key, const void *value, + void *user_data) +{ + get_traces_t *get_traces = user_data; + + const trace_t *trace = (const trace_t *)value; + + PyObject *tuple = trace_to_pyobject(get_traces->domain, trace, + get_traces->tracebacks); + if (tuple == NULL) { return 1; + } - res = PyList_Append(get_traces->list, tracemalloc_obj); - Py_DECREF(tracemalloc_obj); - if (res < 0) + int res = PyList_Append(get_traces->list, tuple); + Py_DECREF(tuple); + if (res < 0) { return 1; + } return 0; } static int -tracemalloc_pyobject_decref_cb(_Py_hashtable_t *tracebacks, - _Py_hashtable_entry_t *entry, - void *user_data) +tracemalloc_get_traces_domain(_Py_hashtable_t *domains, + const void *key, const void *value, + void *user_data) { - PyObject *obj; - _Py_HASHTABLE_ENTRY_READ_DATA(tracebacks, entry, obj); + get_traces_t *get_traces = user_data; + + unsigned int domain = (unsigned int)FROM_PTR(key); + _Py_hashtable_t *traces = (_Py_hashtable_t *)value; + + get_traces->domain = domain; + return _Py_hashtable_foreach(traces, + tracemalloc_get_traces_fill, + get_traces); +} + + +static void +tracemalloc_pyobject_decref(void *value) +{ + PyObject *obj = (PyObject *)value; Py_DECREF(obj); - return 0; } @@ -1345,9 +1339,9 @@ _tracemalloc__get_traces_impl(PyObject *module) /*[clinic end generated code: output=e9929876ced4b5cc input=6c7d2230b24255aa]*/ { get_traces_t get_traces; - int err; - + get_traces.domain = DEFAULT_DOMAIN; get_traces.traces = NULL; + get_traces.domains = NULL; get_traces.tracebacks = NULL; get_traces.list = PyList_New(0); if (get_traces.list == NULL) @@ -1358,45 +1352,65 @@ _tracemalloc__get_traces_impl(PyObject *module) /* the traceback hash table is used temporarily to intern traceback tuple of (filename, lineno) tuples */ - get_traces.tracebacks = hashtable_new(sizeof(traceback_t *), - 
sizeof(PyObject *), - _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); + get_traces.tracebacks = hashtable_new(_Py_hashtable_hash_ptr, + _Py_hashtable_compare_direct, + NULL, tracemalloc_pyobject_decref); if (get_traces.tracebacks == NULL) { - PyErr_NoMemory(); - goto error; + goto no_memory; } + // Copy all traces so tracemalloc_get_traces_fill() doesn't have to disable + // temporarily tracemalloc which would impact other threads and so would + // miss allocations while get_traces() is called. TABLES_LOCK(); - get_traces.traces = _Py_hashtable_copy(tracemalloc_traces); + get_traces.traces = tracemalloc_copy_traces(tracemalloc_traces); TABLES_UNLOCK(); if (get_traces.traces == NULL) { - PyErr_NoMemory(); - goto error; + goto no_memory; } + TABLES_LOCK(); + get_traces.domains = tracemalloc_copy_domains(tracemalloc_domains); + TABLES_UNLOCK(); + + if (get_traces.domains == NULL) { + goto no_memory; + } + + // Convert traces to a list of tuples set_reentrant(1); - err = _Py_hashtable_foreach(get_traces.traces, - tracemalloc_get_traces_fill, &get_traces); + int err = _Py_hashtable_foreach(get_traces.traces, + tracemalloc_get_traces_fill, + &get_traces); + if (!err) { + err = _Py_hashtable_foreach(get_traces.domains, + tracemalloc_get_traces_domain, + &get_traces); + } set_reentrant(0); - if (err) + if (err) { goto error; + } goto finally; +no_memory: + PyErr_NoMemory(); + error: Py_CLEAR(get_traces.list); finally: if (get_traces.tracebacks != NULL) { - _Py_hashtable_foreach(get_traces.tracebacks, - tracemalloc_pyobject_decref_cb, NULL); _Py_hashtable_destroy(get_traces.tracebacks); } if (get_traces.traces != NULL) { _Py_hashtable_destroy(get_traces.traces); } + if (get_traces.domains != NULL) { + _Py_hashtable_destroy(get_traces.domains); + } return get_traces.list; } @@ -1405,26 +1419,26 @@ _tracemalloc__get_traces_impl(PyObject *module) static traceback_t* tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) { - trace_t trace; - int found; if (!_Py_tracemalloc_config.tracing) return NULL; + trace_t *trace; TABLES_LOCK(); - if (_Py_tracemalloc_config.use_domain) { - pointer_t key = {ptr, domain}; - found = _Py_HASHTABLE_GET(tracemalloc_traces, key, trace); + _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); + if (traces) { + trace = _Py_hashtable_get(traces, TO_PTR(ptr)); } else { - found = _Py_HASHTABLE_GET(tracemalloc_traces, ptr, trace); + trace = NULL; } TABLES_UNLOCK(); - if (!found) + if (!trace) { return NULL; + } - return trace.traceback; + return trace->traceback; } @@ -1564,6 +1578,17 @@ _tracemalloc_get_traceback_limit_impl(PyObject *module) } +static int +tracemalloc_get_tracemalloc_memory_cb(_Py_hashtable_t *domains, + const void *key, const void *value, + void *user_data) +{ + const _Py_hashtable_t *traces = value; + size_t *size = (size_t*)user_data; + *size += _Py_hashtable_size(traces); + return 0; +} + /*[clinic input] _tracemalloc.get_tracemalloc_memory @@ -1584,6 +1609,8 @@ _tracemalloc_get_tracemalloc_memory_impl(PyObject *module) TABLES_LOCK(); size += _Py_hashtable_size(tracemalloc_traces); + _Py_hashtable_foreach(tracemalloc_domains, + tracemalloc_get_tracemalloc_memory_cb, &size); TABLES_UNLOCK(); return PyLong_FromSize_t(size); @@ -1741,26 +1768,15 @@ _PyTraceMalloc_NewReference(PyObject *op) ptr = (uintptr_t)op; } - _Py_hashtable_entry_t* entry; int res = -1; TABLES_LOCK(); - if (_Py_tracemalloc_config.use_domain) { - pointer_t key = {ptr, DEFAULT_DOMAIN}; - entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, key); - } - else { - 
entry = _Py_HASHTABLE_GET_ENTRY(tracemalloc_traces, ptr); - } - - if (entry != NULL) { + trace_t *trace = _Py_hashtable_get(tracemalloc_traces, TO_PTR(ptr)); + if (trace != NULL) { /* update the traceback of the memory block */ traceback_t *traceback = traceback_new(); if (traceback != NULL) { - trace_t trace; - _Py_HASHTABLE_ENTRY_READ_DATA(tracemalloc_traces, entry, trace); - trace.traceback = traceback; - _Py_HASHTABLE_ENTRY_WRITE_DATA(tracemalloc_traces, entry, trace); + trace->traceback = traceback; res = 0; } } diff --git a/Modules/_uuidmodule.c b/Modules/_uuidmodule.c index 3be6c848ad6457..3f33e22a055c6d 100644 --- a/Modules/_uuidmodule.c +++ b/Modules/_uuidmodule.c @@ -1,5 +1,5 @@ /* - * Python UUID module that wraps libuuid - + * Python UUID module that wraps libuuid or Windows rpcrt4.dll. * DCE compatible Universally Unique Identifier library. */ @@ -12,6 +12,12 @@ #include #endif +#ifdef MS_WINDOWS +#include +#endif + +#ifndef MS_WINDOWS + static PyObject * py_uuid_generate_time_safe(PyObject *Py_UNUSED(context), PyObject *Py_UNUSED(ignored)) @@ -31,17 +37,50 @@ py_uuid_generate_time_safe(PyObject *Py_UNUSED(context), return Py_BuildValue("y#i", buf, sizeof(uuid), (int) status); # else return Py_BuildValue("y#i", (const char *) &uuid, sizeof(uuid), (int) status); -# endif -#else +# endif /* HAVE_UUID_CREATE */ +#else /* HAVE_UUID_GENERATE_TIME_SAFE */ uuid_generate_time(uuid); return Py_BuildValue("y#O", (const char *) uuid, sizeof(uuid), Py_None); -#endif +#endif /* HAVE_UUID_GENERATE_TIME_SAFE */ } +#else /* MS_WINDOWS */ + +static PyObject * +py_UuidCreate(PyObject *Py_UNUSED(context), + PyObject *Py_UNUSED(ignored)) +{ + UUID uuid; + RPC_STATUS res; + + Py_BEGIN_ALLOW_THREADS + res = UuidCreateSequential(&uuid); + Py_END_ALLOW_THREADS + + switch (res) { + case RPC_S_OK: + case RPC_S_UUID_LOCAL_ONLY: + case RPC_S_UUID_NO_ADDRESS: + /* + All success codes, but the latter two indicate that the UUID is random + rather than based on the MAC address. If the OS can't figure this out, + neither can we, so we'll take it anyway. + */ + return Py_BuildValue("y#", (const char *)&uuid, sizeof(uuid)); + } + PyErr_SetFromWindowsErr(res); + return NULL; +} + +#endif /* MS_WINDOWS */ + + static int uuid_exec(PyObject *module) { assert(sizeof(uuid_t) == 16); -#ifdef HAVE_UUID_GENERATE_TIME_SAFE +#if defined(MS_WINDOWS) + int has_uuid_generate_time_safe = 0; +#elif defined(HAVE_UUID_GENERATE_TIME_SAFE) int has_uuid_generate_time_safe = 1; #else int has_uuid_generate_time_safe = 0; @@ -54,7 +93,12 @@ uuid_exec(PyObject *module) { } static PyMethodDef uuid_methods[] = { +#if defined(HAVE_UUID_UUID_H) || defined(HAVE_UUID_H) {"generate_time_safe", py_uuid_generate_time_safe, METH_NOARGS, NULL}, +#endif +#if defined(MS_WINDOWS) + {"UuidCreate", py_UuidCreate, METH_NOARGS, NULL}, +#endif {NULL, NULL, 0, NULL} /* sentinel */ }; diff --git a/Modules/_xxsubinterpretersmodule.c b/Modules/_xxsubinterpretersmodule.c index de11c090870f94..8a6fce9e0b4bd9 100644 --- a/Modules/_xxsubinterpretersmodule.c +++ b/Modules/_xxsubinterpretersmodule.c @@ -1939,6 +1939,20 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, return -1; } +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + // Switch to interpreter. + PyThreadState *new_tstate = PyInterpreterState_ThreadHead(interp); + PyThreadState *save1 = PyEval_SaveThread(); + + (void)PyThreadState_Swap(new_tstate); + + // Run the script. 
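+    // The script still runs on this OS thread, but under a thread state that
+    // belongs to the target interpreter: "save1" holds the calling thread
+    // state released by PyEval_SaveThread() above, and PyEval_RestoreThread()
+    // below re-attaches it once _run_script() returns.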
+ _sharedexception *exc = NULL; + int result = _run_script(interp, codestr, shared, &exc); + + // Switch back. + PyEval_RestoreThread(save1); +#else // Switch to interpreter. PyThreadState *save_tstate = NULL; if (interp != PyInterpreterState_Get()) { @@ -1956,6 +1970,7 @@ _run_script_in_interpreter(PyInterpreterState *interp, const char *codestr, if (save_tstate != NULL) { PyThreadState_Swap(save_tstate); } +#endif // Propagate any exception out to the caller. if (exc != NULL) { diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 4920ad7b82124c..732703e481adcd 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -235,24 +235,31 @@ BB_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) static PyObject * u_getitem(arrayobject *ap, Py_ssize_t i) { - return PyUnicode_FromOrdinal(((Py_UNICODE *) ap->ob_item)[i]); + return PyUnicode_FromOrdinal(((wchar_t *) ap->ob_item)[i]); } static int u_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) { - Py_UNICODE *p; - Py_ssize_t len; - - if (!PyArg_Parse(v, "u#;array item must be unicode character", &p, &len)) + PyObject *u; + if (!PyArg_Parse(v, "U;array item must be unicode character", &u)) { return -1; - if (len != 1) { + } + + Py_ssize_t len = PyUnicode_AsWideChar(u, NULL, 0); + if (len != 2) { PyErr_SetString(PyExc_TypeError, "array item must be unicode character"); return -1; } - if (i >= 0) - ((Py_UNICODE *)ap->ob_item)[i] = p[0]; + + wchar_t w; + len = PyUnicode_AsWideChar(u, &w, 1); + assert(len == 1); + + if (i >= 0) { + ((wchar_t *)ap->ob_item)[i] = w; + } return 0; } @@ -530,7 +537,7 @@ d_setitem(arrayobject *ap, Py_ssize_t i, PyObject *v) DEFINE_COMPAREITEMS(b, signed char) DEFINE_COMPAREITEMS(BB, unsigned char) -DEFINE_COMPAREITEMS(u, Py_UNICODE) +DEFINE_COMPAREITEMS(u, wchar_t) DEFINE_COMPAREITEMS(h, short) DEFINE_COMPAREITEMS(HH, unsigned short) DEFINE_COMPAREITEMS(i, int) @@ -548,7 +555,7 @@ DEFINE_COMPAREITEMS(QQ, unsigned long long) static const struct arraydescr descriptors[] = { {'b', 1, b_getitem, b_setitem, b_compareitems, "b", 1, 1}, {'B', 1, BB_getitem, BB_setitem, BB_compareitems, "B", 1, 0}, - {'u', sizeof(Py_UNICODE), u_getitem, u_setitem, u_compareitems, "u", 0, 0}, + {'u', sizeof(wchar_t), u_getitem, u_setitem, u_compareitems, "u", 0, 0}, {'h', sizeof(short), h_getitem, h_setitem, h_compareitems, "h", 1, 1}, {'H', sizeof(short), HH_getitem, HH_setitem, HH_compareitems, "H", 1, 0}, {'i', sizeof(int), i_getitem, i_setitem, i_compareitems, "i", 1, 1}, @@ -1660,7 +1667,7 @@ array_array_tobytes_impl(arrayobject *self) /*[clinic input] array.array.fromunicode - ustr: Py_UNICODE(zeroes=True) + ustr: unicode / Extends this array with data from the unicode string ustr. @@ -1671,25 +1678,28 @@ some other type. 
[clinic start generated code]*/ static PyObject * -array_array_fromunicode_impl(arrayobject *self, const Py_UNICODE *ustr, - Py_ssize_clean_t ustr_length) -/*[clinic end generated code: output=cf2f662908e2befc input=150f00566ffbca6e]*/ +array_array_fromunicode_impl(arrayobject *self, PyObject *ustr) +/*[clinic end generated code: output=24359f5e001a7f2b input=025db1fdade7a4ce]*/ { - char typecode; - - typecode = self->ob_descr->typecode; - if (typecode != 'u') { + if (self->ob_descr->typecode != 'u') { PyErr_SetString(PyExc_ValueError, "fromunicode() may only be called on " "unicode type arrays"); return NULL; } - if (ustr_length > 0) { + + Py_ssize_t ustr_length = PyUnicode_AsWideChar(ustr, NULL, 0); + assert(ustr_length > 0); + if (ustr_length > 1) { + ustr_length--; /* trim trailing NUL character */ Py_ssize_t old_size = Py_SIZE(self); - if (array_resize(self, old_size + ustr_length) == -1) + if (array_resize(self, old_size + ustr_length) == -1) { return NULL; - memcpy(self->ob_item + old_size * sizeof(Py_UNICODE), - ustr, ustr_length * sizeof(Py_UNICODE)); + } + + // must not fail + PyUnicode_AsWideChar( + ustr, ((wchar_t *)self->ob_item) + old_size, ustr_length); } Py_RETURN_NONE; @@ -1709,14 +1719,12 @@ static PyObject * array_array_tounicode_impl(arrayobject *self) /*[clinic end generated code: output=08e442378336e1ef input=127242eebe70b66d]*/ { - char typecode; - typecode = self->ob_descr->typecode; - if (typecode != 'u') { + if (self->ob_descr->typecode != 'u') { PyErr_SetString(PyExc_ValueError, "tounicode() may only be called on unicode type arrays"); return NULL; } - return PyUnicode_FromWideChar((Py_UNICODE *) self->ob_item, Py_SIZE(self)); + return PyUnicode_FromWideChar((wchar_t *) self->ob_item, Py_SIZE(self)); } /*[clinic input] @@ -2675,30 +2683,20 @@ array_new(PyTypeObject *type, PyObject *args, PyObject *kwds) Py_DECREF(v); } else if (initial != NULL && PyUnicode_Check(initial)) { - Py_UNICODE *ustr; Py_ssize_t n; - - ustr = PyUnicode_AsUnicode(initial); + wchar_t *ustr = PyUnicode_AsWideCharString(initial, &n); if (ustr == NULL) { - PyErr_NoMemory(); Py_DECREF(a); return NULL; } - n = PyUnicode_GET_DATA_SIZE(initial); if (n > 0) { arrayobject *self = (arrayobject *)a; - char *item = self->ob_item; - item = (char *)PyMem_Realloc(item, n); - if (item == NULL) { - PyErr_NoMemory(); - Py_DECREF(a); - return NULL; - } - self->ob_item = item; - Py_SET_SIZE(self, n / sizeof(Py_UNICODE)); - memcpy(item, ustr, n); - self->allocated = Py_SIZE(self); + // self->ob_item may be NULL but it is safe. + PyMem_Free(self->ob_item); + self->ob_item = (char *)ustr; + Py_SET_SIZE(self, n); + self->allocated = n; } } else if (initial != NULL && array_Check(initial) && len > 0) { diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index 275784dcdcd0e9..1b0c6d0ce43d27 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -733,10 +733,12 @@ PyDoc_STRVAR(_hashlib_get_fips_mode__doc__, "\n" "Determine the OpenSSL FIPS mode of operation.\n" "\n" -"Effectively any non-zero return value indicates FIPS mode;\n" -"values other than 1 may have additional significance.\n" +"For OpenSSL 3.0.0 and newer it returns the state of the default provider\n" +"in the default OSSL context. 
It\'s not quite the same as FIPS_mode() but good\n" +"enough for unittests.\n" "\n" -"See OpenSSL documentation for the FIPS_mode() function for details."); +"Effectively any non-zero return value indicates FIPS mode;\n" +"values other than 1 may have additional significance."); #define _HASHLIB_GET_FIPS_MODE_METHODDEF \ {"get_fips_mode", (PyCFunction)_hashlib_get_fips_mode, METH_NOARGS, _hashlib_get_fips_mode__doc__}, @@ -769,4 +771,4 @@ _hashlib_get_fips_mode(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _HASHLIB_GET_FIPS_MODE_METHODDEF #define _HASHLIB_GET_FIPS_MODE_METHODDEF #endif /* !defined(_HASHLIB_GET_FIPS_MODE_METHODDEF) */ -/*[clinic end generated code: output=b0703dd5a043394d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4babbd88389a196b input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testmultiphase.c.h b/Modules/clinic/_testmultiphase.c.h new file mode 100644 index 00000000000000..0d38c230f71865 --- /dev/null +++ b/Modules/clinic/_testmultiphase.c.h @@ -0,0 +1,101 @@ +/*[clinic input] +preserve +[clinic start generated code]*/ + +PyDoc_STRVAR(_testmultiphase_StateAccessType_get_defining_module__doc__, +"get_defining_module($self, /)\n" +"--\n" +"\n" +"Return the module of the defining class."); + +#define _TESTMULTIPHASE_STATEACCESSTYPE_GET_DEFINING_MODULE_METHODDEF \ + {"get_defining_module", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_get_defining_module, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_get_defining_module__doc__}, + +static PyObject * +_testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject *self, + PyTypeObject *cls); + +static PyObject * +_testmultiphase_StateAccessType_get_defining_module(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":get_defining_module", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _testmultiphase_StateAccessType_get_defining_module_impl(self, cls); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testmultiphase_StateAccessType_increment_count_clinic__doc__, +"increment_count_clinic($self, /, n=1, *, twice=False)\n" +"--\n" +"\n" +"Add \'n\' from the module-state counter.\n" +"\n" +"Pass \'twice\' to double that amount.\n" +"\n" +"This tests Argument Clinic support for defining_class."); + +#define _TESTMULTIPHASE_STATEACCESSTYPE_INCREMENT_COUNT_CLINIC_METHODDEF \ + {"increment_count_clinic", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_increment_count_clinic, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_increment_count_clinic__doc__}, + +static PyObject * +_testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObject *self, + PyTypeObject *cls, + int n, int twice); + +static PyObject * +_testmultiphase_StateAccessType_increment_count_clinic(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = {"n", "twice", NULL}; + static _PyArg_Parser _parser = {"|i$p:increment_count_clinic", _keywords, 0}; + int n = 1; + int twice = 0; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &n, &twice)) { + goto exit; + } + return_value = 
_testmultiphase_StateAccessType_increment_count_clinic_impl(self, cls, n, twice); + +exit: + return return_value; +} + +PyDoc_STRVAR(_testmultiphase_StateAccessType_get_count__doc__, +"get_count($self, /)\n" +"--\n" +"\n" +"Return the value of the module-state counter."); + +#define _TESTMULTIPHASE_STATEACCESSTYPE_GET_COUNT_METHODDEF \ + {"get_count", (PyCFunction)(void(*)(void))_testmultiphase_StateAccessType_get_count, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _testmultiphase_StateAccessType_get_count__doc__}, + +static PyObject * +_testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self, + PyTypeObject *cls); + +static PyObject * +_testmultiphase_StateAccessType_get_count(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +{ + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":get_count", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _testmultiphase_StateAccessType_get_count_impl(self, cls); + +exit: + return return_value; +} +/*[clinic end generated code: output=39eea487e94e7f5d input=a9049054013a1b77]*/ diff --git a/Modules/clinic/arraymodule.c.h b/Modules/clinic/arraymodule.c.h index e1f4b0397b9cb5..b9245ca91d5fa9 100644 --- a/Modules/clinic/arraymodule.c.h +++ b/Modules/clinic/arraymodule.c.h @@ -380,20 +380,23 @@ PyDoc_STRVAR(array_array_fromunicode__doc__, {"fromunicode", (PyCFunction)array_array_fromunicode, METH_O, array_array_fromunicode__doc__}, static PyObject * -array_array_fromunicode_impl(arrayobject *self, const Py_UNICODE *ustr, - Py_ssize_clean_t ustr_length); +array_array_fromunicode_impl(arrayobject *self, PyObject *ustr); static PyObject * array_array_fromunicode(arrayobject *self, PyObject *arg) { PyObject *return_value = NULL; - const Py_UNICODE *ustr; - Py_ssize_clean_t ustr_length; + PyObject *ustr; - if (!PyArg_Parse(arg, "u#:fromunicode", &ustr, &ustr_length)) { + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("fromunicode", "argument", "str", arg); goto exit; } - return_value = array_array_fromunicode_impl(self, ustr, ustr_length); + if (PyUnicode_READY(arg) == -1) { + goto exit; + } + ustr = arg; + return_value = array_array_fromunicode_impl(self, ustr); exit: return return_value; @@ -531,4 +534,4 @@ PyDoc_STRVAR(array_arrayiterator___setstate____doc__, #define ARRAY_ARRAYITERATOR___SETSTATE___METHODDEF \ {"__setstate__", (PyCFunction)array_arrayiterator___setstate__, METH_O, array_arrayiterator___setstate____doc__}, -/*[clinic end generated code: output=f649fc0bc9f6b13a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=9f70748dd3bc532f input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index a2b4566443b517..41baa455739797 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -2886,7 +2886,7 @@ PyDoc_STRVAR(os_sched_setscheduler__doc__, static PyObject * os_sched_setscheduler_impl(PyObject *module, pid_t pid, int policy, - struct sched_param *param); + PyObject *param_obj); static PyObject * os_sched_setscheduler(PyObject *module, PyObject *const *args, Py_ssize_t nargs) @@ -2894,13 +2894,13 @@ os_sched_setscheduler(PyObject *module, PyObject *const *args, Py_ssize_t nargs) PyObject *return_value = NULL; pid_t pid; int policy; - struct sched_param param; + PyObject *param_obj; - if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID 
"iO&:sched_setscheduler", - &pid, &policy, convert_sched_param, ¶m)) { + if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "iO:sched_setscheduler", + &pid, &policy, ¶m_obj)) { goto exit; } - return_value = os_sched_setscheduler_impl(module, pid, policy, ¶m); + return_value = os_sched_setscheduler_impl(module, pid, policy, param_obj); exit: return return_value; @@ -2957,21 +2957,20 @@ PyDoc_STRVAR(os_sched_setparam__doc__, {"sched_setparam", (PyCFunction)(void(*)(void))os_sched_setparam, METH_FASTCALL, os_sched_setparam__doc__}, static PyObject * -os_sched_setparam_impl(PyObject *module, pid_t pid, - struct sched_param *param); +os_sched_setparam_impl(PyObject *module, pid_t pid, PyObject *param_obj); static PyObject * os_sched_setparam(PyObject *module, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; pid_t pid; - struct sched_param param; + PyObject *param_obj; - if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "O&:sched_setparam", - &pid, convert_sched_param, ¶m)) { + if (!_PyArg_ParseStack(args, nargs, "" _Py_PARSE_PID "O:sched_setparam", + &pid, ¶m_obj)) { goto exit; } - return_value = os_sched_setparam_impl(module, pid, ¶m); + return_value = os_sched_setparam_impl(module, pid, param_obj); exit: return return_value; @@ -8389,18 +8388,24 @@ PyDoc_STRVAR(os_DirEntry_is_symlink__doc__, "Return True if the entry is a symbolic link; cached per entry."); #define OS_DIRENTRY_IS_SYMLINK_METHODDEF \ - {"is_symlink", (PyCFunction)os_DirEntry_is_symlink, METH_NOARGS, os_DirEntry_is_symlink__doc__}, + {"is_symlink", (PyCFunction)(void(*)(void))os_DirEntry_is_symlink, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_symlink__doc__}, static int -os_DirEntry_is_symlink_impl(DirEntry *self); +os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class); static PyObject * -os_DirEntry_is_symlink(DirEntry *self, PyObject *Py_UNUSED(ignored)) +os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":is_symlink", _keywords, 0}; int _return_value; - _return_value = os_DirEntry_is_symlink_impl(self); + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + _return_value = os_DirEntry_is_symlink_impl(self, defining_class); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -8417,34 +8422,25 @@ PyDoc_STRVAR(os_DirEntry_stat__doc__, "Return stat_result object for the entry; cached per entry."); #define OS_DIRENTRY_STAT_METHODDEF \ - {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__}, + {"stat", (PyCFunction)(void(*)(void))os_DirEntry_stat, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_stat__doc__}, static PyObject * -os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks); +os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks); static PyObject * -os_DirEntry_stat(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_stat(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"follow_symlinks", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "stat", 0}; - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + static _PyArg_Parser _parser = {"|$p:stat", _keywords, 0}; int follow_symlinks = 1; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_kwonly; - } - follow_symlinks = PyObject_IsTrue(args[0]); - if (follow_symlinks < 0) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &follow_symlinks)) { goto exit; } -skip_optional_kwonly: - return_value = os_DirEntry_stat_impl(self, follow_symlinks); + return_value = os_DirEntry_stat_impl(self, defining_class, follow_symlinks); exit: return return_value; @@ -8457,35 +8453,26 @@ PyDoc_STRVAR(os_DirEntry_is_dir__doc__, "Return True if the entry is a directory; cached per entry."); #define OS_DIRENTRY_IS_DIR_METHODDEF \ - {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_dir__doc__}, + {"is_dir", (PyCFunction)(void(*)(void))os_DirEntry_is_dir, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_dir__doc__}, static int -os_DirEntry_is_dir_impl(DirEntry *self, int follow_symlinks); +os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks); static PyObject * -os_DirEntry_is_dir(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_dir(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"follow_symlinks", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "is_dir", 0}; - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; + static _PyArg_Parser _parser = {"|$p:is_dir", _keywords, 0}; int follow_symlinks = 1; int _return_value; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_kwonly; - } - follow_symlinks = PyObject_IsTrue(args[0]); - if (follow_symlinks < 0) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &follow_symlinks)) { goto exit; } -skip_optional_kwonly: - _return_value = os_DirEntry_is_dir_impl(self, follow_symlinks); + _return_value = os_DirEntry_is_dir_impl(self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -8502,35 +8489,26 @@ PyDoc_STRVAR(os_DirEntry_is_file__doc__, "Return True if the entry is a file; cached per entry."); #define OS_DIRENTRY_IS_FILE_METHODDEF \ - {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__}, + {"is_file", (PyCFunction)(void(*)(void))os_DirEntry_is_file, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, os_DirEntry_is_file__doc__}, static int -os_DirEntry_is_file_impl(DirEntry *self, int follow_symlinks); +os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks); static PyObject * -os_DirEntry_is_file(DirEntry *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_file(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; static const char * const _keywords[] = {"follow_symlinks", NULL}; - static _PyArg_Parser _parser = {NULL, _keywords, "is_file", 0}; - PyObject *argsbuf[1]; - Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; + static _PyArg_Parser _parser = {"|$p:is_file", _keywords, 0}; int follow_symlinks = 1; int _return_value; - args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 0, 0, 0, argsbuf); - if (!args) { - goto exit; - } - if (!noptargs) { - goto skip_optional_kwonly; - } - follow_symlinks = PyObject_IsTrue(args[0]); - if (follow_symlinks < 0) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &follow_symlinks)) { goto exit; } -skip_optional_kwonly: - _return_value = os_DirEntry_is_file_impl(self, follow_symlinks); + _return_value = os_DirEntry_is_file_impl(self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -9418,4 +9396,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=ba73b68f1c435ff6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=005919eaaef3f8e6 input=a9049054013a1b77]*/ diff --git a/Modules/errnomodule.c b/Modules/errnomodule.c index 06ed53a64dbdc1..d99bed45bd6a23 100644 --- a/Modules/errnomodule.c +++ b/Modules/errnomodule.c @@ -46,66 +46,57 @@ static PyMethodDef errno_methods[] = { /* Helper function doing the dictionary inserting */ -static void -_inscode(PyObject *d, PyObject *de, const char *name, int code) +static int +_add_errcode(PyObject *module_dict, PyObject *error_dict, const char *name_str, int code_int) { - PyObject *u = PyUnicode_FromString(name); - PyObject *v = PyLong_FromLong((long) code); - - /* Don't bother checking for errors; they'll be caught at the end - * of the module initialization function by the caller of - * initerrno(). - */ - if (u && v) { - /* insert in modules dict */ - PyDict_SetItem(d, u, v); - /* insert in errorcode dict */ - PyDict_SetItem(de, v, u); + PyObject *name = PyUnicode_FromString(name_str); + if (!name) { + return -1; } - Py_XDECREF(u); - Py_XDECREF(v); -} -PyDoc_STRVAR(errno__doc__, -"This module makes available standard errno system symbols.\n\ -\n\ -The value of each symbol is the corresponding integer value,\n\ -e.g., on most systems, errno.ENOENT equals the integer 2.\n\ -\n\ -The dictionary errno.errorcode maps numeric codes to symbol names,\n\ -e.g., errno.errorcode[2] could be the string 'ENOENT'.\n\ -\n\ -Symbols that are not relevant to the underlying system are not defined.\n\ -\n\ -To map error codes to error messages, use the function os.strerror(),\n\ -e.g. 
os.strerror(2) could return 'No such file or directory'."); + PyObject *code = PyLong_FromLong(code_int); + if (!code) { + Py_DECREF(name); + return -1; + } -static struct PyModuleDef errnomodule = { - PyModuleDef_HEAD_INIT, - "errno", - errno__doc__, - -1, - errno_methods, - NULL, - NULL, - NULL, - NULL -}; + int ret = -1; + /* insert in modules dict */ + if (PyDict_SetItem(module_dict, name, code) < 0) { + goto end; + } + /* insert in errorcode dict */ + if (PyDict_SetItem(error_dict, code, name) < 0) { + goto end; + } + ret = 0; +end: + Py_DECREF(name); + Py_DECREF(code); + return ret; +} -PyMODINIT_FUNC -PyInit_errno(void) +static int +errno_exec(PyObject *module) { - PyObject *m, *d, *de; - m = PyModule_Create(&errnomodule); - if (m == NULL) - return NULL; - d = PyModule_GetDict(m); - de = PyDict_New(); - if (!d || !de || PyDict_SetItemString(d, "errorcode", de) < 0) - return NULL; + PyObject *module_dict = PyModule_GetDict(module); + PyObject *error_dict = PyDict_New(); + if (!module_dict || !error_dict) { + return -1; + } + if (PyDict_SetItemString(module_dict, "errorcode", error_dict) < 0) { + Py_DECREF(error_dict); + return -1; + } /* Macro so I don't have to edit each and every line below... */ -#define inscode(d, ds, de, name, code, comment) _inscode(d, de, name, code) +#define add_errcode(name, code, comment) \ + do { \ + if (_add_errcode(module_dict, error_dict, name, code) < 0) { \ + Py_DECREF(error_dict); \ + return -1; \ + } \ + } while (0); /* * The names and comments are borrowed from linux/include/errno.h, @@ -116,820 +107,854 @@ PyInit_errno(void) */ #ifdef ENODEV - inscode(d, ds, de, "ENODEV", ENODEV, "No such device"); + add_errcode("ENODEV", ENODEV, "No such device"); #endif #ifdef ENOCSI - inscode(d, ds, de, "ENOCSI", ENOCSI, "No CSI structure available"); + add_errcode("ENOCSI", ENOCSI, "No CSI structure available"); #endif #ifdef EHOSTUNREACH - inscode(d, ds, de, "EHOSTUNREACH", EHOSTUNREACH, "No route to host"); + add_errcode("EHOSTUNREACH", EHOSTUNREACH, "No route to host"); #else #ifdef WSAEHOSTUNREACH - inscode(d, ds, de, "EHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); + add_errcode("EHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); #endif #endif #ifdef ENOMSG - inscode(d, ds, de, "ENOMSG", ENOMSG, "No message of desired type"); + add_errcode("ENOMSG", ENOMSG, "No message of desired type"); #endif #ifdef EUCLEAN - inscode(d, ds, de, "EUCLEAN", EUCLEAN, "Structure needs cleaning"); + add_errcode("EUCLEAN", EUCLEAN, "Structure needs cleaning"); #endif #ifdef EL2NSYNC - inscode(d, ds, de, "EL2NSYNC", EL2NSYNC, "Level 2 not synchronized"); + add_errcode("EL2NSYNC", EL2NSYNC, "Level 2 not synchronized"); #endif #ifdef EL2HLT - inscode(d, ds, de, "EL2HLT", EL2HLT, "Level 2 halted"); + add_errcode("EL2HLT", EL2HLT, "Level 2 halted"); #endif #ifdef ENODATA - inscode(d, ds, de, "ENODATA", ENODATA, "No data available"); + add_errcode("ENODATA", ENODATA, "No data available"); #endif #ifdef ENOTBLK - inscode(d, ds, de, "ENOTBLK", ENOTBLK, "Block device required"); + add_errcode("ENOTBLK", ENOTBLK, "Block device required"); #endif #ifdef ENOSYS - inscode(d, ds, de, "ENOSYS", ENOSYS, "Function not implemented"); + add_errcode("ENOSYS", ENOSYS, "Function not implemented"); #endif #ifdef EPIPE - inscode(d, ds, de, "EPIPE", EPIPE, "Broken pipe"); + add_errcode("EPIPE", EPIPE, "Broken pipe"); #endif #ifdef EINVAL - inscode(d, ds, de, "EINVAL", EINVAL, "Invalid argument"); + add_errcode("EINVAL", EINVAL, "Invalid argument"); #else #ifdef WSAEINVAL - inscode(d, ds, de, 
"EINVAL", WSAEINVAL, "Invalid argument"); + add_errcode("EINVAL", WSAEINVAL, "Invalid argument"); #endif #endif #ifdef EOVERFLOW - inscode(d, ds, de, "EOVERFLOW", EOVERFLOW, "Value too large for defined data type"); + add_errcode("EOVERFLOW", EOVERFLOW, "Value too large for defined data type"); #endif #ifdef EADV - inscode(d, ds, de, "EADV", EADV, "Advertise error"); + add_errcode("EADV", EADV, "Advertise error"); #endif #ifdef EINTR - inscode(d, ds, de, "EINTR", EINTR, "Interrupted system call"); + add_errcode("EINTR", EINTR, "Interrupted system call"); #else #ifdef WSAEINTR - inscode(d, ds, de, "EINTR", WSAEINTR, "Interrupted system call"); + add_errcode("EINTR", WSAEINTR, "Interrupted system call"); #endif #endif #ifdef EUSERS - inscode(d, ds, de, "EUSERS", EUSERS, "Too many users"); + add_errcode("EUSERS", EUSERS, "Too many users"); #else #ifdef WSAEUSERS - inscode(d, ds, de, "EUSERS", WSAEUSERS, "Too many users"); + add_errcode("EUSERS", WSAEUSERS, "Too many users"); #endif #endif #ifdef ENOTEMPTY - inscode(d, ds, de, "ENOTEMPTY", ENOTEMPTY, "Directory not empty"); + add_errcode("ENOTEMPTY", ENOTEMPTY, "Directory not empty"); #else #ifdef WSAENOTEMPTY - inscode(d, ds, de, "ENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); + add_errcode("ENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); #endif #endif #ifdef ENOBUFS - inscode(d, ds, de, "ENOBUFS", ENOBUFS, "No buffer space available"); + add_errcode("ENOBUFS", ENOBUFS, "No buffer space available"); #else #ifdef WSAENOBUFS - inscode(d, ds, de, "ENOBUFS", WSAENOBUFS, "No buffer space available"); + add_errcode("ENOBUFS", WSAENOBUFS, "No buffer space available"); #endif #endif #ifdef EPROTO - inscode(d, ds, de, "EPROTO", EPROTO, "Protocol error"); + add_errcode("EPROTO", EPROTO, "Protocol error"); #endif #ifdef EREMOTE - inscode(d, ds, de, "EREMOTE", EREMOTE, "Object is remote"); + add_errcode("EREMOTE", EREMOTE, "Object is remote"); #else #ifdef WSAEREMOTE - inscode(d, ds, de, "EREMOTE", WSAEREMOTE, "Object is remote"); + add_errcode("EREMOTE", WSAEREMOTE, "Object is remote"); #endif #endif #ifdef ENAVAIL - inscode(d, ds, de, "ENAVAIL", ENAVAIL, "No XENIX semaphores available"); + add_errcode("ENAVAIL", ENAVAIL, "No XENIX semaphores available"); #endif #ifdef ECHILD - inscode(d, ds, de, "ECHILD", ECHILD, "No child processes"); + add_errcode("ECHILD", ECHILD, "No child processes"); #endif #ifdef ELOOP - inscode(d, ds, de, "ELOOP", ELOOP, "Too many symbolic links encountered"); + add_errcode("ELOOP", ELOOP, "Too many symbolic links encountered"); #else #ifdef WSAELOOP - inscode(d, ds, de, "ELOOP", WSAELOOP, "Too many symbolic links encountered"); + add_errcode("ELOOP", WSAELOOP, "Too many symbolic links encountered"); #endif #endif #ifdef EXDEV - inscode(d, ds, de, "EXDEV", EXDEV, "Cross-device link"); + add_errcode("EXDEV", EXDEV, "Cross-device link"); #endif #ifdef E2BIG - inscode(d, ds, de, "E2BIG", E2BIG, "Arg list too long"); + add_errcode("E2BIG", E2BIG, "Arg list too long"); #endif #ifdef ESRCH - inscode(d, ds, de, "ESRCH", ESRCH, "No such process"); + add_errcode("ESRCH", ESRCH, "No such process"); #endif #ifdef EMSGSIZE - inscode(d, ds, de, "EMSGSIZE", EMSGSIZE, "Message too long"); + add_errcode("EMSGSIZE", EMSGSIZE, "Message too long"); #else #ifdef WSAEMSGSIZE - inscode(d, ds, de, "EMSGSIZE", WSAEMSGSIZE, "Message too long"); + add_errcode("EMSGSIZE", WSAEMSGSIZE, "Message too long"); #endif #endif #ifdef EAFNOSUPPORT - inscode(d, ds, de, "EAFNOSUPPORT", EAFNOSUPPORT, "Address family not supported by protocol"); + 
add_errcode("EAFNOSUPPORT", EAFNOSUPPORT, "Address family not supported by protocol"); #else #ifdef WSAEAFNOSUPPORT - inscode(d, ds, de, "EAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); + add_errcode("EAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); #endif #endif #ifdef EBADR - inscode(d, ds, de, "EBADR", EBADR, "Invalid request descriptor"); + add_errcode("EBADR", EBADR, "Invalid request descriptor"); #endif #ifdef EHOSTDOWN - inscode(d, ds, de, "EHOSTDOWN", EHOSTDOWN, "Host is down"); + add_errcode("EHOSTDOWN", EHOSTDOWN, "Host is down"); #else #ifdef WSAEHOSTDOWN - inscode(d, ds, de, "EHOSTDOWN", WSAEHOSTDOWN, "Host is down"); + add_errcode("EHOSTDOWN", WSAEHOSTDOWN, "Host is down"); #endif #endif #ifdef EPFNOSUPPORT - inscode(d, ds, de, "EPFNOSUPPORT", EPFNOSUPPORT, "Protocol family not supported"); + add_errcode("EPFNOSUPPORT", EPFNOSUPPORT, "Protocol family not supported"); #else #ifdef WSAEPFNOSUPPORT - inscode(d, ds, de, "EPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); + add_errcode("EPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); #endif #endif #ifdef ENOPROTOOPT - inscode(d, ds, de, "ENOPROTOOPT", ENOPROTOOPT, "Protocol not available"); + add_errcode("ENOPROTOOPT", ENOPROTOOPT, "Protocol not available"); #else #ifdef WSAENOPROTOOPT - inscode(d, ds, de, "ENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); + add_errcode("ENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); #endif #endif #ifdef EBUSY - inscode(d, ds, de, "EBUSY", EBUSY, "Device or resource busy"); + add_errcode("EBUSY", EBUSY, "Device or resource busy"); #endif #ifdef EWOULDBLOCK - inscode(d, ds, de, "EWOULDBLOCK", EWOULDBLOCK, "Operation would block"); + add_errcode("EWOULDBLOCK", EWOULDBLOCK, "Operation would block"); #else #ifdef WSAEWOULDBLOCK - inscode(d, ds, de, "EWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); + add_errcode("EWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); #endif #endif #ifdef EBADFD - inscode(d, ds, de, "EBADFD", EBADFD, "File descriptor in bad state"); + add_errcode("EBADFD", EBADFD, "File descriptor in bad state"); #endif #ifdef EDOTDOT - inscode(d, ds, de, "EDOTDOT", EDOTDOT, "RFS specific error"); + add_errcode("EDOTDOT", EDOTDOT, "RFS specific error"); #endif #ifdef EISCONN - inscode(d, ds, de, "EISCONN", EISCONN, "Transport endpoint is already connected"); + add_errcode("EISCONN", EISCONN, "Transport endpoint is already connected"); #else #ifdef WSAEISCONN - inscode(d, ds, de, "EISCONN", WSAEISCONN, "Transport endpoint is already connected"); + add_errcode("EISCONN", WSAEISCONN, "Transport endpoint is already connected"); #endif #endif #ifdef ENOANO - inscode(d, ds, de, "ENOANO", ENOANO, "No anode"); + add_errcode("ENOANO", ENOANO, "No anode"); #endif #ifdef ESHUTDOWN - inscode(d, ds, de, "ESHUTDOWN", ESHUTDOWN, "Cannot send after transport endpoint shutdown"); + add_errcode("ESHUTDOWN", ESHUTDOWN, "Cannot send after transport endpoint shutdown"); #else #ifdef WSAESHUTDOWN - inscode(d, ds, de, "ESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); + add_errcode("ESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); #endif #endif #ifdef ECHRNG - inscode(d, ds, de, "ECHRNG", ECHRNG, "Channel number out of range"); + add_errcode("ECHRNG", ECHRNG, "Channel number out of range"); #endif #ifdef ELIBBAD - inscode(d, ds, de, "ELIBBAD", ELIBBAD, "Accessing a corrupted shared library"); + add_errcode("ELIBBAD", ELIBBAD, "Accessing a 
corrupted shared library"); #endif #ifdef ENONET - inscode(d, ds, de, "ENONET", ENONET, "Machine is not on the network"); + add_errcode("ENONET", ENONET, "Machine is not on the network"); #endif #ifdef EBADE - inscode(d, ds, de, "EBADE", EBADE, "Invalid exchange"); + add_errcode("EBADE", EBADE, "Invalid exchange"); #endif #ifdef EBADF - inscode(d, ds, de, "EBADF", EBADF, "Bad file number"); + add_errcode("EBADF", EBADF, "Bad file number"); #else #ifdef WSAEBADF - inscode(d, ds, de, "EBADF", WSAEBADF, "Bad file number"); + add_errcode("EBADF", WSAEBADF, "Bad file number"); #endif #endif #ifdef EMULTIHOP - inscode(d, ds, de, "EMULTIHOP", EMULTIHOP, "Multihop attempted"); + add_errcode("EMULTIHOP", EMULTIHOP, "Multihop attempted"); #endif #ifdef EIO - inscode(d, ds, de, "EIO", EIO, "I/O error"); + add_errcode("EIO", EIO, "I/O error"); #endif #ifdef EUNATCH - inscode(d, ds, de, "EUNATCH", EUNATCH, "Protocol driver not attached"); + add_errcode("EUNATCH", EUNATCH, "Protocol driver not attached"); #endif #ifdef EPROTOTYPE - inscode(d, ds, de, "EPROTOTYPE", EPROTOTYPE, "Protocol wrong type for socket"); + add_errcode("EPROTOTYPE", EPROTOTYPE, "Protocol wrong type for socket"); #else #ifdef WSAEPROTOTYPE - inscode(d, ds, de, "EPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); + add_errcode("EPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); #endif #endif #ifdef ENOSPC - inscode(d, ds, de, "ENOSPC", ENOSPC, "No space left on device"); + add_errcode("ENOSPC", ENOSPC, "No space left on device"); #endif #ifdef ENOEXEC - inscode(d, ds, de, "ENOEXEC", ENOEXEC, "Exec format error"); + add_errcode("ENOEXEC", ENOEXEC, "Exec format error"); #endif #ifdef EALREADY - inscode(d, ds, de, "EALREADY", EALREADY, "Operation already in progress"); + add_errcode("EALREADY", EALREADY, "Operation already in progress"); #else #ifdef WSAEALREADY - inscode(d, ds, de, "EALREADY", WSAEALREADY, "Operation already in progress"); + add_errcode("EALREADY", WSAEALREADY, "Operation already in progress"); #endif #endif #ifdef ENETDOWN - inscode(d, ds, de, "ENETDOWN", ENETDOWN, "Network is down"); + add_errcode("ENETDOWN", ENETDOWN, "Network is down"); #else #ifdef WSAENETDOWN - inscode(d, ds, de, "ENETDOWN", WSAENETDOWN, "Network is down"); + add_errcode("ENETDOWN", WSAENETDOWN, "Network is down"); #endif #endif #ifdef ENOTNAM - inscode(d, ds, de, "ENOTNAM", ENOTNAM, "Not a XENIX named type file"); + add_errcode("ENOTNAM", ENOTNAM, "Not a XENIX named type file"); #endif #ifdef EACCES - inscode(d, ds, de, "EACCES", EACCES, "Permission denied"); + add_errcode("EACCES", EACCES, "Permission denied"); #else #ifdef WSAEACCES - inscode(d, ds, de, "EACCES", WSAEACCES, "Permission denied"); + add_errcode("EACCES", WSAEACCES, "Permission denied"); #endif #endif #ifdef ELNRNG - inscode(d, ds, de, "ELNRNG", ELNRNG, "Link number out of range"); + add_errcode("ELNRNG", ELNRNG, "Link number out of range"); #endif #ifdef EILSEQ - inscode(d, ds, de, "EILSEQ", EILSEQ, "Illegal byte sequence"); + add_errcode("EILSEQ", EILSEQ, "Illegal byte sequence"); #endif #ifdef ENOTDIR - inscode(d, ds, de, "ENOTDIR", ENOTDIR, "Not a directory"); + add_errcode("ENOTDIR", ENOTDIR, "Not a directory"); #endif #ifdef ENOTUNIQ - inscode(d, ds, de, "ENOTUNIQ", ENOTUNIQ, "Name not unique on network"); + add_errcode("ENOTUNIQ", ENOTUNIQ, "Name not unique on network"); #endif #ifdef EPERM - inscode(d, ds, de, "EPERM", EPERM, "Operation not permitted"); + add_errcode("EPERM", EPERM, "Operation not permitted"); #endif #ifdef EDOM - inscode(d, ds, de, 
"EDOM", EDOM, "Math argument out of domain of func"); + add_errcode("EDOM", EDOM, "Math argument out of domain of func"); #endif #ifdef EXFULL - inscode(d, ds, de, "EXFULL", EXFULL, "Exchange full"); + add_errcode("EXFULL", EXFULL, "Exchange full"); #endif #ifdef ECONNREFUSED - inscode(d, ds, de, "ECONNREFUSED", ECONNREFUSED, "Connection refused"); + add_errcode("ECONNREFUSED", ECONNREFUSED, "Connection refused"); #else #ifdef WSAECONNREFUSED - inscode(d, ds, de, "ECONNREFUSED", WSAECONNREFUSED, "Connection refused"); + add_errcode("ECONNREFUSED", WSAECONNREFUSED, "Connection refused"); #endif #endif #ifdef EISDIR - inscode(d, ds, de, "EISDIR", EISDIR, "Is a directory"); + add_errcode("EISDIR", EISDIR, "Is a directory"); #endif #ifdef EPROTONOSUPPORT - inscode(d, ds, de, "EPROTONOSUPPORT", EPROTONOSUPPORT, "Protocol not supported"); + add_errcode("EPROTONOSUPPORT", EPROTONOSUPPORT, "Protocol not supported"); #else #ifdef WSAEPROTONOSUPPORT - inscode(d, ds, de, "EPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); + add_errcode("EPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); #endif #endif #ifdef EROFS - inscode(d, ds, de, "EROFS", EROFS, "Read-only file system"); + add_errcode("EROFS", EROFS, "Read-only file system"); #endif #ifdef EADDRNOTAVAIL - inscode(d, ds, de, "EADDRNOTAVAIL", EADDRNOTAVAIL, "Cannot assign requested address"); + add_errcode("EADDRNOTAVAIL", EADDRNOTAVAIL, "Cannot assign requested address"); #else #ifdef WSAEADDRNOTAVAIL - inscode(d, ds, de, "EADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); + add_errcode("EADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); #endif #endif #ifdef EIDRM - inscode(d, ds, de, "EIDRM", EIDRM, "Identifier removed"); + add_errcode("EIDRM", EIDRM, "Identifier removed"); #endif #ifdef ECOMM - inscode(d, ds, de, "ECOMM", ECOMM, "Communication error on send"); + add_errcode("ECOMM", ECOMM, "Communication error on send"); #endif #ifdef ESRMNT - inscode(d, ds, de, "ESRMNT", ESRMNT, "Srmount error"); + add_errcode("ESRMNT", ESRMNT, "Srmount error"); #endif #ifdef EREMOTEIO - inscode(d, ds, de, "EREMOTEIO", EREMOTEIO, "Remote I/O error"); + add_errcode("EREMOTEIO", EREMOTEIO, "Remote I/O error"); #endif #ifdef EL3RST - inscode(d, ds, de, "EL3RST", EL3RST, "Level 3 reset"); + add_errcode("EL3RST", EL3RST, "Level 3 reset"); #endif #ifdef EBADMSG - inscode(d, ds, de, "EBADMSG", EBADMSG, "Not a data message"); + add_errcode("EBADMSG", EBADMSG, "Not a data message"); #endif #ifdef ENFILE - inscode(d, ds, de, "ENFILE", ENFILE, "File table overflow"); + add_errcode("ENFILE", ENFILE, "File table overflow"); #endif #ifdef ELIBMAX - inscode(d, ds, de, "ELIBMAX", ELIBMAX, "Attempting to link in too many shared libraries"); + add_errcode("ELIBMAX", ELIBMAX, "Attempting to link in too many shared libraries"); #endif #ifdef ESPIPE - inscode(d, ds, de, "ESPIPE", ESPIPE, "Illegal seek"); + add_errcode("ESPIPE", ESPIPE, "Illegal seek"); #endif #ifdef ENOLINK - inscode(d, ds, de, "ENOLINK", ENOLINK, "Link has been severed"); + add_errcode("ENOLINK", ENOLINK, "Link has been severed"); #endif #ifdef ENETRESET - inscode(d, ds, de, "ENETRESET", ENETRESET, "Network dropped connection because of reset"); + add_errcode("ENETRESET", ENETRESET, "Network dropped connection because of reset"); #else #ifdef WSAENETRESET - inscode(d, ds, de, "ENETRESET", WSAENETRESET, "Network dropped connection because of reset"); + add_errcode("ENETRESET", WSAENETRESET, "Network dropped connection because of reset"); #endif 
#endif #ifdef ETIMEDOUT - inscode(d, ds, de, "ETIMEDOUT", ETIMEDOUT, "Connection timed out"); + add_errcode("ETIMEDOUT", ETIMEDOUT, "Connection timed out"); #else #ifdef WSAETIMEDOUT - inscode(d, ds, de, "ETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); + add_errcode("ETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); #endif #endif #ifdef ENOENT - inscode(d, ds, de, "ENOENT", ENOENT, "No such file or directory"); + add_errcode("ENOENT", ENOENT, "No such file or directory"); #endif #ifdef EEXIST - inscode(d, ds, de, "EEXIST", EEXIST, "File exists"); + add_errcode("EEXIST", EEXIST, "File exists"); #endif #ifdef EDQUOT - inscode(d, ds, de, "EDQUOT", EDQUOT, "Quota exceeded"); + add_errcode("EDQUOT", EDQUOT, "Quota exceeded"); #else #ifdef WSAEDQUOT - inscode(d, ds, de, "EDQUOT", WSAEDQUOT, "Quota exceeded"); + add_errcode("EDQUOT", WSAEDQUOT, "Quota exceeded"); #endif #endif #ifdef ENOSTR - inscode(d, ds, de, "ENOSTR", ENOSTR, "Device not a stream"); + add_errcode("ENOSTR", ENOSTR, "Device not a stream"); #endif #ifdef EBADSLT - inscode(d, ds, de, "EBADSLT", EBADSLT, "Invalid slot"); + add_errcode("EBADSLT", EBADSLT, "Invalid slot"); #endif #ifdef EBADRQC - inscode(d, ds, de, "EBADRQC", EBADRQC, "Invalid request code"); + add_errcode("EBADRQC", EBADRQC, "Invalid request code"); #endif #ifdef ELIBACC - inscode(d, ds, de, "ELIBACC", ELIBACC, "Can not access a needed shared library"); + add_errcode("ELIBACC", ELIBACC, "Can not access a needed shared library"); #endif #ifdef EFAULT - inscode(d, ds, de, "EFAULT", EFAULT, "Bad address"); + add_errcode("EFAULT", EFAULT, "Bad address"); #else #ifdef WSAEFAULT - inscode(d, ds, de, "EFAULT", WSAEFAULT, "Bad address"); + add_errcode("EFAULT", WSAEFAULT, "Bad address"); #endif #endif #ifdef EFBIG - inscode(d, ds, de, "EFBIG", EFBIG, "File too large"); + add_errcode("EFBIG", EFBIG, "File too large"); #endif #ifdef EDEADLK - inscode(d, ds, de, "EDEADLK", EDEADLK, "Resource deadlock would occur"); + add_errcode("EDEADLK", EDEADLK, "Resource deadlock would occur"); #endif #ifdef ENOTCONN - inscode(d, ds, de, "ENOTCONN", ENOTCONN, "Transport endpoint is not connected"); + add_errcode("ENOTCONN", ENOTCONN, "Transport endpoint is not connected"); #else #ifdef WSAENOTCONN - inscode(d, ds, de, "ENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); + add_errcode("ENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); #endif #endif #ifdef EDESTADDRREQ - inscode(d, ds, de, "EDESTADDRREQ", EDESTADDRREQ, "Destination address required"); + add_errcode("EDESTADDRREQ", EDESTADDRREQ, "Destination address required"); #else #ifdef WSAEDESTADDRREQ - inscode(d, ds, de, "EDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); + add_errcode("EDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); #endif #endif #ifdef ELIBSCN - inscode(d, ds, de, "ELIBSCN", ELIBSCN, ".lib section in a.out corrupted"); + add_errcode("ELIBSCN", ELIBSCN, ".lib section in a.out corrupted"); #endif #ifdef ENOLCK - inscode(d, ds, de, "ENOLCK", ENOLCK, "No record locks available"); + add_errcode("ENOLCK", ENOLCK, "No record locks available"); #endif #ifdef EISNAM - inscode(d, ds, de, "EISNAM", EISNAM, "Is a named type file"); + add_errcode("EISNAM", EISNAM, "Is a named type file"); #endif #ifdef ECONNABORTED - inscode(d, ds, de, "ECONNABORTED", ECONNABORTED, "Software caused connection abort"); + add_errcode("ECONNABORTED", ECONNABORTED, "Software caused connection abort"); #else #ifdef WSAECONNABORTED - inscode(d, ds, de, "ECONNABORTED", WSAECONNABORTED, 
"Software caused connection abort"); + add_errcode("ECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); #endif #endif #ifdef ENETUNREACH - inscode(d, ds, de, "ENETUNREACH", ENETUNREACH, "Network is unreachable"); + add_errcode("ENETUNREACH", ENETUNREACH, "Network is unreachable"); #else #ifdef WSAENETUNREACH - inscode(d, ds, de, "ENETUNREACH", WSAENETUNREACH, "Network is unreachable"); + add_errcode("ENETUNREACH", WSAENETUNREACH, "Network is unreachable"); #endif #endif #ifdef ESTALE - inscode(d, ds, de, "ESTALE", ESTALE, "Stale NFS file handle"); + add_errcode("ESTALE", ESTALE, "Stale NFS file handle"); #else #ifdef WSAESTALE - inscode(d, ds, de, "ESTALE", WSAESTALE, "Stale NFS file handle"); + add_errcode("ESTALE", WSAESTALE, "Stale NFS file handle"); #endif #endif #ifdef ENOSR - inscode(d, ds, de, "ENOSR", ENOSR, "Out of streams resources"); + add_errcode("ENOSR", ENOSR, "Out of streams resources"); #endif #ifdef ENOMEM - inscode(d, ds, de, "ENOMEM", ENOMEM, "Out of memory"); + add_errcode("ENOMEM", ENOMEM, "Out of memory"); #endif #ifdef ENOTSOCK - inscode(d, ds, de, "ENOTSOCK", ENOTSOCK, "Socket operation on non-socket"); + add_errcode("ENOTSOCK", ENOTSOCK, "Socket operation on non-socket"); #else #ifdef WSAENOTSOCK - inscode(d, ds, de, "ENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); + add_errcode("ENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); #endif #endif #ifdef ESTRPIPE - inscode(d, ds, de, "ESTRPIPE", ESTRPIPE, "Streams pipe error"); + add_errcode("ESTRPIPE", ESTRPIPE, "Streams pipe error"); #endif #ifdef EMLINK - inscode(d, ds, de, "EMLINK", EMLINK, "Too many links"); + add_errcode("EMLINK", EMLINK, "Too many links"); #endif #ifdef ERANGE - inscode(d, ds, de, "ERANGE", ERANGE, "Math result not representable"); + add_errcode("ERANGE", ERANGE, "Math result not representable"); #endif #ifdef ELIBEXEC - inscode(d, ds, de, "ELIBEXEC", ELIBEXEC, "Cannot exec a shared library directly"); + add_errcode("ELIBEXEC", ELIBEXEC, "Cannot exec a shared library directly"); #endif #ifdef EL3HLT - inscode(d, ds, de, "EL3HLT", EL3HLT, "Level 3 halted"); + add_errcode("EL3HLT", EL3HLT, "Level 3 halted"); #endif #ifdef ECONNRESET - inscode(d, ds, de, "ECONNRESET", ECONNRESET, "Connection reset by peer"); + add_errcode("ECONNRESET", ECONNRESET, "Connection reset by peer"); #else #ifdef WSAECONNRESET - inscode(d, ds, de, "ECONNRESET", WSAECONNRESET, "Connection reset by peer"); + add_errcode("ECONNRESET", WSAECONNRESET, "Connection reset by peer"); #endif #endif #ifdef EADDRINUSE - inscode(d, ds, de, "EADDRINUSE", EADDRINUSE, "Address already in use"); + add_errcode("EADDRINUSE", EADDRINUSE, "Address already in use"); #else #ifdef WSAEADDRINUSE - inscode(d, ds, de, "EADDRINUSE", WSAEADDRINUSE, "Address already in use"); + add_errcode("EADDRINUSE", WSAEADDRINUSE, "Address already in use"); #endif #endif #ifdef EOPNOTSUPP - inscode(d, ds, de, "EOPNOTSUPP", EOPNOTSUPP, "Operation not supported on transport endpoint"); + add_errcode("EOPNOTSUPP", EOPNOTSUPP, "Operation not supported on transport endpoint"); #else #ifdef WSAEOPNOTSUPP - inscode(d, ds, de, "EOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); + add_errcode("EOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); #endif #endif #ifdef EREMCHG - inscode(d, ds, de, "EREMCHG", EREMCHG, "Remote address changed"); + add_errcode("EREMCHG", EREMCHG, "Remote address changed"); #endif #ifdef EAGAIN - inscode(d, ds, de, "EAGAIN", EAGAIN, "Try again"); + 
add_errcode("EAGAIN", EAGAIN, "Try again"); #endif #ifdef ENAMETOOLONG - inscode(d, ds, de, "ENAMETOOLONG", ENAMETOOLONG, "File name too long"); + add_errcode("ENAMETOOLONG", ENAMETOOLONG, "File name too long"); #else #ifdef WSAENAMETOOLONG - inscode(d, ds, de, "ENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); + add_errcode("ENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); #endif #endif #ifdef ENOTTY - inscode(d, ds, de, "ENOTTY", ENOTTY, "Not a typewriter"); + add_errcode("ENOTTY", ENOTTY, "Not a typewriter"); #endif #ifdef ERESTART - inscode(d, ds, de, "ERESTART", ERESTART, "Interrupted system call should be restarted"); + add_errcode("ERESTART", ERESTART, "Interrupted system call should be restarted"); #endif #ifdef ESOCKTNOSUPPORT - inscode(d, ds, de, "ESOCKTNOSUPPORT", ESOCKTNOSUPPORT, "Socket type not supported"); + add_errcode("ESOCKTNOSUPPORT", ESOCKTNOSUPPORT, "Socket type not supported"); #else #ifdef WSAESOCKTNOSUPPORT - inscode(d, ds, de, "ESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); + add_errcode("ESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); #endif #endif #ifdef ETIME - inscode(d, ds, de, "ETIME", ETIME, "Timer expired"); + add_errcode("ETIME", ETIME, "Timer expired"); #endif #ifdef EBFONT - inscode(d, ds, de, "EBFONT", EBFONT, "Bad font file format"); + add_errcode("EBFONT", EBFONT, "Bad font file format"); #endif #ifdef EDEADLOCK - inscode(d, ds, de, "EDEADLOCK", EDEADLOCK, "Error EDEADLOCK"); + add_errcode("EDEADLOCK", EDEADLOCK, "Error EDEADLOCK"); #endif #ifdef ETOOMANYREFS - inscode(d, ds, de, "ETOOMANYREFS", ETOOMANYREFS, "Too many references: cannot splice"); + add_errcode("ETOOMANYREFS", ETOOMANYREFS, "Too many references: cannot splice"); #else #ifdef WSAETOOMANYREFS - inscode(d, ds, de, "ETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); + add_errcode("ETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); #endif #endif #ifdef EMFILE - inscode(d, ds, de, "EMFILE", EMFILE, "Too many open files"); + add_errcode("EMFILE", EMFILE, "Too many open files"); #else #ifdef WSAEMFILE - inscode(d, ds, de, "EMFILE", WSAEMFILE, "Too many open files"); + add_errcode("EMFILE", WSAEMFILE, "Too many open files"); #endif #endif #ifdef ETXTBSY - inscode(d, ds, de, "ETXTBSY", ETXTBSY, "Text file busy"); + add_errcode("ETXTBSY", ETXTBSY, "Text file busy"); #endif #ifdef EINPROGRESS - inscode(d, ds, de, "EINPROGRESS", EINPROGRESS, "Operation now in progress"); + add_errcode("EINPROGRESS", EINPROGRESS, "Operation now in progress"); #else #ifdef WSAEINPROGRESS - inscode(d, ds, de, "EINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); + add_errcode("EINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); #endif #endif #ifdef ENXIO - inscode(d, ds, de, "ENXIO", ENXIO, "No such device or address"); + add_errcode("ENXIO", ENXIO, "No such device or address"); #endif #ifdef ENOPKG - inscode(d, ds, de, "ENOPKG", ENOPKG, "Package not installed"); + add_errcode("ENOPKG", ENOPKG, "Package not installed"); #endif #ifdef WSASY - inscode(d, ds, de, "WSASY", WSASY, "Error WSASY"); + add_errcode("WSASY", WSASY, "Error WSASY"); #endif #ifdef WSAEHOSTDOWN - inscode(d, ds, de, "WSAEHOSTDOWN", WSAEHOSTDOWN, "Host is down"); + add_errcode("WSAEHOSTDOWN", WSAEHOSTDOWN, "Host is down"); #endif #ifdef WSAENETDOWN - inscode(d, ds, de, "WSAENETDOWN", WSAENETDOWN, "Network is down"); + add_errcode("WSAENETDOWN", WSAENETDOWN, "Network is down"); #endif #ifdef WSAENOTSOCK - inscode(d, ds, de, "WSAENOTSOCK", 
WSAENOTSOCK, "Socket operation on non-socket"); + add_errcode("WSAENOTSOCK", WSAENOTSOCK, "Socket operation on non-socket"); #endif #ifdef WSAEHOSTUNREACH - inscode(d, ds, de, "WSAEHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); + add_errcode("WSAEHOSTUNREACH", WSAEHOSTUNREACH, "No route to host"); #endif #ifdef WSAELOOP - inscode(d, ds, de, "WSAELOOP", WSAELOOP, "Too many symbolic links encountered"); + add_errcode("WSAELOOP", WSAELOOP, "Too many symbolic links encountered"); #endif #ifdef WSAEMFILE - inscode(d, ds, de, "WSAEMFILE", WSAEMFILE, "Too many open files"); + add_errcode("WSAEMFILE", WSAEMFILE, "Too many open files"); #endif #ifdef WSAESTALE - inscode(d, ds, de, "WSAESTALE", WSAESTALE, "Stale NFS file handle"); + add_errcode("WSAESTALE", WSAESTALE, "Stale NFS file handle"); #endif #ifdef WSAVERNOTSUPPORTED - inscode(d, ds, de, "WSAVERNOTSUPPORTED", WSAVERNOTSUPPORTED, "Error WSAVERNOTSUPPORTED"); + add_errcode("WSAVERNOTSUPPORTED", WSAVERNOTSUPPORTED, "Error WSAVERNOTSUPPORTED"); #endif #ifdef WSAENETUNREACH - inscode(d, ds, de, "WSAENETUNREACH", WSAENETUNREACH, "Network is unreachable"); + add_errcode("WSAENETUNREACH", WSAENETUNREACH, "Network is unreachable"); #endif #ifdef WSAEPROCLIM - inscode(d, ds, de, "WSAEPROCLIM", WSAEPROCLIM, "Error WSAEPROCLIM"); + add_errcode("WSAEPROCLIM", WSAEPROCLIM, "Error WSAEPROCLIM"); #endif #ifdef WSAEFAULT - inscode(d, ds, de, "WSAEFAULT", WSAEFAULT, "Bad address"); + add_errcode("WSAEFAULT", WSAEFAULT, "Bad address"); #endif #ifdef WSANOTINITIALISED - inscode(d, ds, de, "WSANOTINITIALISED", WSANOTINITIALISED, "Error WSANOTINITIALISED"); + add_errcode("WSANOTINITIALISED", WSANOTINITIALISED, "Error WSANOTINITIALISED"); #endif #ifdef WSAEUSERS - inscode(d, ds, de, "WSAEUSERS", WSAEUSERS, "Too many users"); + add_errcode("WSAEUSERS", WSAEUSERS, "Too many users"); #endif #ifdef WSAMAKEASYNCREPL - inscode(d, ds, de, "WSAMAKEASYNCREPL", WSAMAKEASYNCREPL, "Error WSAMAKEASYNCREPL"); + add_errcode("WSAMAKEASYNCREPL", WSAMAKEASYNCREPL, "Error WSAMAKEASYNCREPL"); #endif #ifdef WSAENOPROTOOPT - inscode(d, ds, de, "WSAENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); + add_errcode("WSAENOPROTOOPT", WSAENOPROTOOPT, "Protocol not available"); #endif #ifdef WSAECONNABORTED - inscode(d, ds, de, "WSAECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); + add_errcode("WSAECONNABORTED", WSAECONNABORTED, "Software caused connection abort"); #endif #ifdef WSAENAMETOOLONG - inscode(d, ds, de, "WSAENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); + add_errcode("WSAENAMETOOLONG", WSAENAMETOOLONG, "File name too long"); #endif #ifdef WSAENOTEMPTY - inscode(d, ds, de, "WSAENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); + add_errcode("WSAENOTEMPTY", WSAENOTEMPTY, "Directory not empty"); #endif #ifdef WSAESHUTDOWN - inscode(d, ds, de, "WSAESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); + add_errcode("WSAESHUTDOWN", WSAESHUTDOWN, "Cannot send after transport endpoint shutdown"); #endif #ifdef WSAEAFNOSUPPORT - inscode(d, ds, de, "WSAEAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); + add_errcode("WSAEAFNOSUPPORT", WSAEAFNOSUPPORT, "Address family not supported by protocol"); #endif #ifdef WSAETOOMANYREFS - inscode(d, ds, de, "WSAETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); + add_errcode("WSAETOOMANYREFS", WSAETOOMANYREFS, "Too many references: cannot splice"); #endif #ifdef WSAEACCES - inscode(d, ds, de, "WSAEACCES", WSAEACCES, "Permission denied"); + 
add_errcode("WSAEACCES", WSAEACCES, "Permission denied"); #endif #ifdef WSATR - inscode(d, ds, de, "WSATR", WSATR, "Error WSATR"); + add_errcode("WSATR", WSATR, "Error WSATR"); #endif #ifdef WSABASEERR - inscode(d, ds, de, "WSABASEERR", WSABASEERR, "Error WSABASEERR"); + add_errcode("WSABASEERR", WSABASEERR, "Error WSABASEERR"); #endif #ifdef WSADESCRIPTIO - inscode(d, ds, de, "WSADESCRIPTIO", WSADESCRIPTIO, "Error WSADESCRIPTIO"); + add_errcode("WSADESCRIPTIO", WSADESCRIPTIO, "Error WSADESCRIPTIO"); #endif #ifdef WSAEMSGSIZE - inscode(d, ds, de, "WSAEMSGSIZE", WSAEMSGSIZE, "Message too long"); + add_errcode("WSAEMSGSIZE", WSAEMSGSIZE, "Message too long"); #endif #ifdef WSAEBADF - inscode(d, ds, de, "WSAEBADF", WSAEBADF, "Bad file number"); + add_errcode("WSAEBADF", WSAEBADF, "Bad file number"); #endif #ifdef WSAECONNRESET - inscode(d, ds, de, "WSAECONNRESET", WSAECONNRESET, "Connection reset by peer"); + add_errcode("WSAECONNRESET", WSAECONNRESET, "Connection reset by peer"); #endif #ifdef WSAGETSELECTERRO - inscode(d, ds, de, "WSAGETSELECTERRO", WSAGETSELECTERRO, "Error WSAGETSELECTERRO"); + add_errcode("WSAGETSELECTERRO", WSAGETSELECTERRO, "Error WSAGETSELECTERRO"); #endif #ifdef WSAETIMEDOUT - inscode(d, ds, de, "WSAETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); + add_errcode("WSAETIMEDOUT", WSAETIMEDOUT, "Connection timed out"); #endif #ifdef WSAENOBUFS - inscode(d, ds, de, "WSAENOBUFS", WSAENOBUFS, "No buffer space available"); + add_errcode("WSAENOBUFS", WSAENOBUFS, "No buffer space available"); #endif #ifdef WSAEDISCON - inscode(d, ds, de, "WSAEDISCON", WSAEDISCON, "Error WSAEDISCON"); + add_errcode("WSAEDISCON", WSAEDISCON, "Error WSAEDISCON"); #endif #ifdef WSAEINTR - inscode(d, ds, de, "WSAEINTR", WSAEINTR, "Interrupted system call"); + add_errcode("WSAEINTR", WSAEINTR, "Interrupted system call"); #endif #ifdef WSAEPROTOTYPE - inscode(d, ds, de, "WSAEPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); + add_errcode("WSAEPROTOTYPE", WSAEPROTOTYPE, "Protocol wrong type for socket"); #endif #ifdef WSAHOS - inscode(d, ds, de, "WSAHOS", WSAHOS, "Error WSAHOS"); + add_errcode("WSAHOS", WSAHOS, "Error WSAHOS"); #endif #ifdef WSAEADDRINUSE - inscode(d, ds, de, "WSAEADDRINUSE", WSAEADDRINUSE, "Address already in use"); + add_errcode("WSAEADDRINUSE", WSAEADDRINUSE, "Address already in use"); #endif #ifdef WSAEADDRNOTAVAIL - inscode(d, ds, de, "WSAEADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); + add_errcode("WSAEADDRNOTAVAIL", WSAEADDRNOTAVAIL, "Cannot assign requested address"); #endif #ifdef WSAEALREADY - inscode(d, ds, de, "WSAEALREADY", WSAEALREADY, "Operation already in progress"); + add_errcode("WSAEALREADY", WSAEALREADY, "Operation already in progress"); #endif #ifdef WSAEPROTONOSUPPORT - inscode(d, ds, de, "WSAEPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); + add_errcode("WSAEPROTONOSUPPORT", WSAEPROTONOSUPPORT, "Protocol not supported"); #endif #ifdef WSASYSNOTREADY - inscode(d, ds, de, "WSASYSNOTREADY", WSASYSNOTREADY, "Error WSASYSNOTREADY"); + add_errcode("WSASYSNOTREADY", WSASYSNOTREADY, "Error WSASYSNOTREADY"); #endif #ifdef WSAEWOULDBLOCK - inscode(d, ds, de, "WSAEWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); + add_errcode("WSAEWOULDBLOCK", WSAEWOULDBLOCK, "Operation would block"); #endif #ifdef WSAEPFNOSUPPORT - inscode(d, ds, de, "WSAEPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); + add_errcode("WSAEPFNOSUPPORT", WSAEPFNOSUPPORT, "Protocol family not supported"); #endif #ifdef WSAEOPNOTSUPP - 
inscode(d, ds, de, "WSAEOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); + add_errcode("WSAEOPNOTSUPP", WSAEOPNOTSUPP, "Operation not supported on transport endpoint"); #endif #ifdef WSAEISCONN - inscode(d, ds, de, "WSAEISCONN", WSAEISCONN, "Transport endpoint is already connected"); + add_errcode("WSAEISCONN", WSAEISCONN, "Transport endpoint is already connected"); #endif #ifdef WSAEDQUOT - inscode(d, ds, de, "WSAEDQUOT", WSAEDQUOT, "Quota exceeded"); + add_errcode("WSAEDQUOT", WSAEDQUOT, "Quota exceeded"); #endif #ifdef WSAENOTCONN - inscode(d, ds, de, "WSAENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); + add_errcode("WSAENOTCONN", WSAENOTCONN, "Transport endpoint is not connected"); #endif #ifdef WSAEREMOTE - inscode(d, ds, de, "WSAEREMOTE", WSAEREMOTE, "Object is remote"); + add_errcode("WSAEREMOTE", WSAEREMOTE, "Object is remote"); #endif #ifdef WSAEINVAL - inscode(d, ds, de, "WSAEINVAL", WSAEINVAL, "Invalid argument"); + add_errcode("WSAEINVAL", WSAEINVAL, "Invalid argument"); #endif #ifdef WSAEINPROGRESS - inscode(d, ds, de, "WSAEINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); + add_errcode("WSAEINPROGRESS", WSAEINPROGRESS, "Operation now in progress"); #endif #ifdef WSAGETSELECTEVEN - inscode(d, ds, de, "WSAGETSELECTEVEN", WSAGETSELECTEVEN, "Error WSAGETSELECTEVEN"); + add_errcode("WSAGETSELECTEVEN", WSAGETSELECTEVEN, "Error WSAGETSELECTEVEN"); #endif #ifdef WSAESOCKTNOSUPPORT - inscode(d, ds, de, "WSAESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); + add_errcode("WSAESOCKTNOSUPPORT", WSAESOCKTNOSUPPORT, "Socket type not supported"); #endif #ifdef WSAGETASYNCERRO - inscode(d, ds, de, "WSAGETASYNCERRO", WSAGETASYNCERRO, "Error WSAGETASYNCERRO"); + add_errcode("WSAGETASYNCERRO", WSAGETASYNCERRO, "Error WSAGETASYNCERRO"); #endif #ifdef WSAMAKESELECTREPL - inscode(d, ds, de, "WSAMAKESELECTREPL", WSAMAKESELECTREPL, "Error WSAMAKESELECTREPL"); + add_errcode("WSAMAKESELECTREPL", WSAMAKESELECTREPL, "Error WSAMAKESELECTREPL"); #endif #ifdef WSAGETASYNCBUFLE - inscode(d, ds, de, "WSAGETASYNCBUFLE", WSAGETASYNCBUFLE, "Error WSAGETASYNCBUFLE"); + add_errcode("WSAGETASYNCBUFLE", WSAGETASYNCBUFLE, "Error WSAGETASYNCBUFLE"); #endif #ifdef WSAEDESTADDRREQ - inscode(d, ds, de, "WSAEDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); + add_errcode("WSAEDESTADDRREQ", WSAEDESTADDRREQ, "Destination address required"); #endif #ifdef WSAECONNREFUSED - inscode(d, ds, de, "WSAECONNREFUSED", WSAECONNREFUSED, "Connection refused"); + add_errcode("WSAECONNREFUSED", WSAECONNREFUSED, "Connection refused"); #endif #ifdef WSAENETRESET - inscode(d, ds, de, "WSAENETRESET", WSAENETRESET, "Network dropped connection because of reset"); + add_errcode("WSAENETRESET", WSAENETRESET, "Network dropped connection because of reset"); #endif #ifdef WSAN - inscode(d, ds, de, "WSAN", WSAN, "Error WSAN"); + add_errcode("WSAN", WSAN, "Error WSAN"); #endif #ifdef ENOMEDIUM - inscode(d, ds, de, "ENOMEDIUM", ENOMEDIUM, "No medium found"); + add_errcode("ENOMEDIUM", ENOMEDIUM, "No medium found"); #endif #ifdef EMEDIUMTYPE - inscode(d, ds, de, "EMEDIUMTYPE", EMEDIUMTYPE, "Wrong medium type"); + add_errcode("EMEDIUMTYPE", EMEDIUMTYPE, "Wrong medium type"); #endif #ifdef ECANCELED - inscode(d, ds, de, "ECANCELED", ECANCELED, "Operation Canceled"); + add_errcode("ECANCELED", ECANCELED, "Operation Canceled"); #endif #ifdef ENOKEY - inscode(d, ds, de, "ENOKEY", ENOKEY, "Required key not available"); + add_errcode("ENOKEY", ENOKEY, "Required key not 
available"); #endif #ifdef EKEYEXPIRED - inscode(d, ds, de, "EKEYEXPIRED", EKEYEXPIRED, "Key has expired"); + add_errcode("EKEYEXPIRED", EKEYEXPIRED, "Key has expired"); #endif #ifdef EKEYREVOKED - inscode(d, ds, de, "EKEYREVOKED", EKEYREVOKED, "Key has been revoked"); + add_errcode("EKEYREVOKED", EKEYREVOKED, "Key has been revoked"); #endif #ifdef EKEYREJECTED - inscode(d, ds, de, "EKEYREJECTED", EKEYREJECTED, "Key was rejected by service"); + add_errcode("EKEYREJECTED", EKEYREJECTED, "Key was rejected by service"); #endif #ifdef EOWNERDEAD - inscode(d, ds, de, "EOWNERDEAD", EOWNERDEAD, "Owner died"); + add_errcode("EOWNERDEAD", EOWNERDEAD, "Owner died"); #endif #ifdef ENOTRECOVERABLE - inscode(d, ds, de, "ENOTRECOVERABLE", ENOTRECOVERABLE, "State not recoverable"); + add_errcode("ENOTRECOVERABLE", ENOTRECOVERABLE, "State not recoverable"); #endif #ifdef ERFKILL - inscode(d, ds, de, "ERFKILL", ERFKILL, "Operation not possible due to RF-kill"); + add_errcode("ERFKILL", ERFKILL, "Operation not possible due to RF-kill"); #endif /* Solaris-specific errnos */ #ifdef ECANCELED - inscode(d, ds, de, "ECANCELED", ECANCELED, "Operation canceled"); + add_errcode("ECANCELED", ECANCELED, "Operation canceled"); #endif #ifdef ENOTSUP - inscode(d, ds, de, "ENOTSUP", ENOTSUP, "Operation not supported"); + add_errcode("ENOTSUP", ENOTSUP, "Operation not supported"); #endif #ifdef EOWNERDEAD - inscode(d, ds, de, "EOWNERDEAD", EOWNERDEAD, "Process died with the lock"); + add_errcode("EOWNERDEAD", EOWNERDEAD, "Process died with the lock"); #endif #ifdef ENOTRECOVERABLE - inscode(d, ds, de, "ENOTRECOVERABLE", ENOTRECOVERABLE, "Lock is not recoverable"); + add_errcode("ENOTRECOVERABLE", ENOTRECOVERABLE, "Lock is not recoverable"); #endif #ifdef ELOCKUNMAPPED - inscode(d, ds, de, "ELOCKUNMAPPED", ELOCKUNMAPPED, "Locked lock was unmapped"); + add_errcode("ELOCKUNMAPPED", ELOCKUNMAPPED, "Locked lock was unmapped"); #endif #ifdef ENOTACTIVE - inscode(d, ds, de, "ENOTACTIVE", ENOTACTIVE, "Facility is not active"); + add_errcode("ENOTACTIVE", ENOTACTIVE, "Facility is not active"); #endif /* MacOSX specific errnos */ #ifdef EAUTH - inscode(d, ds, de, "EAUTH", EAUTH, "Authentication error"); + add_errcode("EAUTH", EAUTH, "Authentication error"); #endif #ifdef EBADARCH - inscode(d, ds, de, "EBADARCH", EBADARCH, "Bad CPU type in executable"); + add_errcode("EBADARCH", EBADARCH, "Bad CPU type in executable"); #endif #ifdef EBADEXEC - inscode(d, ds, de, "EBADEXEC", EBADEXEC, "Bad executable (or shared library)"); + add_errcode("EBADEXEC", EBADEXEC, "Bad executable (or shared library)"); #endif #ifdef EBADMACHO - inscode(d, ds, de, "EBADMACHO", EBADMACHO, "Malformed Mach-o file"); + add_errcode("EBADMACHO", EBADMACHO, "Malformed Mach-o file"); #endif #ifdef EBADRPC - inscode(d, ds, de, "EBADRPC", EBADRPC, "RPC struct is bad"); + add_errcode("EBADRPC", EBADRPC, "RPC struct is bad"); #endif #ifdef EDEVERR - inscode(d, ds, de, "EDEVERR", EDEVERR, "Device error"); + add_errcode("EDEVERR", EDEVERR, "Device error"); #endif #ifdef EFTYPE - inscode(d, ds, de, "EFTYPE", EFTYPE, "Inappropriate file type or format"); + add_errcode("EFTYPE", EFTYPE, "Inappropriate file type or format"); #endif #ifdef ENEEDAUTH - inscode(d, ds, de, "ENEEDAUTH", ENEEDAUTH, "Need authenticator"); + add_errcode("ENEEDAUTH", ENEEDAUTH, "Need authenticator"); #endif #ifdef ENOATTR - inscode(d, ds, de, "ENOATTR", ENOATTR, "Attribute not found"); + add_errcode("ENOATTR", ENOATTR, "Attribute not found"); #endif #ifdef ENOPOLICY - inscode(d, ds, de, "ENOPOLICY", 
ENOPOLICY, "Policy not found"); + add_errcode("ENOPOLICY", ENOPOLICY, "Policy not found"); #endif #ifdef EPROCLIM - inscode(d, ds, de, "EPROCLIM", EPROCLIM, "Too many processes"); + add_errcode("EPROCLIM", EPROCLIM, "Too many processes"); #endif #ifdef EPROCUNAVAIL - inscode(d, ds, de, "EPROCUNAVAIL", EPROCUNAVAIL, "Bad procedure for program"); + add_errcode("EPROCUNAVAIL", EPROCUNAVAIL, "Bad procedure for program"); #endif #ifdef EPROGMISMATCH - inscode(d, ds, de, "EPROGMISMATCH", EPROGMISMATCH, "Program version wrong"); + add_errcode("EPROGMISMATCH", EPROGMISMATCH, "Program version wrong"); #endif #ifdef EPROGUNAVAIL - inscode(d, ds, de, "EPROGUNAVAIL", EPROGUNAVAIL, "RPC prog. not avail"); + add_errcode("EPROGUNAVAIL", EPROGUNAVAIL, "RPC prog. not avail"); #endif #ifdef EPWROFF - inscode(d, ds, de, "EPWROFF", EPWROFF, "Device power is off"); + add_errcode("EPWROFF", EPWROFF, "Device power is off"); #endif #ifdef ERPCMISMATCH - inscode(d, ds, de, "ERPCMISMATCH", ERPCMISMATCH, "RPC version wrong"); + add_errcode("ERPCMISMATCH", ERPCMISMATCH, "RPC version wrong"); #endif #ifdef ESHLIBVERS - inscode(d, ds, de, "ESHLIBVERS", ESHLIBVERS, "Shared library version mismatch"); + add_errcode("ESHLIBVERS", ESHLIBVERS, "Shared library version mismatch"); #endif - Py_DECREF(de); - return m; + Py_DECREF(error_dict); + return 0; +} + +static PyModuleDef_Slot errno_slots[] = { + {Py_mod_exec, errno_exec}, + {0, NULL} +}; + +PyDoc_STRVAR(errno__doc__, +"This module makes available standard errno system symbols.\n\ +\n\ +The value of each symbol is the corresponding integer value,\n\ +e.g., on most systems, errno.ENOENT equals the integer 2.\n\ +\n\ +The dictionary errno.errorcode maps numeric codes to symbol names,\n\ +e.g., errno.errorcode[2] could be the string 'ENOENT'.\n\ +\n\ +Symbols that are not relevant to the underlying system are not defined.\n\ +\n\ +To map error codes to error messages, use the function os.strerror(),\n\ +e.g. os.strerror(2) could return 'No such file or directory'."); + +static struct PyModuleDef errnomodule = { + PyModuleDef_HEAD_INIT, + .m_name = "errno", + .m_doc = errno__doc__, + .m_size = 0, + .m_methods = errno_methods, + .m_slots = errno_slots, +}; + +PyMODINIT_FUNC +PyInit_errno(void) +{ + return PyModuleDef_Init(&errnomodule); } diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 56dcb101e0005e..a44752b1cc4da7 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1181,6 +1181,14 @@ collect(PyThreadState *tstate, int generation, _PyTime_t t1 = 0; /* initialize to prevent a compiler warning */ GCState *gcstate = &tstate->interp->gc; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + if (tstate->interp->config._isolated_interpreter) { + // bpo-40533: The garbage collector must not be run on parallel on + // Python objects shared by multiple interpreters. + return 0; + } +#endif + if (gcstate->debug & DEBUG_STATS) { PySys_WriteStderr("gc: collecting generation %d...\n", generation); show_stats_each_generations(gcstate); diff --git a/Modules/hashtable.c b/Modules/hashtable.c deleted file mode 100644 index 4a36a1e71cdd05..00000000000000 --- a/Modules/hashtable.c +++ /dev/null @@ -1,524 +0,0 @@ -/* The implementation of the hash table (_Py_hashtable_t) is based on the - cfuhash project: - http://sourceforge.net/projects/libcfu/ - - Copyright of cfuhash: - ---------------------------------- - Creation date: 2005-06-24 21:22:40 - Authors: Don - Change log: - - Copyright (c) 2005 Don Owens - All rights reserved. 
- - This code is released under the BSD license: - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of the author nor the names of its - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES - (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) - HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, - STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED - OF THE POSSIBILITY OF SUCH DAMAGE. - ---------------------------------- -*/ - -#include "Python.h" -#include "hashtable.h" - -#define HASHTABLE_MIN_SIZE 16 -#define HASHTABLE_HIGH 0.50 -#define HASHTABLE_LOW 0.10 -#define HASHTABLE_REHASH_FACTOR 2.0 / (HASHTABLE_LOW + HASHTABLE_HIGH) - -#define BUCKETS_HEAD(SLIST) \ - ((_Py_hashtable_entry_t *)_Py_SLIST_HEAD(&(SLIST))) -#define TABLE_HEAD(HT, BUCKET) \ - ((_Py_hashtable_entry_t *)_Py_SLIST_HEAD(&(HT)->buckets[BUCKET])) -#define ENTRY_NEXT(ENTRY) \ - ((_Py_hashtable_entry_t *)_Py_SLIST_ITEM_NEXT(ENTRY)) -#define HASHTABLE_ITEM_SIZE(HT) \ - (sizeof(_Py_hashtable_entry_t) + (HT)->key_size + (HT)->data_size) - -#define ENTRY_READ_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ - do { \ - assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((PDATA), _Py_HASHTABLE_ENTRY_PDATA(TABLE, (ENTRY)), \ - (DATA_SIZE)); \ - } while (0) - -#define ENTRY_WRITE_PDATA(TABLE, ENTRY, DATA_SIZE, PDATA) \ - do { \ - assert((DATA_SIZE) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA((TABLE), (ENTRY)), \ - (PDATA), (DATA_SIZE)); \ - } while (0) - -/* Forward declaration */ -static void hashtable_rehash(_Py_hashtable_t *ht); - -static void -_Py_slist_init(_Py_slist_t *list) -{ - list->head = NULL; -} - - -static void -_Py_slist_prepend(_Py_slist_t *list, _Py_slist_item_t *item) -{ - item->next = list->head; - list->head = item; -} - - -static void -_Py_slist_remove(_Py_slist_t *list, _Py_slist_item_t *previous, - _Py_slist_item_t *item) -{ - if (previous != NULL) - previous->next = item->next; - else - list->head = item->next; -} - - -Py_uhash_t -_Py_hashtable_hash_ptr(struct _Py_hashtable_t *ht, const void *pkey) -{ - void *key; - - _Py_HASHTABLE_READ_KEY(ht, pkey, key); - return (Py_uhash_t)_Py_HashPointer(key); -} - - -int -_Py_hashtable_compare_direct(_Py_hashtable_t *ht, const void *pkey, - const _Py_hashtable_entry_t *entry) -{ - const void *pkey2 = _Py_HASHTABLE_ENTRY_PKEY(entry); - return (memcmp(pkey, pkey2, ht->key_size) == 0); -} - - -/* makes sure the real size of the buckets array is a 
power of 2 */ -static size_t -round_size(size_t s) -{ - size_t i; - if (s < HASHTABLE_MIN_SIZE) - return HASHTABLE_MIN_SIZE; - i = 1; - while (i < s) - i <<= 1; - return i; -} - - -_Py_hashtable_t * -_Py_hashtable_new_full(size_t key_size, size_t data_size, - size_t init_size, - _Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func, - _Py_hashtable_allocator_t *allocator) -{ - _Py_hashtable_t *ht; - size_t buckets_size; - _Py_hashtable_allocator_t alloc; - - if (allocator == NULL) { - alloc.malloc = PyMem_RawMalloc; - alloc.free = PyMem_RawFree; - } - else - alloc = *allocator; - - ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); - if (ht == NULL) - return ht; - - ht->num_buckets = round_size(init_size); - ht->entries = 0; - ht->key_size = key_size; - ht->data_size = data_size; - - buckets_size = ht->num_buckets * sizeof(ht->buckets[0]); - ht->buckets = alloc.malloc(buckets_size); - if (ht->buckets == NULL) { - alloc.free(ht); - return NULL; - } - memset(ht->buckets, 0, buckets_size); - - ht->hash_func = hash_func; - ht->compare_func = compare_func; - ht->alloc = alloc; - return ht; -} - - -_Py_hashtable_t * -_Py_hashtable_new(size_t key_size, size_t data_size, - _Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func) -{ - return _Py_hashtable_new_full(key_size, data_size, - HASHTABLE_MIN_SIZE, - hash_func, compare_func, - NULL); -} - - -size_t -_Py_hashtable_size(_Py_hashtable_t *ht) -{ - size_t size; - - size = sizeof(_Py_hashtable_t); - - /* buckets */ - size += ht->num_buckets * sizeof(_Py_hashtable_entry_t *); - - /* entries */ - size += ht->entries * HASHTABLE_ITEM_SIZE(ht); - - return size; -} - - -#ifdef Py_DEBUG -void -_Py_hashtable_print_stats(_Py_hashtable_t *ht) -{ - size_t size; - size_t chain_len, max_chain_len, total_chain_len, nchains; - _Py_hashtable_entry_t *entry; - size_t hv; - double load; - - size = _Py_hashtable_size(ht); - - load = (double)ht->entries / ht->num_buckets; - - max_chain_len = 0; - total_chain_len = 0; - nchains = 0; - for (hv = 0; hv < ht->num_buckets; hv++) { - entry = TABLE_HEAD(ht, hv); - if (entry != NULL) { - chain_len = 0; - for (; entry; entry = ENTRY_NEXT(entry)) { - chain_len++; - } - if (chain_len > max_chain_len) - max_chain_len = chain_len; - total_chain_len += chain_len; - nchains++; - } - } - printf("hash table %p: entries=%" - PY_FORMAT_SIZE_T "u/%" PY_FORMAT_SIZE_T "u (%.0f%%), ", - (void *)ht, ht->entries, ht->num_buckets, load * 100.0); - if (nchains) - printf("avg_chain_len=%.1f, ", (double)total_chain_len / nchains); - printf("max_chain_len=%" PY_FORMAT_SIZE_T "u, %" PY_FORMAT_SIZE_T "u KiB\n", - max_chain_len, size / 1024); -} -#endif - - -_Py_hashtable_entry_t * -_Py_hashtable_get_entry(_Py_hashtable_t *ht, - size_t key_size, const void *pkey) -{ - Py_uhash_t key_hash; - size_t index; - _Py_hashtable_entry_t *entry; - - assert(key_size == ht->key_size); - - key_hash = ht->hash_func(ht, pkey); - index = key_hash & (ht->num_buckets - 1); - - for (entry = TABLE_HEAD(ht, index); entry != NULL; entry = ENTRY_NEXT(entry)) { - if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) - break; - } - - return entry; -} - - -static int -_Py_hashtable_pop_entry(_Py_hashtable_t *ht, size_t key_size, const void *pkey, - void *data, size_t data_size) -{ - Py_uhash_t key_hash; - size_t index; - _Py_hashtable_entry_t *entry, *previous; - - assert(key_size == ht->key_size); - - key_hash = ht->hash_func(ht, pkey); - index = key_hash & (ht->num_buckets - 1); - - previous = NULL; 
- for (entry = TABLE_HEAD(ht, index); entry != NULL; entry = ENTRY_NEXT(entry)) { - if (entry->key_hash == key_hash && ht->compare_func(ht, pkey, entry)) - break; - previous = entry; - } - - if (entry == NULL) - return 0; - - _Py_slist_remove(&ht->buckets[index], (_Py_slist_item_t *)previous, - (_Py_slist_item_t *)entry); - ht->entries--; - - if (data != NULL) - ENTRY_READ_PDATA(ht, entry, data_size, data); - ht->alloc.free(entry); - - if ((float)ht->entries / (float)ht->num_buckets < HASHTABLE_LOW) - hashtable_rehash(ht); - return 1; -} - - -int -_Py_hashtable_set(_Py_hashtable_t *ht, size_t key_size, const void *pkey, - size_t data_size, const void *data) -{ - Py_uhash_t key_hash; - size_t index; - _Py_hashtable_entry_t *entry; - - assert(key_size == ht->key_size); - - assert(data != NULL || data_size == 0); -#ifndef NDEBUG - /* Don't write the assertion on a single line because it is interesting - to know the duplicated entry if the assertion failed. The entry can - be read using a debugger. */ - entry = _Py_hashtable_get_entry(ht, key_size, pkey); - assert(entry == NULL); -#endif - - key_hash = ht->hash_func(ht, pkey); - index = key_hash & (ht->num_buckets - 1); - - entry = ht->alloc.malloc(HASHTABLE_ITEM_SIZE(ht)); - if (entry == NULL) { - /* memory allocation failed */ - return -1; - } - - entry->key_hash = key_hash; - memcpy((void *)_Py_HASHTABLE_ENTRY_PKEY(entry), pkey, ht->key_size); - if (data) - ENTRY_WRITE_PDATA(ht, entry, data_size, data); - - _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); - ht->entries++; - - if ((float)ht->entries / (float)ht->num_buckets > HASHTABLE_HIGH) - hashtable_rehash(ht); - return 0; -} - - -int -_Py_hashtable_get(_Py_hashtable_t *ht, size_t key_size,const void *pkey, - size_t data_size, void *data) -{ - _Py_hashtable_entry_t *entry; - - assert(data != NULL); - - entry = _Py_hashtable_get_entry(ht, key_size, pkey); - if (entry == NULL) - return 0; - ENTRY_READ_PDATA(ht, entry, data_size, data); - return 1; -} - - -int -_Py_hashtable_pop(_Py_hashtable_t *ht, size_t key_size, const void *pkey, - size_t data_size, void *data) -{ - assert(data != NULL); - return _Py_hashtable_pop_entry(ht, key_size, pkey, data, data_size); -} - - -/* Code commented since the function is not needed in Python */ -#if 0 -void -_Py_hashtable_delete(_Py_hashtable_t *ht, size_t key_size, const void *pkey) -{ -#ifndef NDEBUG - int found = _Py_hashtable_pop_entry(ht, key_size, pkey, NULL, 0); - assert(found); -#else - (void)_Py_hashtable_pop_entry(ht, key_size, pkey, NULL, 0); -#endif -} -#endif - - -int -_Py_hashtable_foreach(_Py_hashtable_t *ht, - _Py_hashtable_foreach_func func, - void *arg) -{ - _Py_hashtable_entry_t *entry; - size_t hv; - - for (hv = 0; hv < ht->num_buckets; hv++) { - for (entry = TABLE_HEAD(ht, hv); entry; entry = ENTRY_NEXT(entry)) { - int res = func(ht, entry, arg); - if (res) - return res; - } - } - return 0; -} - - -static void -hashtable_rehash(_Py_hashtable_t *ht) -{ - size_t buckets_size, new_size, bucket; - _Py_slist_t *old_buckets = NULL; - size_t old_num_buckets; - - new_size = round_size((size_t)(ht->entries * HASHTABLE_REHASH_FACTOR)); - if (new_size == ht->num_buckets) - return; - - old_num_buckets = ht->num_buckets; - - buckets_size = new_size * sizeof(ht->buckets[0]); - old_buckets = ht->buckets; - ht->buckets = ht->alloc.malloc(buckets_size); - if (ht->buckets == NULL) { - /* cancel rehash on memory allocation failure */ - ht->buckets = old_buckets ; - /* memory allocation failed */ - return; - } - memset(ht->buckets, 0, 
buckets_size); - - ht->num_buckets = new_size; - - for (bucket = 0; bucket < old_num_buckets; bucket++) { - _Py_hashtable_entry_t *entry, *next; - for (entry = BUCKETS_HEAD(old_buckets[bucket]); entry != NULL; entry = next) { - size_t entry_index; - - - assert(ht->hash_func(ht, _Py_HASHTABLE_ENTRY_PKEY(entry)) == entry->key_hash); - next = ENTRY_NEXT(entry); - entry_index = entry->key_hash & (new_size - 1); - - _Py_slist_prepend(&ht->buckets[entry_index], (_Py_slist_item_t*)entry); - } - } - - ht->alloc.free(old_buckets); -} - - -void -_Py_hashtable_clear(_Py_hashtable_t *ht) -{ - _Py_hashtable_entry_t *entry, *next; - size_t i; - - for (i=0; i < ht->num_buckets; i++) { - for (entry = TABLE_HEAD(ht, i); entry != NULL; entry = next) { - next = ENTRY_NEXT(entry); - ht->alloc.free(entry); - } - _Py_slist_init(&ht->buckets[i]); - } - ht->entries = 0; - hashtable_rehash(ht); -} - - -void -_Py_hashtable_destroy(_Py_hashtable_t *ht) -{ - size_t i; - - for (i = 0; i < ht->num_buckets; i++) { - _Py_slist_item_t *entry = ht->buckets[i].head; - while (entry) { - _Py_slist_item_t *entry_next = entry->next; - ht->alloc.free(entry); - entry = entry_next; - } - } - - ht->alloc.free(ht->buckets); - ht->alloc.free(ht); -} - - -_Py_hashtable_t * -_Py_hashtable_copy(_Py_hashtable_t *src) -{ - const size_t key_size = src->key_size; - const size_t data_size = src->data_size; - _Py_hashtable_t *dst; - _Py_hashtable_entry_t *entry; - size_t bucket; - int err; - - dst = _Py_hashtable_new_full(key_size, data_size, - src->num_buckets, - src->hash_func, - src->compare_func, - &src->alloc); - if (dst == NULL) - return NULL; - - for (bucket=0; bucket < src->num_buckets; bucket++) { - entry = TABLE_HEAD(src, bucket); - for (; entry; entry = ENTRY_NEXT(entry)) { - const void *pkey = _Py_HASHTABLE_ENTRY_PKEY(entry); - const void *pdata = _Py_HASHTABLE_ENTRY_PDATA(src, entry); - err = _Py_hashtable_set(dst, key_size, pkey, data_size, pdata); - if (err) { - _Py_hashtable_destroy(dst); - return NULL; - } - } - } - return dst; -} diff --git a/Modules/hashtable.h b/Modules/hashtable.h deleted file mode 100644 index dbec23d2851872..00000000000000 --- a/Modules/hashtable.h +++ /dev/null @@ -1,211 +0,0 @@ -#ifndef Py_HASHTABLE_H -#define Py_HASHTABLE_H -/* The whole API is private */ -#ifndef Py_LIMITED_API - -/* Single linked list */ - -typedef struct _Py_slist_item_s { - struct _Py_slist_item_s *next; -} _Py_slist_item_t; - -typedef struct { - _Py_slist_item_t *head; -} _Py_slist_t; - -#define _Py_SLIST_ITEM_NEXT(ITEM) (((_Py_slist_item_t *)ITEM)->next) - -#define _Py_SLIST_HEAD(SLIST) (((_Py_slist_t *)SLIST)->head) - - -/* _Py_hashtable: table entry */ - -typedef struct { - /* used by _Py_hashtable_t.buckets to link entries */ - _Py_slist_item_t _Py_slist_item; - - Py_uhash_t key_hash; - - /* key (key_size bytes) and then data (data_size bytes) follows */ -} _Py_hashtable_entry_t; - -#define _Py_HASHTABLE_ENTRY_PKEY(ENTRY) \ - ((const void *)((char *)(ENTRY) \ - + sizeof(_Py_hashtable_entry_t))) - -#define _Py_HASHTABLE_ENTRY_PDATA(TABLE, ENTRY) \ - ((const void *)((char *)(ENTRY) \ - + sizeof(_Py_hashtable_entry_t) \ - + (TABLE)->key_size)) - -/* Get a key value from pkey: use memcpy() rather than a pointer dereference - to avoid memory alignment issues. 
*/ -#define _Py_HASHTABLE_READ_KEY(TABLE, PKEY, DST_KEY) \ - do { \ - assert(sizeof(DST_KEY) == (TABLE)->key_size); \ - memcpy(&(DST_KEY), (PKEY), sizeof(DST_KEY)); \ - } while (0) - -#define _Py_HASHTABLE_ENTRY_READ_KEY(TABLE, ENTRY, KEY) \ - do { \ - assert(sizeof(KEY) == (TABLE)->key_size); \ - memcpy(&(KEY), _Py_HASHTABLE_ENTRY_PKEY(ENTRY), sizeof(KEY)); \ - } while (0) - -#define _Py_HASHTABLE_ENTRY_READ_DATA(TABLE, ENTRY, DATA) \ - do { \ - assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy(&(DATA), _Py_HASHTABLE_ENTRY_PDATA(TABLE, (ENTRY)), \ - sizeof(DATA)); \ - } while (0) - -#define _Py_HASHTABLE_ENTRY_WRITE_DATA(TABLE, ENTRY, DATA) \ - do { \ - assert(sizeof(DATA) == (TABLE)->data_size); \ - memcpy((void *)_Py_HASHTABLE_ENTRY_PDATA((TABLE), (ENTRY)), \ - &(DATA), sizeof(DATA)); \ - } while (0) - - -/* _Py_hashtable: prototypes */ - -/* Forward declaration */ -struct _Py_hashtable_t; - -typedef Py_uhash_t (*_Py_hashtable_hash_func) (struct _Py_hashtable_t *ht, - const void *pkey); -typedef int (*_Py_hashtable_compare_func) (struct _Py_hashtable_t *ht, - const void *pkey, - const _Py_hashtable_entry_t *he); - -typedef struct { - /* allocate a memory block */ - void* (*malloc) (size_t size); - - /* release a memory block */ - void (*free) (void *ptr); -} _Py_hashtable_allocator_t; - - -/* _Py_hashtable: table */ - -typedef struct _Py_hashtable_t { - size_t num_buckets; - size_t entries; /* Total number of entries in the table. */ - _Py_slist_t *buckets; - size_t key_size; - size_t data_size; - - _Py_hashtable_hash_func hash_func; - _Py_hashtable_compare_func compare_func; - _Py_hashtable_allocator_t alloc; -} _Py_hashtable_t; - -/* hash a pointer (void*) */ -PyAPI_FUNC(Py_uhash_t) _Py_hashtable_hash_ptr( - struct _Py_hashtable_t *ht, - const void *pkey); - -/* comparison using memcmp() */ -PyAPI_FUNC(int) _Py_hashtable_compare_direct( - _Py_hashtable_t *ht, - const void *pkey, - const _Py_hashtable_entry_t *entry); - -PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new( - size_t key_size, - size_t data_size, - _Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func); - -PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_new_full( - size_t key_size, - size_t data_size, - size_t init_size, - _Py_hashtable_hash_func hash_func, - _Py_hashtable_compare_func compare_func, - _Py_hashtable_allocator_t *allocator); - -PyAPI_FUNC(void) _Py_hashtable_destroy(_Py_hashtable_t *ht); - -/* Return a copy of the hash table */ -PyAPI_FUNC(_Py_hashtable_t *) _Py_hashtable_copy(_Py_hashtable_t *src); - -PyAPI_FUNC(void) _Py_hashtable_clear(_Py_hashtable_t *ht); - -typedef int (*_Py_hashtable_foreach_func) (_Py_hashtable_t *ht, - _Py_hashtable_entry_t *entry, - void *arg); - -/* Call func() on each entry of the hashtable. - Iteration stops if func() result is non-zero, in this case it's the result - of the call. Otherwise, the function returns 0. */ -PyAPI_FUNC(int) _Py_hashtable_foreach( - _Py_hashtable_t *ht, - _Py_hashtable_foreach_func func, - void *arg); - -PyAPI_FUNC(size_t) _Py_hashtable_size(_Py_hashtable_t *ht); - -/* Add a new entry to the hash. The key must not be present in the hash table. - Return 0 on success, -1 on memory error. 
- - Don't call directly this function, - but use _Py_HASHTABLE_SET() and _Py_HASHTABLE_SET_NODATA() macros */ -PyAPI_FUNC(int) _Py_hashtable_set( - _Py_hashtable_t *ht, - size_t key_size, - const void *pkey, - size_t data_size, - const void *data); - -#define _Py_HASHTABLE_SET(TABLE, KEY, DATA) \ - _Py_hashtable_set(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) - -#define _Py_HASHTABLE_SET_NODATA(TABLE, KEY) \ - _Py_hashtable_set(TABLE, sizeof(KEY), &(KEY), 0, NULL) - - -/* Get an entry. - Return NULL if the key does not exist. - - Don't call directly this function, but use _Py_HASHTABLE_GET_ENTRY() - macro */ -PyAPI_FUNC(_Py_hashtable_entry_t*) _Py_hashtable_get_entry( - _Py_hashtable_t *ht, - size_t key_size, - const void *pkey); - -#define _Py_HASHTABLE_GET_ENTRY(TABLE, KEY) \ - _Py_hashtable_get_entry(TABLE, sizeof(KEY), &(KEY)) - - -/* Get data from an entry. Copy entry data into data and return 1 if the entry - exists, return 0 if the entry does not exist. - - Don't call directly this function, but use _Py_HASHTABLE_GET() macro */ -PyAPI_FUNC(int) _Py_hashtable_get( - _Py_hashtable_t *ht, - size_t key_size, - const void *pkey, - size_t data_size, - void *data); - -#define _Py_HASHTABLE_GET(TABLE, KEY, DATA) \ - _Py_hashtable_get(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) - - -/* Don't call directly this function, but use _Py_HASHTABLE_POP() macro */ -PyAPI_FUNC(int) _Py_hashtable_pop( - _Py_hashtable_t *ht, - size_t key_size, - const void *pkey, - size_t data_size, - void *data); - -#define _Py_HASHTABLE_POP(TABLE, KEY, DATA) \ - _Py_hashtable_pop(TABLE, sizeof(KEY), &(KEY), sizeof(DATA), &(DATA)) - - -#endif /* Py_LIMITED_API */ -#endif diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index 6c503b3429b23a..a3e22d0a5110da 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -816,8 +816,6 @@ static struct PyMethodDef mmap_object_methods[] = { #ifdef MS_WINDOWS {"__sizeof__", (PyCFunction) mmap__sizeof__method, METH_NOARGS}, #endif - {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, - PyDoc_STR("See PEP 585")}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 0163b0757aefa5..2ddf30de89a680 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -834,7 +834,6 @@ typedef struct { PyObject *st_mode; } _posixstate; -static struct PyModuleDef posixmodule; static inline _posixstate* get_posix_state(PyObject *module) @@ -844,8 +843,6 @@ get_posix_state(PyObject *module) return (_posixstate *)state; } -#define _posixstate_global ((_posixstate *)PyModule_GetState(PyState_FindModule(&posixmodule))) - /* * A PyArg_ParseTuple "converter" function * that handles filesystem paths in the manner @@ -2104,48 +2101,50 @@ statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds) static int _posix_clear(PyObject *module) { - Py_CLEAR(get_posix_state(module)->billion); - Py_CLEAR(get_posix_state(module)->DirEntryType); - Py_CLEAR(get_posix_state(module)->ScandirIteratorType); + _posixstate *state = get_posix_state(module); + Py_CLEAR(state->billion); + Py_CLEAR(state->DirEntryType); + Py_CLEAR(state->ScandirIteratorType); #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) - Py_CLEAR(get_posix_state(module)->SchedParamType); + Py_CLEAR(state->SchedParamType); #endif - Py_CLEAR(get_posix_state(module)->StatResultType); - Py_CLEAR(get_posix_state(module)->StatVFSResultType); - 
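/*
 * Illustrative sketch (not from the patch): typical use of the _Py_hashtable
 * API whose removal is shown above.  Keys and values are copied by value
 * (key_size/data_size bytes via memcpy), so callers pass local variables
 * through the convenience macros.  The helpers "record_pointer",
 * "new_pointer_table" and the key/value choice are hypothetical.
 */
static int
record_pointer(_Py_hashtable_t *seen, void *key, Py_ssize_t value)
{
    Py_ssize_t cached;
    if (_Py_HASHTABLE_GET(seen, key, cached)) {
        return 0;                       /* key already recorded */
    }
    /* _Py_hashtable_set() requires the key to be absent; -1 means no memory. */
    return _Py_HASHTABLE_SET(seen, key, value);
}

static _Py_hashtable_t *
new_pointer_table(void)
{
    /* hash/compare helpers provided by the removed files themselves */
    return _Py_hashtable_new(sizeof(void *), sizeof(Py_ssize_t),
                             _Py_hashtable_hash_ptr,
                             _Py_hashtable_compare_direct);
}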
Py_CLEAR(get_posix_state(module)->TerminalSizeType); - Py_CLEAR(get_posix_state(module)->TimesResultType); - Py_CLEAR(get_posix_state(module)->UnameResultType); + Py_CLEAR(state->StatResultType); + Py_CLEAR(state->StatVFSResultType); + Py_CLEAR(state->TerminalSizeType); + Py_CLEAR(state->TimesResultType); + Py_CLEAR(state->UnameResultType); #if defined(HAVE_WAITID) && !defined(__APPLE__) - Py_CLEAR(get_posix_state(module)->WaitidResultType); + Py_CLEAR(state->WaitidResultType); #endif #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) - Py_CLEAR(get_posix_state(module)->struct_rusage); + Py_CLEAR(state->struct_rusage); #endif - Py_CLEAR(get_posix_state(module)->st_mode); + Py_CLEAR(state->st_mode); return 0; } static int _posix_traverse(PyObject *module, visitproc visit, void *arg) { - Py_VISIT(get_posix_state(module)->billion); - Py_VISIT(get_posix_state(module)->DirEntryType); - Py_VISIT(get_posix_state(module)->ScandirIteratorType); + _posixstate *state = get_posix_state(module); + Py_VISIT(state->billion); + Py_VISIT(state->DirEntryType); + Py_VISIT(state->ScandirIteratorType); #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) - Py_VISIT(get_posix_state(module)->SchedParamType); + Py_VISIT(state->SchedParamType); #endif - Py_VISIT(get_posix_state(module)->StatResultType); - Py_VISIT(get_posix_state(module)->StatVFSResultType); - Py_VISIT(get_posix_state(module)->TerminalSizeType); - Py_VISIT(get_posix_state(module)->TimesResultType); - Py_VISIT(get_posix_state(module)->UnameResultType); + Py_VISIT(state->StatResultType); + Py_VISIT(state->StatVFSResultType); + Py_VISIT(state->TerminalSizeType); + Py_VISIT(state->TimesResultType); + Py_VISIT(state->UnameResultType); #if defined(HAVE_WAITID) && !defined(__APPLE__) - Py_VISIT(get_posix_state(module)->WaitidResultType); + Py_VISIT(state->WaitidResultType); #endif #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) - Py_VISIT(get_posix_state(module)->struct_rusage); + Py_VISIT(state->struct_rusage); #endif - Py_VISIT(get_posix_state(module)->st_mode); + Py_VISIT(state->st_mode); return 0; } @@ -2156,7 +2155,7 @@ _posix_free(void *module) } static void -fill_time(PyObject *v, int index, time_t sec, unsigned long nsec) +fill_time(PyObject *module, PyObject *v, int index, time_t sec, unsigned long nsec) { PyObject *s = _PyLong_FromTime_t(sec); PyObject *ns_fractional = PyLong_FromUnsignedLong(nsec); @@ -2167,7 +2166,7 @@ fill_time(PyObject *v, int index, time_t sec, unsigned long nsec) if (!(s && ns_fractional)) goto exit; - s_in_ns = PyNumber_Multiply(s, _posixstate_global->billion); + s_in_ns = PyNumber_Multiply(s, get_posix_state(module)->billion); if (!s_in_ns) goto exit; @@ -2197,10 +2196,10 @@ fill_time(PyObject *v, int index, time_t sec, unsigned long nsec) /* pack a system stat C structure into the Python stat tuple (used by posix_stat() and posix_fstat()) */ static PyObject* -_pystat_fromstructstat(STRUCT_STAT *st) +_pystat_fromstructstat(PyObject *module, STRUCT_STAT *st) { unsigned long ansec, mnsec, cnsec; - PyObject *StatResultType = _posixstate_global->StatResultType; + PyObject *StatResultType = get_posix_state(module)->StatResultType; PyObject *v = PyStructSequence_New((PyTypeObject *)StatResultType); if (v == NULL) return NULL; @@ -2239,9 +2238,9 @@ _pystat_fromstructstat(STRUCT_STAT *st) #else ansec = mnsec = cnsec = 0; #endif - fill_time(v, 7, st->st_atime, ansec); - fill_time(v, 8, st->st_mtime, mnsec); - fill_time(v, 9, st->st_ctime, 
cnsec); + fill_time(module, v, 7, st->st_atime, ansec); + fill_time(module, v, 8, st->st_mtime, mnsec); + fill_time(module, v, 9, st->st_ctime, cnsec); #ifdef HAVE_STRUCT_STAT_ST_BLKSIZE PyStructSequence_SET_ITEM(v, ST_BLKSIZE_IDX, @@ -2303,7 +2302,7 @@ _pystat_fromstructstat(STRUCT_STAT *st) static PyObject * -posix_do_stat(const char *function_name, path_t *path, +posix_do_stat(PyObject *module, const char *function_name, path_t *path, int dir_fd, int follow_symlinks) { STRUCT_STAT st; @@ -2348,7 +2347,7 @@ posix_do_stat(const char *function_name, path_t *path, return path_error(path); } - return _pystat_fromstructstat(&st); + return _pystat_fromstructstat(module, &st); } /*[python input] @@ -2643,13 +2642,8 @@ class confstr_confname_converter(path_confname_converter): class sysconf_confname_converter(path_confname_converter): converter="conv_sysconf_confname" -class sched_param_converter(CConverter): - type = 'struct sched_param' - converter = 'convert_sched_param' - impl_by_reference = True; - [python start generated code]*/ -/*[python end generated code: output=da39a3ee5e6b4b0d input=418fce0e01144461]*/ +/*[python end generated code: output=da39a3ee5e6b4b0d input=f1c8ae8d744f6c8b]*/ /*[clinic input] @@ -2686,7 +2680,7 @@ static PyObject * os_stat_impl(PyObject *module, path_t *path, int dir_fd, int follow_symlinks) /*[clinic end generated code: output=7d4976e6f18a59c5 input=01d362ebcc06996b]*/ { - return posix_do_stat("stat", path, dir_fd, follow_symlinks); + return posix_do_stat(module, "stat", path, dir_fd, follow_symlinks); } @@ -2710,7 +2704,7 @@ os_lstat_impl(PyObject *module, path_t *path, int dir_fd) /*[clinic end generated code: output=ef82a5d35ce8ab37 input=0b7474765927b925]*/ { int follow_symlinks = 0; - return posix_do_stat("lstat", path, dir_fd, follow_symlinks); + return posix_do_stat(module, "lstat", path, dir_fd, follow_symlinks); } @@ -4852,11 +4846,11 @@ utime_default(utime_t *ut, const char *path) #endif static int -split_py_long_to_s_and_ns(PyObject *py_long, time_t *s, long *ns) +split_py_long_to_s_and_ns(PyObject *module, PyObject *py_long, time_t *s, long *ns) { int result = 0; PyObject *divmod; - divmod = PyNumber_Divmod(py_long, _posixstate_global->billion); + divmod = PyNumber_Divmod(py_long, get_posix_state(module)->billion); if (!divmod) goto exit; if (!PyTuple_Check(divmod) || PyTuple_GET_SIZE(divmod) != 2) { @@ -4968,9 +4962,9 @@ os_utime_impl(PyObject *module, path_t *path, PyObject *times, PyObject *ns, return NULL; } utime.now = 0; - if (!split_py_long_to_s_and_ns(PyTuple_GET_ITEM(ns, 0), + if (!split_py_long_to_s_and_ns(module, PyTuple_GET_ITEM(ns, 0), &utime.atime_s, &utime.atime_ns) || - !split_py_long_to_s_and_ns(PyTuple_GET_ITEM(ns, 1), + !split_py_long_to_s_and_ns(module, PyTuple_GET_ITEM(ns, 1), &utime.mtime_s, &utime.mtime_ns)) { return NULL; } @@ -5421,11 +5415,11 @@ enum posix_spawn_file_actions_identifier { #if defined(HAVE_SCHED_SETPARAM) || defined(HAVE_SCHED_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDULER) || defined(POSIX_SPAWN_SETSCHEDPARAM) static int -convert_sched_param(PyObject *param, struct sched_param *res); +convert_sched_param(PyObject *module, PyObject *param, struct sched_param *res); #endif static int -parse_posix_spawn_flags(const char *func_name, PyObject *setpgroup, +parse_posix_spawn_flags(PyObject *module, const char *func_name, PyObject *setpgroup, int resetids, int setsid, PyObject *setsigmask, PyObject *setsigdef, PyObject *scheduler, posix_spawnattr_t *attrp) @@ -5495,11 +5489,15 @@ parse_posix_spawn_flags(const 
char *func_name, PyObject *setpgroup, if (scheduler) { #ifdef POSIX_SPAWN_SETSCHEDULER PyObject *py_schedpolicy; + PyObject *schedparam_obj; struct sched_param schedparam; - if (!PyArg_ParseTuple(scheduler, "OO&" + if (!PyArg_ParseTuple(scheduler, "OO" ";A scheduler tuple must have two elements", - &py_schedpolicy, convert_sched_param, &schedparam)) { + &py_schedpolicy, &schedparam_obj)) { + goto fail; + } + if (!convert_sched_param(module, schedparam_obj, &schedparam)) { goto fail; } if (py_schedpolicy != Py_None) { @@ -5728,7 +5726,7 @@ py_posix_spawn(int use_posix_spawnp, PyObject *module, path_t *path, PyObject *a file_actionsp = &file_actions_buf; } - if (parse_posix_spawn_flags(func_name, setpgroup, resetids, setsid, + if (parse_posix_spawn_flags(module, func_name, setpgroup, resetids, setsid, setsigmask, setsigdef, scheduler, &attr)) { goto exit; } @@ -6378,11 +6376,11 @@ static PyStructSequence_Desc sched_param_desc = { }; static int -convert_sched_param(PyObject *param, struct sched_param *res) +convert_sched_param(PyObject *module, PyObject *param, struct sched_param *res) { long priority; - if (!Py_IS_TYPE(param, (PyTypeObject *)_posixstate_global->SchedParamType)) { + if (!Py_IS_TYPE(param, (PyTypeObject *)get_posix_state(module)->SchedParamType)) { PyErr_SetString(PyExc_TypeError, "must have a sched_param object"); return 0; } @@ -6405,7 +6403,7 @@ os.sched_setscheduler pid: pid_t policy: int - param: sched_param + param as param_obj: object / Set the scheduling policy for the process identified by pid. @@ -6416,15 +6414,20 @@ param is an instance of sched_param. static PyObject * os_sched_setscheduler_impl(PyObject *module, pid_t pid, int policy, - struct sched_param *param) -/*[clinic end generated code: output=b0ac0a70d3b1d705 input=c581f9469a5327dd]*/ + PyObject *param_obj) +/*[clinic end generated code: output=cde27faa55dc993e input=73013d731bd8fbe9]*/ { + struct sched_param param; + if (!convert_sched_param(module, param_obj, ¶m)) { + return NULL; + } + /* ** sched_setscheduler() returns 0 in Linux, but the previous ** scheduling policy under Solaris/Illumos, and others. ** On error, -1 is returned in all Operating Systems. */ - if (sched_setscheduler(pid, policy, param) == -1) + if (sched_setscheduler(pid, policy, ¶m) == -1) return posix_error(); Py_RETURN_NONE; } @@ -6453,7 +6456,7 @@ os_sched_getparam_impl(PyObject *module, pid_t pid) if (sched_getparam(pid, ¶m)) return posix_error(); - PyObject *SchedParamType = _posixstate_global->SchedParamType; + PyObject *SchedParamType = get_posix_state(module)->SchedParamType; result = PyStructSequence_New((PyTypeObject *)SchedParamType); if (!result) return NULL; @@ -6470,7 +6473,7 @@ os_sched_getparam_impl(PyObject *module, pid_t pid) /*[clinic input] os.sched_setparam pid: pid_t - param: sched_param + param as param_obj: object / Set scheduling parameters for the process identified by pid. @@ -6480,11 +6483,15 @@ param should be an instance of sched_param. 
[clinic start generated code]*/ static PyObject * -os_sched_setparam_impl(PyObject *module, pid_t pid, - struct sched_param *param) -/*[clinic end generated code: output=8af013f78a32b591 input=6b8d6dfcecdc21bd]*/ +os_sched_setparam_impl(PyObject *module, pid_t pid, PyObject *param_obj) +/*[clinic end generated code: output=f19fe020a53741c1 input=27b98337c8b2dcc7]*/ { - if (sched_setparam(pid, param)) + struct sched_param param; + if (!convert_sched_param(module, param_obj, ¶m)) { + return NULL; + } + + if (sched_setparam(pid, ¶m)) return posix_error(); Py_RETURN_NONE; } @@ -7710,7 +7717,7 @@ os_setgroups(PyObject *module, PyObject *groups) #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) static PyObject * -wait_helper(pid_t pid, int status, struct rusage *ru) +wait_helper(PyObject *module, pid_t pid, int status, struct rusage *ru) { PyObject *result; PyObject *struct_rusage; @@ -7727,7 +7734,7 @@ wait_helper(pid_t pid, int status, struct rusage *ru) PyObject *m = PyImport_ImportModuleNoBlock("resource"); if (m == NULL) return NULL; - struct_rusage = PyObject_GetAttr(m, _posixstate_global->struct_rusage); + struct_rusage = PyObject_GetAttr(m, get_posix_state(module)->struct_rusage); Py_DECREF(m); if (struct_rusage == NULL) return NULL; @@ -7803,7 +7810,7 @@ os_wait3_impl(PyObject *module, int options) if (pid < 0) return (!async_err) ? posix_error() : NULL; - return wait_helper(pid, WAIT_STATUS_INT(status), &ru); + return wait_helper(module, pid, WAIT_STATUS_INT(status), &ru); } #endif /* HAVE_WAIT3 */ @@ -7840,7 +7847,7 @@ os_wait4_impl(PyObject *module, pid_t pid, int options) if (res < 0) return (!async_err) ? posix_error() : NULL; - return wait_helper(res, WAIT_STATUS_INT(status), &ru); + return wait_helper(module, res, WAIT_STATUS_INT(status), &ru); } #endif /* HAVE_WAIT4 */ @@ -8375,11 +8382,11 @@ static PyStructSequence_Desc times_result_desc = { #ifdef HAVE_TIMES static PyObject * -build_times_result(double user, double system, +build_times_result(PyObject *module, double user, double system, double children_user, double children_system, double elapsed) { - PyObject *TimesResultType = _posixstate_global->TimesResultType; + PyObject *TimesResultType = get_posix_state(module)->TimesResultType; PyObject *value = PyStructSequence_New((PyTypeObject *)TimesResultType); if (value == NULL) return NULL; @@ -8435,7 +8442,7 @@ os_times_impl(PyObject *module) 1e7 is one second in such units; 1e-7 the inverse. 429.4967296 is 2**32 / 1e7 or 2**32 * 1e-7. 
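       Concretely, a FILETIME is a 64-bit count of 100 ns units split into
       dwHighDateTime (H) and dwLowDateTime (L), so the count is H*2**32 + L
       and seconds = (H*2**32 + L) / 1e7 = H*429.4967296 + L*1e-7, which is
       exactly the expression passed to build_times_result() below.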
*/ - return build_times_result( + return build_times_result(module, (double)(user.dwHighDateTime*429.4967296 + user.dwLowDateTime*1e-7), (double)(kernel.dwHighDateTime*429.4967296 + @@ -8454,7 +8461,7 @@ os_times_impl(PyObject *module) c = times(&t); if (c == (clock_t) -1) return posix_error(); - return build_times_result( + return build_times_result(module, (double)t.tms_utime / ticks_per_second, (double)t.tms_stime / ticks_per_second, (double)t.tms_cutime / ticks_per_second, @@ -9515,7 +9522,7 @@ os_fstat_impl(PyObject *module, int fd) #endif } - return _pystat_fromstructstat(&st); + return _pystat_fromstructstat(module, &st); } @@ -10601,8 +10608,8 @@ os_WSTOPSIG_impl(PyObject *module, int status) #include static PyObject* -_pystatvfs_fromstructstatvfs(struct statvfs st) { - PyObject *StatVFSResultType = _posixstate_global->StatVFSResultType; +_pystatvfs_fromstructstatvfs(PyObject *module, struct statvfs st) { + PyObject *StatVFSResultType = get_posix_state(module)->StatVFSResultType; PyObject *v = PyStructSequence_New((PyTypeObject *)StatVFSResultType); if (v == NULL) return NULL; @@ -10679,7 +10686,7 @@ os_fstatvfs_impl(PyObject *module, int fd) if (result != 0) return (!async_err) ? posix_error() : NULL; - return _pystatvfs_fromstructstatvfs(st); + return _pystatvfs_fromstructstatvfs(module, st); } #endif /* defined(HAVE_FSTATVFS) && defined(HAVE_SYS_STATVFS_H) */ @@ -10726,7 +10733,7 @@ os_statvfs_impl(PyObject *module, path_t *path) return path_error(path); } - return _pystatvfs_fromstructstatvfs(st); + return _pystatvfs_fromstructstatvfs(module, st); } #endif /* defined(HAVE_STATVFS) && defined(HAVE_SYS_STATVFS_H) */ @@ -12742,17 +12749,20 @@ DirEntry_dealloc(DirEntry *entry) /* Forward reference */ static int -DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits); +DirEntry_test_mode(PyTypeObject *defining_class, DirEntry *self, + int follow_symlinks, unsigned short mode_bits); /*[clinic input] os.DirEntry.is_symlink -> bool + defining_class: defining_class + / Return True if the entry is a symbolic link; cached per entry. 
[clinic start generated code]*/ static int -os_DirEntry_is_symlink_impl(DirEntry *self) -/*[clinic end generated code: output=42244667d7bcfc25 input=1605a1b4b96976c3]*/ +os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class) +/*[clinic end generated code: output=293096d589b6d47c input=e9acc5ee4d511113]*/ { #ifdef MS_WINDOWS return (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK; @@ -12761,15 +12771,15 @@ os_DirEntry_is_symlink_impl(DirEntry *self) if (self->d_type != DT_UNKNOWN) return self->d_type == DT_LNK; else - return DirEntry_test_mode(self, 0, S_IFLNK); + return DirEntry_test_mode(defining_class, self, 0, S_IFLNK); #else /* POSIX without d_type */ - return DirEntry_test_mode(self, 0, S_IFLNK); + return DirEntry_test_mode(defining_class, self, 0, S_IFLNK); #endif } static PyObject * -DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) +DirEntry_fetch_stat(PyObject *module, DirEntry *self, int follow_symlinks) { int result; STRUCT_STAT st; @@ -12805,17 +12815,18 @@ DirEntry_fetch_stat(DirEntry *self, int follow_symlinks) if (result != 0) return path_object_error(self->path); - return _pystat_fromstructstat(&st); + return _pystat_fromstructstat(module, &st); } static PyObject * -DirEntry_get_lstat(DirEntry *self) +DirEntry_get_lstat(PyTypeObject *defining_class, DirEntry *self) { if (!self->lstat) { + PyObject *module = PyType_GetModule(defining_class); #ifdef MS_WINDOWS - self->lstat = _pystat_fromstructstat(&self->win32_lstat); + self->lstat = _pystat_fromstructstat(module, &self->win32_lstat); #else /* POSIX */ - self->lstat = DirEntry_fetch_stat(self, 0); + self->lstat = DirEntry_fetch_stat(module, self, 0); #endif } Py_XINCREF(self->lstat); @@ -12824,6 +12835,8 @@ DirEntry_get_lstat(DirEntry *self) /*[clinic input] os.DirEntry.stat + defining_class: defining_class + / * follow_symlinks: bool = True @@ -12831,20 +12844,26 @@ Return stat_result object for the entry; cached per entry. 
[clinic start generated code]*/ static PyObject * -os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks) -/*[clinic end generated code: output=008593b3a6d01305 input=280d14c1d6f1d00d]*/ +os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks) +/*[clinic end generated code: output=23f803e19c3e780e input=e816273c4e67ee98]*/ { - if (!follow_symlinks) - return DirEntry_get_lstat(self); + if (!follow_symlinks) { + return DirEntry_get_lstat(defining_class, self); + } if (!self->stat) { - int result = os_DirEntry_is_symlink_impl(self); - if (result == -1) + int result = os_DirEntry_is_symlink_impl(self, defining_class); + if (result == -1) { return NULL; - else if (result) - self->stat = DirEntry_fetch_stat(self, 1); - else - self->stat = DirEntry_get_lstat(self); + } + if (result) { + PyObject *module = PyType_GetModule(defining_class); + self->stat = DirEntry_fetch_stat(module, self, 1); + } + else { + self->stat = DirEntry_get_lstat(defining_class, self); + } } Py_XINCREF(self->stat); @@ -12853,7 +12872,8 @@ os_DirEntry_stat_impl(DirEntry *self, int follow_symlinks) /* Set exception and return -1 on error, 0 for False, 1 for True */ static int -DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits) +DirEntry_test_mode(PyTypeObject *defining_class, DirEntry *self, + int follow_symlinks, unsigned short mode_bits) { PyObject *stat = NULL; PyObject *st_mode = NULL; @@ -12878,7 +12898,7 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits #if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE) if (need_stat) { #endif - stat = os_DirEntry_stat_impl(self, follow_symlinks); + stat = os_DirEntry_stat_impl(self, defining_class, follow_symlinks); if (!stat) { if (PyErr_ExceptionMatches(PyExc_FileNotFoundError)) { /* If file doesn't exist (anymore), then return False @@ -12888,7 +12908,8 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits } goto error; } - st_mode = PyObject_GetAttr(stat, _posixstate_global->st_mode); + _posixstate* state = get_posix_state(PyType_GetModule(defining_class)); + st_mode = PyObject_GetAttr(stat, state->st_mode); if (!st_mode) goto error; @@ -12931,6 +12952,8 @@ DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits /*[clinic input] os.DirEntry.is_dir -> bool + defining_class: defining_class + / * follow_symlinks: bool = True @@ -12938,14 +12961,17 @@ Return True if the entry is a directory; cached per entry. [clinic start generated code]*/ static int -os_DirEntry_is_dir_impl(DirEntry *self, int follow_symlinks) -/*[clinic end generated code: output=ad2e8d54365da287 input=0135232766f53f58]*/ +os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks) +/*[clinic end generated code: output=0cd453b9c0987fdf input=1a4ffd6dec9920cb]*/ { - return DirEntry_test_mode(self, follow_symlinks, S_IFDIR); + return DirEntry_test_mode(defining_class, self, follow_symlinks, S_IFDIR); } /*[clinic input] os.DirEntry.is_file -> bool + defining_class: defining_class + / * follow_symlinks: bool = True @@ -12953,10 +12979,11 @@ Return True if the entry is a file; cached per entry. 
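DirEntry_test_mode reaches the module state with get_posix_state(PyType_GetModule(defining_class)); the new PyType_GetModuleState() (see the typeobject.c hunk further below) folds that pair into one call. A rough, hypothetical sketch of a similar mode test built on it (demo_state and its stored "st_mode" string are assumptions, not posixmodule code):

#include <Python.h>
#include <sys/stat.h>       /* S_IFMT */

typedef struct {
    PyObject *st_mode;      /* interned "st_mode" string */
} demo_state;

/* Returns -1 on error, 0 for False, 1 for True, loosely mirroring
 * DirEntry_test_mode() above. */
static int
demo_test_mode(PyTypeObject *defining_class, PyObject *stat_result,
               unsigned short mode_bits)
{
    demo_state *state = (demo_state *)PyType_GetModuleState(defining_class);
    if (state == NULL) {
        return -1;
    }
    PyObject *st_mode = PyObject_GetAttr(stat_result, state->st_mode);
    if (st_mode == NULL) {
        return -1;
    }
    long mode = PyLong_AsLong(st_mode);
    Py_DECREF(st_mode);
    if (mode == -1 && PyErr_Occurred()) {
        return -1;
    }
    return (mode & S_IFMT) == mode_bits;
}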
[clinic start generated code]*/ static int -os_DirEntry_is_file_impl(DirEntry *self, int follow_symlinks) -/*[clinic end generated code: output=8462ade481d8a476 input=0dc90be168b041ee]*/ +os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class, + int follow_symlinks) +/*[clinic end generated code: output=f7c277ab5ba80908 input=0a64c5a12e802e3b]*/ { - return DirEntry_test_mode(self, follow_symlinks, S_IFREG); + return DirEntry_test_mode(defining_class, self, follow_symlinks, S_IFREG); } /*[clinic input] @@ -13092,14 +13119,14 @@ join_path_filenameW(const wchar_t *path_wide, const wchar_t *filename) } static PyObject * -DirEntry_from_find_data(path_t *path, WIN32_FIND_DATAW *dataW) +DirEntry_from_find_data(PyObject *module, path_t *path, WIN32_FIND_DATAW *dataW) { DirEntry *entry; BY_HANDLE_FILE_INFORMATION file_info; ULONG reparse_tag; wchar_t *joined_path; - PyObject *DirEntryType = _posixstate_global->DirEntryType; + PyObject *DirEntryType = get_posix_state(module)->DirEntryType; entry = PyObject_New(DirEntry, (PyTypeObject *)DirEntryType); if (!entry) return NULL; @@ -13177,8 +13204,8 @@ join_path_filename(const char *path_narrow, const char* filename, Py_ssize_t fil } static PyObject * -DirEntry_from_posix_info(path_t *path, const char *name, Py_ssize_t name_len, - ino_t d_ino +DirEntry_from_posix_info(PyObject *module, path_t *path, const char *name, + Py_ssize_t name_len, ino_t d_ino #ifdef HAVE_DIRENT_D_TYPE , unsigned char d_type #endif @@ -13187,7 +13214,7 @@ DirEntry_from_posix_info(path_t *path, const char *name, Py_ssize_t name_len, DirEntry *entry; char *joined_path; - PyObject *DirEntryType = _posixstate_global->DirEntryType; + PyObject *DirEntryType = get_posix_state(module)->DirEntryType; entry = PyObject_New(DirEntry, (PyTypeObject *)DirEntryType); if (!entry) return NULL; @@ -13307,8 +13334,10 @@ ScandirIterator_iternext(ScandirIterator *iterator) /* Skip over . and .. */ if (wcscmp(file_data->cFileName, L".") != 0 && - wcscmp(file_data->cFileName, L"..") != 0) { - entry = DirEntry_from_find_data(&iterator->path, file_data); + wcscmp(file_data->cFileName, L"..") != 0) + { + PyObject *module = PyType_GetModule(Py_TYPE(iterator)); + entry = DirEntry_from_find_data(module, &iterator->path, file_data); if (!entry) break; return entry; @@ -13379,10 +13408,12 @@ ScandirIterator_iternext(ScandirIterator *iterator) is_dot = direntp->d_name[0] == '.' && (name_len == 1 || (direntp->d_name[1] == '.' && name_len == 2)); if (!is_dot) { - entry = DirEntry_from_posix_info(&iterator->path, direntp->d_name, - name_len, direntp->d_ino + PyObject *module = PyType_GetModule(Py_TYPE(iterator)); + entry = DirEntry_from_posix_info(module, + &iterator->path, direntp->d_name, + name_len, direntp->d_ino #ifdef HAVE_DIRENT_D_TYPE - , direntp->d_type + , direntp->d_type #endif ); if (!entry) @@ -13480,6 +13511,8 @@ static PyType_Spec ScandirIteratorType_spec = { MODNAME ".ScandirIterator", sizeof(ScandirIterator), 0, + // bpo-40549: Py_TPFLAGS_BASETYPE should not be used, since + // PyType_GetModule(Py_TYPE(self)) doesn't work on a subclass instance. 
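ScandirIterator has no defining_class at hand in tp_iternext, so it goes through the instance instead: PyType_GetModule(Py_TYPE(iterator)). That only stays correct when instances are exactly of the heap type created from the spec, which is the point of the bpo-40549 comment above about leaving out Py_TPFLAGS_BASETYPE. A hypothetical sketch of the same arrangement:

#include <Python.h>

typedef struct {
    PyObject_HEAD
} DemoIterObject;

static PyObject *
DemoIter_next(PyObject *op)
{
    /* Safe because the type is final (no Py_TPFLAGS_BASETYPE): Py_TYPE(op)
     * is exactly the heap type created with PyType_FromModuleAndSpec(), so
     * it knows its owning module. */
    PyObject *module = PyType_GetModule(Py_TYPE(op));
    if (module == NULL) {
        return NULL;
    }
    void *state = PyModule_GetState(module);
    (void)state;    /* ... produce the next item from the module state ... */
    return NULL;    /* NULL with no exception set ends the iteration */
}

static PyType_Slot DemoIter_slots[] = {
    {Py_tp_iter, PyObject_SelfIter},
    {Py_tp_iternext, DemoIter_next},
    {0, NULL},
};

static PyType_Spec DemoIter_spec = {
    .name = "demo.DemoIterator",
    .basicsize = sizeof(DemoIterObject),
    .flags = Py_TPFLAGS_DEFAULT,    /* deliberately not Py_TPFLAGS_BASETYPE */
    .slots = DemoIter_slots,
};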
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_FINALIZE, ScandirIteratorType_slots }; @@ -14632,19 +14665,6 @@ all_ins(PyObject *m) } -static struct PyModuleDef posixmodule = { - PyModuleDef_HEAD_INIT, - MODNAME, - posix__doc__, - sizeof(_posixstate), - posix_methods, - NULL, - _posix_traverse, - _posix_clear, - _posix_free, -}; - - static const char * const have_functions[] = { #ifdef HAVE_FACCESSAT @@ -14779,35 +14799,23 @@ static const char * const have_functions[] = { }; -PyMODINIT_FUNC -INITFUNC(void) +static int +posixmodule_exec(PyObject *m) { - PyObject *m, *v; - PyObject *list; - const char * const *trace; - - m = PyState_FindModule(&posixmodule); - if (m != NULL) { - Py_INCREF(m); - return m; - } - - m = PyModule_Create(&posixmodule); - if (m == NULL) - return NULL; + _posixstate *state = get_posix_state(m); /* Initialize environ dictionary */ - v = convertenviron(); + PyObject *v = convertenviron(); Py_XINCREF(v); if (v == NULL || PyModule_AddObject(m, "environ", v) != 0) - return NULL; + return -1; Py_DECREF(v); if (all_ins(m)) - return NULL; + return -1; if (setup_confname_tables(m)) - return NULL; + return -1; Py_INCREF(PyExc_OSError); PyModule_AddObject(m, "error", PyExc_OSError); @@ -14816,11 +14824,11 @@ INITFUNC(void) waitid_result_desc.name = MODNAME ".waitid_result"; PyObject *WaitidResultType = (PyObject *)PyStructSequence_NewType(&waitid_result_desc); if (WaitidResultType == NULL) { - return NULL; + return -1; } Py_INCREF(WaitidResultType); PyModule_AddObject(m, "waitid_result", WaitidResultType); - get_posix_state(m)->WaitidResultType = WaitidResultType; + state->WaitidResultType = WaitidResultType; #endif stat_result_desc.name = "os.stat_result"; /* see issue #19209 */ @@ -14829,22 +14837,22 @@ INITFUNC(void) stat_result_desc.fields[9].name = PyStructSequence_UnnamedField; PyObject *StatResultType = (PyObject *)PyStructSequence_NewType(&stat_result_desc); if (StatResultType == NULL) { - return NULL; + return -1; } Py_INCREF(StatResultType); PyModule_AddObject(m, "stat_result", StatResultType); - get_posix_state(m)->StatResultType = StatResultType; + state->StatResultType = StatResultType; structseq_new = ((PyTypeObject *)StatResultType)->tp_new; ((PyTypeObject *)StatResultType)->tp_new = statresult_new; statvfs_result_desc.name = "os.statvfs_result"; /* see issue #19209 */ PyObject *StatVFSResultType = (PyObject *)PyStructSequence_NewType(&statvfs_result_desc); if (StatVFSResultType == NULL) { - return NULL; + return -1; } Py_INCREF(StatVFSResultType); PyModule_AddObject(m, "statvfs_result", StatVFSResultType); - get_posix_state(m)->StatVFSResultType = StatVFSResultType; + state->StatVFSResultType = StatVFSResultType; #ifdef NEED_TICKS_PER_SECOND # if defined(HAVE_SYSCONF) && defined(_SC_CLK_TCK) ticks_per_second = sysconf(_SC_CLK_TCK); @@ -14859,54 +14867,54 @@ INITFUNC(void) sched_param_desc.name = MODNAME ".sched_param"; PyObject *SchedParamType = (PyObject *)PyStructSequence_NewType(&sched_param_desc); if (SchedParamType == NULL) { - return NULL; + return -1; } Py_INCREF(SchedParamType); PyModule_AddObject(m, "sched_param", SchedParamType); - get_posix_state(m)->SchedParamType = SchedParamType; + state->SchedParamType = SchedParamType; ((PyTypeObject *)SchedParamType)->tp_new = os_sched_param; #endif /* initialize TerminalSize_info */ PyObject *TerminalSizeType = (PyObject *)PyStructSequence_NewType(&TerminalSize_desc); if (TerminalSizeType == NULL) { - return NULL; + return -1; } Py_INCREF(TerminalSizeType); PyModule_AddObject(m, "terminal_size", TerminalSizeType); - 
get_posix_state(m)->TerminalSizeType = TerminalSizeType; + state->TerminalSizeType = TerminalSizeType; /* initialize scandir types */ - PyObject *ScandirIteratorType = PyType_FromSpec(&ScandirIteratorType_spec); + PyObject *ScandirIteratorType = PyType_FromModuleAndSpec(m, &ScandirIteratorType_spec, NULL); if (ScandirIteratorType == NULL) { - return NULL; + return -1; } - get_posix_state(m)->ScandirIteratorType = ScandirIteratorType; + state->ScandirIteratorType = ScandirIteratorType; - PyObject *DirEntryType = PyType_FromSpec(&DirEntryType_spec); + PyObject *DirEntryType = PyType_FromModuleAndSpec(m, &DirEntryType_spec, NULL); if (DirEntryType == NULL) { - return NULL; + return -1; } Py_INCREF(DirEntryType); PyModule_AddObject(m, "DirEntry", DirEntryType); - get_posix_state(m)->DirEntryType = DirEntryType; + state->DirEntryType = DirEntryType; times_result_desc.name = MODNAME ".times_result"; PyObject *TimesResultType = (PyObject *)PyStructSequence_NewType(×_result_desc); if (TimesResultType == NULL) { - return NULL; + return -1; } Py_INCREF(TimesResultType); PyModule_AddObject(m, "times_result", TimesResultType); - get_posix_state(m)->TimesResultType = TimesResultType; + state->TimesResultType = TimesResultType; PyTypeObject *UnameResultType = PyStructSequence_NewType(&uname_result_desc); if (UnameResultType == NULL) { - return NULL; + return -1; } Py_INCREF(UnameResultType); PyModule_AddObject(m, "uname_result", (PyObject *)UnameResultType); - get_posix_state(m)->UnameResultType = (PyObject *)UnameResultType; + state->UnameResultType = (PyObject *)UnameResultType; #ifdef __APPLE__ /* @@ -14922,7 +14930,7 @@ INITFUNC(void) #ifdef HAVE_FSTATVFS if (fstatvfs == NULL) { if (PyObject_DelAttrString(m, "fstatvfs") == -1) { - return NULL; + return -1; } } #endif /* HAVE_FSTATVFS */ @@ -14930,7 +14938,7 @@ INITFUNC(void) #ifdef HAVE_STATVFS if (statvfs == NULL) { if (PyObject_DelAttrString(m, "statvfs") == -1) { - return NULL; + return -1; } } #endif /* HAVE_STATVFS */ @@ -14938,7 +14946,7 @@ INITFUNC(void) # ifdef HAVE_LCHOWN if (lchown == NULL) { if (PyObject_DelAttrString(m, "lchown") == -1) { - return NULL; + return -1; } } #endif /* HAVE_LCHOWN */ @@ -14946,16 +14954,16 @@ INITFUNC(void) #endif /* __APPLE__ */ - if ((get_posix_state(m)->billion = PyLong_FromLong(1000000000)) == NULL) - return NULL; + if ((state->billion = PyLong_FromLong(1000000000)) == NULL) + return -1; #if defined(HAVE_WAIT3) || defined(HAVE_WAIT4) - get_posix_state(m)->struct_rusage = PyUnicode_InternFromString("struct_rusage"); - if (get_posix_state(m)->struct_rusage == NULL) - return NULL; + state->struct_rusage = PyUnicode_InternFromString("struct_rusage"); + if (state->struct_rusage == NULL) + return -1; #endif - get_posix_state(m)->st_mode = PyUnicode_InternFromString("st_mode"); - if (get_posix_state(m)->st_mode == NULL) - return NULL; + state->st_mode = PyUnicode_InternFromString("st_mode"); + if (state->st_mode == NULL) + return -1; /* suppress "function not used" warnings */ { @@ -14971,20 +14979,45 @@ INITFUNC(void) * provide list of locally available functions * so os.py can populate support_* lists */ - list = PyList_New(0); - if (!list) - return NULL; - for (trace = have_functions; *trace; trace++) { + PyObject *list = PyList_New(0); + if (!list) { + return -1; + } + for (const char * const *trace = have_functions; *trace; trace++) { PyObject *unicode = PyUnicode_DecodeASCII(*trace, strlen(*trace), NULL); if (!unicode) - return NULL; + return -1; if (PyList_Append(list, unicode)) - return NULL; + return -1; 
Py_DECREF(unicode); } PyModule_AddObject(m, "_have_functions", list); - return m; + return 0; +} + + +static PyModuleDef_Slot posixmodile_slots[] = { + {Py_mod_exec, posixmodule_exec}, + {0, NULL} +}; + +static struct PyModuleDef posixmodule = { + PyModuleDef_HEAD_INIT, + .m_name = MODNAME, + .m_doc = posix__doc__, + .m_size = sizeof(_posixstate), + .m_methods = posix_methods, + .m_slots = posixmodile_slots, + .m_traverse = _posix_traverse, + .m_clear = _posix_clear, + .m_free = _posix_free, +}; + +PyMODINIT_FUNC +INITFUNC(void) +{ + return PyModuleDef_Init(&posixmodule); } #ifdef __cplusplus diff --git a/Objects/abstract.c b/Objects/abstract.c index 6e390dd92c3aef..5b85b014bd22e3 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -900,7 +900,7 @@ binary_op(PyObject *v, PyObject *w, const int op_slot, const char *op_name) Py_DECREF(result); if (op_slot == NB_SLOT(nb_rshift) && - PyCFunction_Check(v) && + PyCFunction_CheckExact(v) && strcmp(((PyCFunctionObject *)v)->m_ml->ml_name, "print") == 0) { PyErr_Format(PyExc_TypeError, @@ -2287,7 +2287,7 @@ method_output_as_list(PyObject *o, _Py_Identifier *meth_id) PyErr_Format(PyExc_TypeError, "%.200s.%U() returned a non-iterable (type %.200s)", Py_TYPE(o)->tp_name, - meth_id->object, + _PyUnicode_FromId(meth_id), Py_TYPE(meth_output)->tp_name); } Py_DECREF(meth_output); diff --git a/Objects/descrobject.c b/Objects/descrobject.c index 572baa5e312d26..c29cf7a4c44640 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -127,7 +127,11 @@ classmethod_get(PyMethodDescrObject *descr, PyObject *obj, PyObject *type) ((PyTypeObject *)type)->tp_name); return NULL; } - return PyCFunction_NewEx(descr->d_method, type, NULL); + PyTypeObject *cls = NULL; + if (descr->d_method->ml_flags & METH_METHOD) { + cls = descr->d_common.d_type; + } + return PyCMethod_New(descr->d_method, type, NULL, cls); } static PyObject * @@ -137,7 +141,19 @@ method_get(PyMethodDescrObject *descr, PyObject *obj, PyObject *type) if (descr_check((PyDescrObject *)descr, obj, &res)) return res; - return PyCFunction_NewEx(descr->d_method, obj, NULL); + if (descr->d_method->ml_flags & METH_METHOD) { + if (PyType_Check(type)) { + return PyCMethod_New(descr->d_method, obj, NULL, descr->d_common.d_type); + } else { + PyErr_Format(PyExc_TypeError, + "descriptor '%V' needs a type, not '%s', as arg 2", + descr_name((PyDescrObject *)descr), + Py_TYPE(type)->tp_name); + return NULL; + } + } else { + return PyCFunction_NewEx(descr->d_method, obj, NULL); + } } static PyObject * @@ -335,6 +351,27 @@ method_vectorcall_VARARGS_KEYWORDS( return result; } +static PyObject * +method_vectorcall_FASTCALL_KEYWORDS_METHOD( + PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + PyThreadState *tstate = _PyThreadState_GET(); + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); + if (method_check_args(func, args, nargs, NULL)) { + return NULL; + } + NULL; + PyCMethod meth = (PyCMethod) method_enter_call(tstate, func); + if (meth == NULL) { + return NULL; + } + PyObject *result = meth(args[0], + ((PyMethodDescrObject *)func)->d_common.d_type, + args+1, nargs-1, kwnames); + Py_LeaveRecursiveCall(); + return result; +} + static PyObject * method_vectorcall_FASTCALL( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) @@ -868,7 +905,8 @@ PyDescr_NewMethod(PyTypeObject *type, PyMethodDef *method) { /* Figure out correct vectorcall function to use */ vectorcallfunc vectorcall; - switch (method->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | 
METH_KEYWORDS)) + switch (method->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | + METH_O | METH_KEYWORDS | METH_METHOD)) { case METH_VARARGS: vectorcall = method_vectorcall_VARARGS; @@ -888,6 +926,9 @@ PyDescr_NewMethod(PyTypeObject *type, PyMethodDef *method) case METH_O: vectorcall = method_vectorcall_O; break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + vectorcall = method_vectorcall_FASTCALL_KEYWORDS_METHOD; + break; default: PyErr_Format(PyExc_SystemError, "%s() method: bad call flags", method->ml_name); @@ -1077,6 +1118,13 @@ mappingproxy_copy(mappingproxyobject *pp, PyObject *Py_UNUSED(ignored)) return _PyObject_CallMethodIdNoArgs(pp->mapping, &PyId_copy); } +static PyObject * +mappingproxy_reversed(mappingproxyobject *pp, PyObject *Py_UNUSED(ignored)) +{ + _Py_IDENTIFIER(__reversed__); + return _PyObject_CallMethodIdNoArgs(pp->mapping, &PyId___reversed__); +} + /* WARNING: mappingproxy methods must not give access to the underlying mapping */ @@ -1094,6 +1142,8 @@ static PyMethodDef mappingproxy_methods[] = { PyDoc_STR("D.copy() -> a shallow copy of D")}, {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, + {"__reversed__", (PyCFunction)mappingproxy_reversed, METH_NOARGS, + PyDoc_STR("D.__reversed__() -> reverse iterator")}, {0} }; diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 9c35f3c3f14d01..809a5ed7787370 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -250,16 +250,26 @@ static uint64_t pydict_global_version = 0; #ifndef PyDict_MAXFREELIST #define PyDict_MAXFREELIST 80 #endif + +/* bpo-40521: dict free lists are shared by all interpreters. */ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyDict_MAXFREELIST +# define PyDict_MAXFREELIST 0 +#endif + +#if PyDict_MAXFREELIST > 0 static PyDictObject *free_list[PyDict_MAXFREELIST]; static int numfree = 0; static PyDictKeysObject *keys_free_list[PyDict_MAXFREELIST]; static int numfreekeys = 0; +#endif #include "clinic/dictobject.c.h" void _PyDict_ClearFreeList(void) { +#if PyDict_MAXFREELIST > 0 while (numfree) { PyDictObject *op = free_list[--numfree]; assert(PyDict_CheckExact(op)); @@ -268,14 +278,17 @@ _PyDict_ClearFreeList(void) while (numfreekeys) { PyObject_FREE(keys_free_list[--numfreekeys]); } +#endif } /* Print summary info about the state of the optimized allocator */ void _PyDict_DebugMallocStats(FILE *out) { +#if PyDict_MAXFREELIST > 0 _PyDebugAllocatorStats(out, "free PyDictObject", numfree, sizeof(PyDictObject)); +#endif } @@ -553,10 +566,13 @@ static PyDictKeysObject *new_keys_object(Py_ssize_t size) es = sizeof(Py_ssize_t); } +#if PyDict_MAXFREELIST > 0 if (size == PyDict_MINSIZE && numfreekeys > 0) { dk = keys_free_list[--numfreekeys]; } - else { + else +#endif + { dk = PyObject_MALLOC(sizeof(PyDictKeysObject) + es * size + sizeof(PyDictKeyEntry) * usable); @@ -587,10 +603,12 @@ free_keys_object(PyDictKeysObject *keys) Py_XDECREF(entries[i].me_key); Py_XDECREF(entries[i].me_value); } +#if PyDict_MAXFREELIST > 0 if (keys->dk_size == PyDict_MINSIZE && numfreekeys < PyDict_MAXFREELIST) { keys_free_list[numfreekeys++] = keys; return; } +#endif PyObject_FREE(keys); } @@ -603,13 +621,16 @@ new_dict(PyDictKeysObject *keys, PyObject **values) { PyDictObject *mp; assert(keys != NULL); +#if PyDict_MAXFREELIST > 0 if (numfree) { mp = free_list[--numfree]; assert (mp != NULL); assert (Py_IS_TYPE(mp, &PyDict_Type)); _Py_NewReference((PyObject *)mp); } - else { + else +#endif + { mp = PyObject_GC_New(PyDictObject, 
&PyDict_Type); if (mp == NULL) { dictkeys_decref(keys); @@ -1258,12 +1279,15 @@ dictresize(PyDictObject *mp, Py_ssize_t minsize) #ifdef Py_REF_DEBUG _Py_RefTotal--; #endif +#if PyDict_MAXFREELIST > 0 if (oldkeys->dk_size == PyDict_MINSIZE && numfreekeys < PyDict_MAXFREELIST) { keys_free_list[numfreekeys++] = oldkeys; } - else { + else +#endif + { PyObject_FREE(oldkeys); } } @@ -1468,7 +1492,9 @@ _PyDict_GetItemIdWithError(PyObject *dp, struct _Py_Identifier *key) kv = _PyUnicode_FromId(key); /* borrowed */ if (kv == NULL) return NULL; - return PyDict_GetItemWithError(dp, kv); + Py_hash_t hash = ((PyASCIIObject *) kv)->hash; + assert (hash != -1); /* interned strings have their hash value initialised */ + return _PyDict_GetItem_KnownHash(dp, kv, hash); } PyObject * @@ -2005,10 +2031,15 @@ dict_dealloc(PyDictObject *mp) assert(keys->dk_refcnt == 1); dictkeys_decref(keys); } - if (numfree < PyDict_MAXFREELIST && Py_IS_TYPE(mp, &PyDict_Type)) +#if PyDict_MAXFREELIST > 0 + if (numfree < PyDict_MAXFREELIST && Py_IS_TYPE(mp, &PyDict_Type)) { free_list[numfree++] = mp; + } else +#endif + { Py_TYPE(mp)->tp_free((PyObject *)mp); + } Py_TRASHCAN_END } diff --git a/Objects/exceptions.c b/Objects/exceptions.c index ca917b436c4bb7..db5e3da12b00f3 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -304,22 +304,33 @@ static PyGetSetDef BaseException_getset[] = { }; +static inline PyBaseExceptionObject* +_PyBaseExceptionObject_cast(PyObject *exc) +{ + assert(PyExceptionInstance_Check(exc)); + return (PyBaseExceptionObject *)exc; +} + + PyObject * -PyException_GetTraceback(PyObject *self) { - PyBaseExceptionObject *base_self = (PyBaseExceptionObject *)self; +PyException_GetTraceback(PyObject *self) +{ + PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); Py_XINCREF(base_self->traceback); return base_self->traceback; } int -PyException_SetTraceback(PyObject *self, PyObject *tb) { - return BaseException_set_tb((PyBaseExceptionObject *)self, tb, NULL); +PyException_SetTraceback(PyObject *self, PyObject *tb) +{ + return BaseException_set_tb(_PyBaseExceptionObject_cast(self), tb, NULL); } PyObject * -PyException_GetCause(PyObject *self) { - PyObject *cause = ((PyBaseExceptionObject *)self)->cause; +PyException_GetCause(PyObject *self) +{ + PyObject *cause = _PyBaseExceptionObject_cast(self)->cause; Py_XINCREF(cause); return cause; } @@ -328,13 +339,15 @@ PyException_GetCause(PyObject *self) { void PyException_SetCause(PyObject *self, PyObject *cause) { - ((PyBaseExceptionObject *)self)->suppress_context = 1; - Py_XSETREF(((PyBaseExceptionObject *)self)->cause, cause); + PyBaseExceptionObject *base_self = _PyBaseExceptionObject_cast(self); + base_self->suppress_context = 1; + Py_XSETREF(base_self->cause, cause); } PyObject * -PyException_GetContext(PyObject *self) { - PyObject *context = ((PyBaseExceptionObject *)self)->context; +PyException_GetContext(PyObject *self) +{ + PyObject *context = _PyBaseExceptionObject_cast(self)->context; Py_XINCREF(context); return context; } @@ -343,7 +356,7 @@ PyException_GetContext(PyObject *self) { void PyException_SetContext(PyObject *self, PyObject *context) { - Py_XSETREF(((PyBaseExceptionObject *)self)->context, context); + Py_XSETREF(_PyBaseExceptionObject_cast(self)->context, context); } #undef PyExceptionClass_Name @@ -351,6 +364,7 @@ PyException_SetContext(PyObject *self, PyObject *context) const char * PyExceptionClass_Name(PyObject *ob) { + assert(PyExceptionClass_Check(ob)); return ((PyTypeObject*)ob)->tp_name; } diff --git 
a/Objects/floatobject.c b/Objects/floatobject.c index faa02f2f05795c..9f5014092cf20a 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -862,27 +862,7 @@ static PyObject * float___trunc___impl(PyObject *self) /*[clinic end generated code: output=dd3e289dd4c6b538 input=591b9ba0d650fdff]*/ { - double x = PyFloat_AsDouble(self); - double wholepart; /* integral portion of x, rounded toward 0 */ - - (void)modf(x, &wholepart); - /* Try to get out cheap if this fits in a Python int. The attempt - * to cast to long must be protected, as C doesn't define what - * happens if the double is too big to fit in a long. Some rare - * systems raise an exception then (RISCOS was mentioned as one, - * and someone using a non-default option on Sun also bumped into - * that). Note that checking for >= and <= LONG_{MIN,MAX} would - * still be vulnerable: if a long has more bits of precision than - * a double, casting MIN/MAX to double may yield an approximation, - * and if that's rounded up, then, e.g., wholepart=LONG_MAX+1 would - * yield true from the C expression wholepart<=LONG_MAX, despite - * that wholepart is actually greater than LONG_MAX. - */ - if (LONG_MIN < wholepart && wholepart < LONG_MAX) { - const long aslong = (long)wholepart; - return PyLong_FromLong(aslong); - } - return PyLong_FromDouble(wholepart); + return PyLong_FromDouble(PyFloat_AS_DOUBLE(self)); } /*[clinic input] diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 4f5054d32bb011..af32276c98b24a 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -556,11 +556,19 @@ static PyGetSetDef frame_getsetlist[] = { free_list. Else programs creating lots of cyclic trash involving frames could provoke free_list into growing without bound. */ +/* max value for numfree */ +#define PyFrame_MAXFREELIST 200 + +/* bpo-40521: frame free lists are shared by all interpreters. 
*/ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyFrame_MAXFREELIST +# define PyFrame_MAXFREELIST 0 +#endif +#if PyFrame_MAXFREELIST > 0 static PyFrameObject *free_list = NULL; static int numfree = 0; /* number of frames currently in free_list */ -/* max value for numfree */ -#define PyFrame_MAXFREELIST 200 +#endif static void _Py_HOT_FUNCTION frame_dealloc(PyFrameObject *f) @@ -590,15 +598,19 @@ frame_dealloc(PyFrameObject *f) Py_CLEAR(f->f_trace); co = f->f_code; - if (co->co_zombieframe == NULL) + if (co->co_zombieframe == NULL) { co->co_zombieframe = f; + } +#if PyFrame_MAXFREELIST > 0 else if (numfree < PyFrame_MAXFREELIST) { ++numfree; f->f_back = free_list; free_list = f; } - else +#endif + else { PyObject_GC_Del(f); + } Py_DECREF(co); Py_TRASHCAN_SAFE_END(f) @@ -759,98 +771,127 @@ PyTypeObject PyFrame_Type = { _Py_IDENTIFIER(__builtins__); -PyFrameObject* _Py_HOT_FUNCTION -_PyFrame_New_NoTrack(PyThreadState *tstate, PyCodeObject *code, - PyObject *globals, PyObject *locals) +static inline PyFrameObject* +frame_alloc(PyCodeObject *code) { - PyFrameObject *back = tstate->frame; PyFrameObject *f; - PyObject *builtins; - Py_ssize_t i; -#ifdef Py_DEBUG - if (code == NULL || globals == NULL || !PyDict_Check(globals) || - (locals != NULL && !PyMapping_Check(locals))) { - PyErr_BadInternalCall(); - return NULL; + f = code->co_zombieframe; + if (f != NULL) { + code->co_zombieframe = NULL; + _Py_NewReference((PyObject *)f); + assert(f->f_code == code); + return f; } + + Py_ssize_t ncells = PyTuple_GET_SIZE(code->co_cellvars); + Py_ssize_t nfrees = PyTuple_GET_SIZE(code->co_freevars); + Py_ssize_t extras = code->co_stacksize + code->co_nlocals + ncells + nfrees; +#if PyFrame_MAXFREELIST > 0 + if (free_list == NULL) #endif - if (back == NULL || back->f_globals != globals) { - builtins = _PyDict_GetItemIdWithError(globals, &PyId___builtins__); - if (builtins) { - if (PyModule_Check(builtins)) { - builtins = PyModule_GetDict(builtins); - assert(builtins != NULL); - } + { + f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras); + if (f == NULL) { + return NULL; } - if (builtins == NULL) { - if (PyErr_Occurred()) { + } +#if PyFrame_MAXFREELIST > 0 + else { + assert(numfree > 0); + --numfree; + f = free_list; + free_list = free_list->f_back; + if (Py_SIZE(f) < extras) { + PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras); + if (new_f == NULL) { + PyObject_GC_Del(f); return NULL; } - /* No builtins! Make up a minimal one - Give them 'None', at least. */ - builtins = PyDict_New(); - if (builtins == NULL || - PyDict_SetItemString( - builtins, "None", Py_None) < 0) - return NULL; + f = new_f; } - else - Py_INCREF(builtins); + _Py_NewReference((PyObject *)f); + } +#endif + f->f_code = code; + extras = code->co_nlocals + ncells + nfrees; + f->f_valuestack = f->f_localsplus + extras; + for (Py_ssize_t i=0; if_localsplus[i] = NULL; } - else { + f->f_locals = NULL; + f->f_trace = NULL; + return f; +} + + +static inline PyObject * +frame_get_builtins(PyFrameObject *back, PyObject *globals) +{ + PyObject *builtins; + + if (back != NULL && back->f_globals == globals) { /* If we share the globals, we share the builtins. Save a lookup and a call. 
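The dictobject.c and frameobject.c hunks above (and the listobject.c and tupleobject.c ones below) all apply the same bpo-40521 treatment: keep the free list in a regular build, but force the limit to 0 under EXPERIMENTAL_ISOLATED_SUBINTERPRETERS so the #if blocks compile it away, because these free lists are C globals shared by all interpreters. A generic sketch of that guard under assumed "Demo" names (not code from the patch):

#include <Python.h>

typedef struct {
    PyObject_HEAD
} DemoObject;

static PyTypeObject Demo_Type;      /* assumed to be defined elsewhere */

#ifndef DEMO_MAXFREELIST
#  define DEMO_MAXFREELIST 80
#endif

/* The free list is shared by all interpreters, so disable it when
 * subinterpreters are isolated. */
#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS
#  undef DEMO_MAXFREELIST
#  define DEMO_MAXFREELIST 0
#endif

#if DEMO_MAXFREELIST > 0
static DemoObject *free_list[DEMO_MAXFREELIST];
static int numfree = 0;
#endif

static DemoObject *
demo_alloc(void)
{
#if DEMO_MAXFREELIST > 0
    if (numfree > 0) {
        DemoObject *op = free_list[--numfree];
        _Py_NewReference((PyObject *)op);   /* reuse a cached object */
        return op;
    }
#endif
    return PyObject_GC_New(DemoObject, &Demo_Type);
}

/* Called from the type's tp_dealloc, once the refcount has dropped to 0. */
static void
demo_dealloc(DemoObject *op)
{
#if DEMO_MAXFREELIST > 0
    if (numfree < DEMO_MAXFREELIST) {
        free_list[numfree++] = op;
        return;
    }
#endif
    PyObject_GC_Del(op);
}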
*/ builtins = back->f_builtins; assert(builtins != NULL); Py_INCREF(builtins); + return builtins; } - if (code->co_zombieframe != NULL) { - f = code->co_zombieframe; - code->co_zombieframe = NULL; - _Py_NewReference((PyObject *)f); - assert(f->f_code == code); + + builtins = _PyDict_GetItemIdWithError(globals, &PyId___builtins__); + if (builtins != NULL && PyModule_Check(builtins)) { + builtins = PyModule_GetDict(builtins); + assert(builtins != NULL); } - else { - Py_ssize_t extras, ncells, nfrees; - ncells = PyTuple_GET_SIZE(code->co_cellvars); - nfrees = PyTuple_GET_SIZE(code->co_freevars); - extras = code->co_stacksize + code->co_nlocals + ncells + - nfrees; - if (free_list == NULL) { - f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, - extras); - if (f == NULL) { - Py_DECREF(builtins); - return NULL; - } - } - else { - assert(numfree > 0); - --numfree; - f = free_list; - free_list = free_list->f_back; - if (Py_SIZE(f) < extras) { - PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras); - if (new_f == NULL) { - PyObject_GC_Del(f); - Py_DECREF(builtins); - return NULL; - } - f = new_f; - } - _Py_NewReference((PyObject *)f); - } + if (builtins != NULL) { + Py_INCREF(builtins); + return builtins; + } + + if (PyErr_Occurred()) { + return NULL; + } + + /* No builtins! Make up a minimal one. + Give them 'None', at least. */ + builtins = PyDict_New(); + if (builtins == NULL) { + return NULL; + } + if (PyDict_SetItemString(builtins, "None", Py_None) < 0) { + Py_DECREF(builtins); + return NULL; + } + return builtins; +} - f->f_code = code; - extras = code->co_nlocals + ncells + nfrees; - f->f_valuestack = f->f_localsplus + extras; - for (i=0; if_localsplus[i] = NULL; - f->f_locals = NULL; - f->f_trace = NULL; + +PyFrameObject* _Py_HOT_FUNCTION +_PyFrame_New_NoTrack(PyThreadState *tstate, PyCodeObject *code, + PyObject *globals, PyObject *locals) +{ +#ifdef Py_DEBUG + if (code == NULL || globals == NULL || !PyDict_Check(globals) || + (locals != NULL && !PyMapping_Check(locals))) { + PyErr_BadInternalCall(); + return NULL; + } +#endif + + PyFrameObject *back = tstate->frame; + PyObject *builtins = frame_get_builtins(back, globals); + if (builtins == NULL) { + return NULL; } + + PyFrameObject *f = frame_alloc(code); + if (f == NULL) { + Py_DECREF(builtins); + return NULL; + } + f->f_stacktop = f->f_valuestack; f->f_builtins = builtins; Py_XINCREF(back); @@ -1142,6 +1183,7 @@ PyFrame_LocalsToFast(PyFrameObject *f, int clear) void _PyFrame_ClearFreeList(void) { +#if PyFrame_MAXFREELIST > 0 while (free_list != NULL) { PyFrameObject *f = free_list; free_list = free_list->f_back; @@ -1149,6 +1191,7 @@ _PyFrame_ClearFreeList(void) --numfree; } assert(numfree == 0); +#endif } void @@ -1161,9 +1204,11 @@ _PyFrame_Fini(void) void _PyFrame_DebugMallocStats(FILE *out) { +#if PyFrame_MAXFREELIST > 0 _PyDebugAllocatorStats(out, "free PyFrameObject", numfree, sizeof(PyFrameObject)); +#endif } diff --git a/Objects/genobject.c b/Objects/genobject.c index b27fa929a26258..fb01e581f8ae15 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -217,6 +217,18 @@ gen_send_ex(PyGenObject *gen, PyObject *arg, int exc, int closing) assert(f->f_back == NULL); f->f_back = tstate->frame; + _PyErr_StackItem *gi_exc_state = &gen->gi_exc_state; + if (exc && gi_exc_state->exc_type != NULL && + gi_exc_state->exc_type != Py_None) + { + Py_INCREF(gi_exc_state->exc_type); + Py_XINCREF(gi_exc_state->exc_value); + Py_XINCREF(gi_exc_state->exc_traceback); + _PyErr_ChainExceptions(gi_exc_state->exc_type, + 
gi_exc_state->exc_value, + gi_exc_state->exc_traceback); + } + gen->gi_running = 1; gen->gi_exc_state.previous_item = tstate->exc_info; tstate->exc_info = &gen->gi_exc_state; @@ -512,16 +524,6 @@ _gen_throw(PyGenObject *gen, int close_on_genexit, } PyErr_Restore(typ, val, tb); - /* XXX It seems like we shouldn't have to check not equal to Py_None - here because exc_type should only ever be a class. But not including - this check was causing crashes on certain tests e.g. on Fedora. */ - if (gen->gi_exc_state.exc_type && gen->gi_exc_state.exc_type != Py_None) { - Py_INCREF(gen->gi_exc_state.exc_type); - Py_XINCREF(gen->gi_exc_state.exc_value); - Py_XINCREF(gen->gi_exc_state.exc_traceback); - _PyErr_ChainExceptions(gen->gi_exc_state.exc_type, - gen->gi_exc_state.exc_value, gen->gi_exc_state.exc_traceback); - } return gen_send_ex(gen, Py_None, 1, 0); failed_throw: diff --git a/Objects/listobject.c b/Objects/listobject.c index 904bea317c9da8..37fadca129ac02 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -98,8 +98,15 @@ list_preallocate_exact(PyListObject *self, Py_ssize_t size) /* Empty list reuse scheme to save calls to malloc and free */ #ifndef PyList_MAXFREELIST -#define PyList_MAXFREELIST 80 +# define PyList_MAXFREELIST 80 #endif + +/* bpo-40521: list free lists are shared by all interpreters. */ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyList_MAXFREELIST +# define PyList_MAXFREELIST 0 +#endif + static PyListObject *free_list[PyList_MAXFREELIST]; static int numfree = 0; diff --git a/Objects/longobject.c b/Objects/longobject.c index 11fc75b918f77f..0ff0e80cd42696 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -416,6 +416,21 @@ PyLong_FromSize_t(size_t ival) PyObject * PyLong_FromDouble(double dval) { + /* Try to get out cheap if this fits in a long. When a finite value of real + * floating type is converted to an integer type, the value is truncated + * toward zero. If the value of the integral part cannot be represented by + * the integer type, the behavior is undefined. Thus, we must check that + * value is in range (LONG_MIN - 1, LONG_MAX + 1). If a long has more bits + * of precision than a double, casting LONG_MIN - 1 to double may yield an + * approximation, but LONG_MAX + 1 is a power of two and can be represented + * as double exactly (assuming FLT_RADIX is 2 or 16), so for simplicity + * check against [-(LONG_MAX + 1), LONG_MAX + 1). 
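As a standalone illustration of the range check described in the comment above (and of why float.__trunc__ in the floatobject.c hunk could shrink to a single PyLong_FromDouble() call), assuming IEEE-754 doubles and a two's-complement long; this is not the CPython function itself:

#include <limits.h>

static int
demo_fits_in_long(double d)
{
    /* (double)LONG_MAX may round up, but LONG_MAX + 1 is a power of two and
     * converts exactly, so any value strictly inside (-int_max, int_max)
     * truncates to a representable long. */
    const double int_max = (double)((unsigned long)LONG_MAX + 1);
    return -int_max < d && d < int_max;   /* NaN fails both comparisons */
}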
+ */ + const double int_max = (unsigned long)LONG_MAX + 1; + if (-int_max < dval && dval < int_max) { + return PyLong_FromLong((long)dval); + } + PyLongObject *v; double frac; int i, ndig, expo, neg; @@ -435,8 +450,7 @@ PyLong_FromDouble(double dval) dval = -dval; } frac = frexp(dval, &expo); /* dval = frac*2**expo; 0.0 <= frac < 1.0 */ - if (expo <= 0) - return PyLong_FromLong(0L); + assert(expo > 0); ndig = (expo-1) / PyLong_SHIFT + 1; /* Number of 'digits' in result */ v = _PyLong_New(ndig); if (v == NULL) diff --git a/Objects/methodobject.c b/Objects/methodobject.c index 20eba6fa8643bf..5659f2143d1823 100644 --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -10,12 +10,16 @@ /* undefine macro trampoline to PyCFunction_NewEx */ #undef PyCFunction_New +/* undefine macro trampoline to PyCMethod_New */ +#undef PyCFunction_NewEx /* Forward declarations */ static PyObject * cfunction_vectorcall_FASTCALL( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * cfunction_vectorcall_FASTCALL_KEYWORDS( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); +static PyObject * cfunction_vectorcall_FASTCALL_KEYWORDS_METHOD( + PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * cfunction_vectorcall_NOARGS( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames); static PyObject * cfunction_vectorcall_O( @@ -32,10 +36,17 @@ PyCFunction_New(PyMethodDef *ml, PyObject *self) PyObject * PyCFunction_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module) +{ + return PyCMethod_New(ml, self, module, NULL); +} + +PyObject * +PyCMethod_New(PyMethodDef *ml, PyObject *self, PyObject *module, PyTypeObject *cls) { /* Figure out correct vectorcall function to use */ vectorcallfunc vectorcall; - switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | METH_O | METH_KEYWORDS)) + switch (ml->ml_flags & (METH_VARARGS | METH_FASTCALL | METH_NOARGS | + METH_O | METH_KEYWORDS | METH_METHOD)) { case METH_VARARGS: case METH_VARARGS | METH_KEYWORDS: @@ -55,17 +66,44 @@ PyCFunction_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module) case METH_O: vectorcall = cfunction_vectorcall_O; break; + case METH_METHOD | METH_FASTCALL | METH_KEYWORDS: + vectorcall = cfunction_vectorcall_FASTCALL_KEYWORDS_METHOD; + break; default: PyErr_Format(PyExc_SystemError, "%s() method: bad call flags", ml->ml_name); return NULL; } - PyCFunctionObject *op = - PyObject_GC_New(PyCFunctionObject, &PyCFunction_Type); - if (op == NULL) { - return NULL; + PyCFunctionObject *op = NULL; + + if (ml->ml_flags & METH_METHOD) { + if (!cls) { + PyErr_SetString(PyExc_SystemError, + "attempting to create PyCMethod with a METH_METHOD " + "flag but no class"); + return NULL; + } + PyCMethodObject *om = PyObject_GC_New(PyCMethodObject, &PyCMethod_Type); + if (om == NULL) { + return NULL; + } + Py_INCREF(cls); + om->mm_class = cls; + op = (PyCFunctionObject *)om; + } else { + if (cls) { + PyErr_SetString(PyExc_SystemError, + "attempting to create PyCFunction with class " + "but no METH_METHOD flag"); + return NULL; + } + op = PyObject_GC_New(PyCFunctionObject, &PyCFunction_Type); + if (op == NULL) { + return NULL; + } } + op->m_weakreflist = NULL; op->m_ml = ml; Py_XINCREF(self); @@ -107,6 +145,16 @@ PyCFunction_GetFlags(PyObject *op) return PyCFunction_GET_FLAGS(op); } +PyTypeObject * +PyCMethod_GetClass(PyObject *op) +{ + if (!PyCFunction_Check(op)) { + PyErr_BadInternalCall(); + return NULL; + } + return 
PyCFunction_GET_CLASS(op); +} + /* Methods (the standard built-in methods, that is) */ static void @@ -118,6 +166,7 @@ meth_dealloc(PyCFunctionObject *m) } Py_XDECREF(m->m_self); Py_XDECREF(m->m_module); + Py_XDECREF(PyCFunction_GET_CLASS(m)); PyObject_GC_Del(m); } @@ -196,6 +245,7 @@ meth_traverse(PyCFunctionObject *m, visitproc visit, void *arg) { Py_VISIT(m->m_self); Py_VISIT(m->m_module); + Py_VISIT(PyCFunction_GET_CLASS(m)); return 0; } @@ -314,6 +364,13 @@ PyTypeObject PyCFunction_Type = { 0, /* tp_dict */ }; +PyTypeObject PyCMethod_Type = { + PyVarObject_HEAD_INIT(&PyType_Type, 0) + .tp_name = "builtin_method", + .tp_basicsize = sizeof(PyCMethodObject), + .tp_base = &PyCFunction_Type, +}; + /* Vectorcall functions for each of the PyCFunction calling conventions, * except for METH_VARARGS (possibly combined with METH_KEYWORDS) which * doesn't use vectorcall. @@ -385,6 +442,22 @@ cfunction_vectorcall_FASTCALL_KEYWORDS( return result; } +static PyObject * +cfunction_vectorcall_FASTCALL_KEYWORDS_METHOD( + PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) +{ + PyThreadState *tstate = _PyThreadState_GET(); + PyTypeObject *cls = PyCFunction_GET_CLASS(func); + Py_ssize_t nargs = PyVectorcall_NARGS(nargsf); + PyCMethod meth = (PyCMethod)cfunction_enter_call(tstate, func); + if (meth == NULL) { + return NULL; + } + PyObject *result = meth(PyCFunction_GET_SELF(func), cls, args, nargs, kwnames); + _Py_LeaveRecursiveCall(tstate); + return result; +} + static PyObject * cfunction_vectorcall_NOARGS( PyObject *func, PyObject *const *args, size_t nargsf, PyObject *kwnames) diff --git a/Objects/object.c b/Objects/object.c index 75ea92ad9005c9..623ee52eb1e22d 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -1789,6 +1789,7 @@ _PyTypes_Init(void) INIT_TYPE(&PyCode_Type, "code"); INIT_TYPE(&PyFrame_Type, "frame"); INIT_TYPE(&PyCFunction_Type, "builtin function"); + INIT_TYPE(&PyCMethod_Type, "builtin method"); INIT_TYPE(&PyMethod_Type, "method"); INIT_TYPE(&PyFunction_Type, "function"); INIT_TYPE(&PyDictProxy_Type, "dict proxy"); diff --git a/Objects/setobject.c b/Objects/setobject.c index 0e4e45f60a9ccc..76b1944db45588 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -57,77 +57,43 @@ set_lookkey(PySetObject *so, PyObject *key, Py_hash_t hash) { setentry *table; setentry *entry; - size_t perturb; + size_t perturb = hash; size_t mask = so->mask; size_t i = (size_t)hash & mask; /* Unsigned for defined overflow behavior */ - size_t j; + int probes; int cmp; - entry = &so->table[i]; - if (entry->key == NULL) - return entry; - - perturb = hash; - while (1) { - if (entry->hash == hash) { - PyObject *startkey = entry->key; - /* startkey cannot be a dummy because the dummy hash field is -1 */ - assert(startkey != dummy); - if (startkey == key) - return entry; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - return entry; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp < 0) /* unlikely */ - return NULL; - if (table != so->table || entry->key != startkey) /* unlikely */ - return set_lookkey(so, key, hash); - if (cmp > 0) /* likely */ + entry = &so->table[i]; + probes = (i + LINEAR_PROBES <= mask) ? 
LINEAR_PROBES: 0; + do { + if (entry->hash == 0 && entry->key == NULL) return entry; - mask = so->mask; /* help avoid a register spill */ - } - - if (i + LINEAR_PROBES <= mask) { - for (j = 0 ; j < LINEAR_PROBES ; j++) { - entry++; - if (entry->hash == 0 && entry->key == NULL) + if (entry->hash == hash) { + PyObject *startkey = entry->key; + assert(startkey != dummy); + if (startkey == key) return entry; - if (entry->hash == hash) { - PyObject *startkey = entry->key; - assert(startkey != dummy); - if (startkey == key) - return entry; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - return entry; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp < 0) - return NULL; - if (table != so->table || entry->key != startkey) - return set_lookkey(so, key, hash); - if (cmp > 0) - return entry; - mask = so->mask; - } + if (PyUnicode_CheckExact(startkey) + && PyUnicode_CheckExact(key) + && _PyUnicode_EQ(startkey, key)) + return entry; + table = so->table; + Py_INCREF(startkey); + cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); + Py_DECREF(startkey); + if (cmp < 0) + return NULL; + if (table != so->table || entry->key != startkey) + return set_lookkey(so, key, hash); + if (cmp > 0) + return entry; + mask = so->mask; } - } - + entry++; + } while (probes--); perturb >>= PERTURB_SHIFT; i = (i * 5 + 1 + perturb) & mask; - - entry = &so->table[i]; - if (entry->key == NULL) - return entry; } } @@ -141,7 +107,7 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) size_t perturb; size_t mask; size_t i; /* Unsigned for defined overflow behavior */ - size_t j; + int probes; int cmp; /* Pre-increment is necessary to prevent arbitrary code in the rich @@ -152,75 +118,39 @@ set_add_entry(PySetObject *so, PyObject *key, Py_hash_t hash) mask = so->mask; i = (size_t)hash & mask; - - entry = &so->table[i]; - if (entry->key == NULL) - goto found_unused; - perturb = hash; while (1) { - if (entry->hash == hash) { - PyObject *startkey = entry->key; - /* startkey cannot be a dummy because the dummy hash field is -1 */ - assert(startkey != dummy); - if (startkey == key) - goto found_active; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - goto found_active; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp > 0) /* likely */ - goto found_active; - if (cmp < 0) - goto comparison_error; - /* Continuing the search from the current entry only makes - sense if the table and entry are unchanged; otherwise, - we have to restart from the beginning */ - if (table != so->table || entry->key != startkey) - goto restart; - mask = so->mask; /* help avoid a register spill */ - } - - if (i + LINEAR_PROBES <= mask) { - for (j = 0 ; j < LINEAR_PROBES ; j++) { - entry++; - if (entry->hash == 0 && entry->key == NULL) - goto found_unused; - if (entry->hash == hash) { - PyObject *startkey = entry->key; - assert(startkey != dummy); - if (startkey == key) - goto found_active; - if (PyUnicode_CheckExact(startkey) - && PyUnicode_CheckExact(key) - && _PyUnicode_EQ(startkey, key)) - goto found_active; - table = so->table; - Py_INCREF(startkey); - cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); - Py_DECREF(startkey); - if (cmp > 0) - goto found_active; - if (cmp < 0) - goto comparison_error; - if (table != so->table || entry->key != startkey) - goto restart; 
- mask = so->mask; - } + entry = &so->table[i]; + probes = (i + LINEAR_PROBES <= mask) ? LINEAR_PROBES: 0; + do { + if (entry->hash == 0 && entry->key == NULL) + goto found_unused; + if (entry->hash == hash) { + PyObject *startkey = entry->key; + assert(startkey != dummy); + if (startkey == key) + goto found_active; + if (PyUnicode_CheckExact(startkey) + && PyUnicode_CheckExact(key) + && _PyUnicode_EQ(startkey, key)) + goto found_active; + table = so->table; + Py_INCREF(startkey); + cmp = PyObject_RichCompareBool(startkey, key, Py_EQ); + Py_DECREF(startkey); + if (cmp > 0) + goto found_active; + if (cmp < 0) + goto comparison_error; + if (table != so->table || entry->key != startkey) + goto restart; + mask = so->mask; } - } - + entry++; + } while (probes--); perturb >>= PERTURB_SHIFT; i = (i * 5 + 1 + perturb) & mask; - - entry = &so->table[i]; - if (entry->key == NULL) - goto found_unused; } found_unused: diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index f8648d24f1c876..c0b59c009a2e94 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -22,6 +22,12 @@ class tuple "PyTupleObject *" "&PyTuple_Type" #define PyTuple_MAXFREELIST 2000 /* Maximum number of tuples of each size to save */ #endif +/* bpo-40521: tuple free lists are shared by all interpreters. */ +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# undef PyTuple_MAXSAVESIZE +# define PyTuple_MAXSAVESIZE 0 +#endif + #if PyTuple_MAXSAVESIZE > 0 /* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty tuple () of which at most one instance will be allocated. @@ -248,7 +254,9 @@ tupledealloc(PyTupleObject *op) #endif } Py_TYPE(op)->tp_free((PyObject *)op); +#if PyTuple_MAXSAVESIZE > 0 done: +#endif Py_TRASHCAN_END } diff --git a/Objects/typeobject.c b/Objects/typeobject.c index db0ae970090ba9..243f8811b62571 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -19,6 +19,12 @@ class object "PyObject *" "&PyBaseObject_Type" #include "clinic/typeobject.c.h" +/* bpo-40521: Type method cache is shared by all subinterpreters */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define MCACHE +#endif + +#ifdef MCACHE /* Support type attribute cache */ /* The cache can keep references to the names alive for longer than @@ -47,6 +53,7 @@ struct method_cache_entry { static struct method_cache_entry method_cache[1 << MCACHE_SIZE_EXP]; static unsigned int next_version_tag = 0; +#endif #define MCACHE_STATS 0 @@ -56,6 +63,11 @@ static size_t method_cache_misses = 0; static size_t method_cache_collisions = 0; #endif +/* bpo-40521: Interned strings are shared by all subinterpreters */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define INTERN_NAME_STRINGS +#endif + /* alphabetical order */ _Py_IDENTIFIER(__abstractmethods__); _Py_IDENTIFIER(__class__); @@ -211,6 +223,7 @@ _PyType_GetTextSignatureFromInternalDoc(const char *name, const char *internal_d unsigned int PyType_ClearCache(void) { +#ifdef MCACHE Py_ssize_t i; unsigned int cur_version_tag = next_version_tag - 1; @@ -235,6 +248,9 @@ PyType_ClearCache(void) /* mark all version tags as invalid */ PyType_Modified(&PyBaseObject_Type); return cur_version_tag; +#else + return 0; +#endif } void @@ -345,6 +361,7 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) { Py_TPFLAGS_VALID_VERSION_TAG); } +#ifdef MCACHE static int assign_version_tag(PyTypeObject *type) { @@ -391,6 +408,7 @@ assign_version_tag(PyTypeObject *type) type->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG; return 1; } +#endif static PyMemberDef type_members[] = { @@ -1501,7 
+1519,7 @@ lookup_method(PyObject *self, _Py_Identifier *attrid, int *unbound) { PyObject *res = lookup_maybe_method(self, attrid, unbound); if (res == NULL && !PyErr_Occurred()) { - PyErr_SetObject(PyExc_AttributeError, attrid->object); + PyErr_SetObject(PyExc_AttributeError, _PyUnicode_FromId(attrid)); } return res; } @@ -2690,6 +2708,9 @@ type_new(PyTypeObject *metatype, PyObject *args, PyObject *kwds) if (qualname != NULL && _PyDict_DelItemId(dict, &PyId___qualname__) < 0) goto error; + /* Set ht_module */ + et->ht_module = NULL; + /* Set tp_doc to a copy of dict['__doc__'], if the latter is there and is a string. The __doc__ accessor will first look for tp_doc; if that fails, it will still look into __dict__. @@ -2921,16 +2942,22 @@ PyType_FromSpec_tp_traverse(PyObject *self, visitproc visit, void *arg) PyObject * PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) +{ + return PyType_FromModuleAndSpec(NULL, spec, bases); +} + +PyObject * +PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) { PyHeapTypeObject *res; PyObject *modname; PyTypeObject *type, *base; const PyType_Slot *slot; - Py_ssize_t nmembers, weaklistoffset, dictoffset; + Py_ssize_t nmembers, weaklistoffset, dictoffset, vectorcalloffset; char *res_start; - nmembers = weaklistoffset = dictoffset = 0; + nmembers = weaklistoffset = dictoffset = vectorcalloffset = 0; for (slot = spec->slots; slot->slot; slot++) { if (slot->slot == Py_tp_members) { nmembers = 0; @@ -2948,6 +2975,12 @@ PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) assert(memb->flags == READONLY); dictoffset = memb->offset; } + if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + // The PyMemberDef must be a Py_ssize_t and readonly + assert(memb->type == T_PYSSIZET); + assert(memb->flags == READONLY); + vectorcalloffset = memb->offset; + } } } } @@ -2980,6 +3013,9 @@ PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) Py_INCREF(res->ht_qualname); type->tp_name = spec->name; + Py_XINCREF(module); + res->ht_module = module; + /* Adjust for empty tuple bases */ if (!bases) { base = &PyBaseObject_Type; @@ -3093,6 +3129,10 @@ PyType_FromSpecWithBases(PyType_Spec *spec, PyObject *bases) type->tp_dealloc = subtype_dealloc; } + if (vectorcalloffset) { + type->tp_vectorcall_offset = vectorcalloffset; + } + if (PyType_Ready(type) < 0) goto fail; @@ -3158,6 +3198,40 @@ PyType_GetSlot(PyTypeObject *type, int slot) return *(void**)(((char*)type) + slotoffsets[slot]); } +PyObject * +PyType_GetModule(PyTypeObject *type) +{ + assert(PyType_Check(type)); + if (!_PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) { + PyErr_Format( + PyExc_TypeError, + "PyType_GetModule: Type '%s' is not a heap type", + type->tp_name); + return NULL; + } + + PyHeapTypeObject* et = (PyHeapTypeObject*)type; + if (!et->ht_module) { + PyErr_Format( + PyExc_TypeError, + "PyType_GetModule: Type '%s' has no associated module", + type->tp_name); + return NULL; + } + return et->ht_module; + +} + +void * +PyType_GetModuleState(PyTypeObject *type) +{ + PyObject *m = PyType_GetModule(type); + if (m == NULL) { + return NULL; + } + return PyModule_GetState(m); +} + /* Internal API to look for a name through the MRO, bypassing the method cache. This returns a borrowed reference, and might set an exception. 
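Besides remembering the module, the spec-based type creation above now also recognizes a read-only T_PYSSIZET member named "__vectorcalloffset__" and copies its offset into tp_vectorcall_offset. A hedged sketch of the declaration side only, with hypothetical "Demo" names; installing the actual vectorcall function and the matching type flag is not shown here:

#include <Python.h>
#include <stddef.h>          /* offsetof */
#include <structmember.h>    /* T_PYSSIZET, READONLY */

typedef struct {
    PyObject_HEAD
    vectorcallfunc vectorcall;   /* filled in when an instance is created */
} DemoObject;

static PyMemberDef Demo_members[] = {
    /* The spec machinery copies this offset into tp_vectorcall_offset. */
    {"__vectorcalloffset__", T_PYSSIZET,
     offsetof(DemoObject, vectorcall), READONLY},
    {NULL},
};

static PyType_Slot Demo_slots[] = {
    {Py_tp_members, Demo_members},
    {0, NULL},
};

static PyType_Spec Demo_spec = {
    .name = "demo.Callable",
    .basicsize = sizeof(DemoObject),
    .flags = Py_TPFLAGS_DEFAULT,
    .slots = Demo_slots,
};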
'error' is set to: -1: error with exception; 1: error without exception; 0: ok */ @@ -3227,12 +3301,12 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) { PyObject *res; int error; - unsigned int h; +#ifdef MCACHE if (MCACHE_CACHEABLE_NAME(name) && _PyType_HasFeature(type, Py_TPFLAGS_VALID_VERSION_TAG)) { /* fast path */ - h = MCACHE_HASH_METHOD(type, name); + unsigned int h = MCACHE_HASH_METHOD(type, name); if (method_cache[h].version == type->tp_version_tag && method_cache[h].name == name) { #if MCACHE_STATS @@ -3241,6 +3315,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) return method_cache[h].value; } } +#endif /* We may end up clearing live exceptions below, so make sure it's ours. */ assert(!PyErr_Occurred()); @@ -3262,8 +3337,9 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) return NULL; } +#ifdef MCACHE if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(type)) { - h = MCACHE_HASH_METHOD(type, name); + unsigned int h = MCACHE_HASH_METHOD(type, name); method_cache[h].version = type->tp_version_tag; method_cache[h].value = res; /* borrowed */ Py_INCREF(name); @@ -3276,6 +3352,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name) #endif Py_SETREF(method_cache[h].name, name); } +#endif return res; } @@ -3418,6 +3495,7 @@ type_setattro(PyTypeObject *type, PyObject *name, PyObject *value) if (name == NULL) return -1; } +#ifdef INTERN_NAME_STRINGS if (!PyUnicode_CHECK_INTERNED(name)) { PyUnicode_InternInPlace(&name); if (!PyUnicode_CHECK_INTERNED(name)) { @@ -3427,6 +3505,7 @@ type_setattro(PyTypeObject *type, PyObject *name, PyObject *value) return -1; } } +#endif } else { /* Will fail in _PyObject_GenericSetAttrWithDict. */ @@ -3480,8 +3559,10 @@ type_dealloc(PyTypeObject *type) Py_XDECREF(et->ht_name); Py_XDECREF(et->ht_qualname); Py_XDECREF(et->ht_slots); - if (et->ht_cached_keys) + if (et->ht_cached_keys) { _PyDictKeys_DecRef(et->ht_cached_keys); + } + Py_XDECREF(et->ht_module); Py_TYPE(type)->tp_free((PyObject *)type); } @@ -3671,6 +3752,7 @@ type_traverse(PyTypeObject *type, visitproc visit, void *arg) Py_VISIT(type->tp_mro); Py_VISIT(type->tp_bases); Py_VISIT(type->tp_base); + Py_VISIT(((PyHeapTypeObject *)type)->ht_module); /* There's no need to visit type->tp_subclasses or ((PyHeapTypeObject *)type)->ht_slots, because they can't be involved @@ -3692,10 +3774,13 @@ type_clear(PyTypeObject *type) the dict, so that other objects caught in a reference cycle don't start calling destroyed methods. - Otherwise, the only field we need to clear is tp_mro, which is + Otherwise, the we need to clear tp_mro, which is part of a hard cycle (its first element is the class itself) that won't be broken otherwise (it's a tuple and tuples don't have a - tp_clear handler). None of the other fields need to be + tp_clear handler). + We also need to clear ht_module, if present: the module usually holds a + reference to its class. 
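Since a heap type now owns a reference to its module (ht_module) and a module's state usually owns references to its heap types, the two form a reference cycle that only the garbage collector can break; that is why ht_module is visited and cleared above, and why posixmodule registers _posix_traverse/_posix_clear. A minimal sketch of the module-side half, under assumed "demo" names:

#include <Python.h>

typedef struct {
    PyObject *DemoType;   /* heap type created with PyType_FromModuleAndSpec() */
} demo_state;

static int
demo_traverse(PyObject *module, visitproc visit, void *arg)
{
    demo_state *state = (demo_state *)PyModule_GetState(module);
    Py_VISIT(state->DemoType);
    return 0;
}

static int
demo_clear(PyObject *module)
{
    demo_state *state = (demo_state *)PyModule_GetState(module);
    Py_CLEAR(state->DemoType);
    return 0;
}

static void
demo_free(void *module)
{
    (void)demo_clear((PyObject *)module);
}

/* Wired up through .m_traverse, .m_clear and .m_free in the PyModuleDef. */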
None of the other fields need to be + cleared, and here's why: tp_cache: @@ -3720,8 +3805,11 @@ type_clear(PyTypeObject *type) ((PyHeapTypeObject *)type)->ht_cached_keys = NULL; _PyDictKeys_DecRef(cached_keys); } - if (type->tp_dict) + if (type->tp_dict) { PyDict_Clear(type->tp_dict); + } + Py_CLEAR(((PyHeapTypeObject *)type)->ht_module); + Py_CLEAR(type->tp_mro); return 0; @@ -6776,12 +6864,12 @@ slot_tp_setattro(PyObject *self, PyObject *name, PyObject *value) } static _Py_Identifier name_op[] = { - {0, "__lt__", 0}, - {0, "__le__", 0}, - {0, "__eq__", 0}, - {0, "__ne__", 0}, - {0, "__gt__", 0}, - {0, "__ge__", 0} + _Py_static_string_init("__lt__"), + _Py_static_string_init("__le__"), + _Py_static_string_init("__eq__"), + _Py_static_string_init("__ne__"), + _Py_static_string_init("__gt__"), + _Py_static_string_init("__ge__"), }; static PyObject * @@ -7531,10 +7619,17 @@ _PyTypes_InitSlotDefs(void) for (slotdef *p = slotdefs; p->name; p++) { /* Slots must be ordered by their offset in the PyHeapTypeObject. */ assert(!p[1].name || p->offset <= p[1].offset); +#ifdef INTERN_NAME_STRINGS p->name_strobj = PyUnicode_InternFromString(p->name); if (!p->name_strobj || !PyUnicode_CHECK_INTERNED(p->name_strobj)) { return _PyStatus_NO_MEMORY(); } +#else + p->name_strobj = PyUnicode_FromString(p->name); + if (!p->name_strobj) { + return _PyStatus_NO_MEMORY(); + } +#endif } slotdefs_initialized = 1; return _PyStatus_OK(); @@ -7559,7 +7654,9 @@ update_slot(PyTypeObject *type, PyObject *name) int offset; assert(PyUnicode_CheckExact(name)); +#ifdef INTERN_NAME_STRINGS assert(PyUnicode_CHECK_INTERNED(name)); +#endif assert(slotdefs_initialized); pp = ptrs; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index aba7407533c4ed..ea46a44bf5faac 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -198,6 +198,11 @@ extern "C" { # define OVERALLOCATE_FACTOR 4 #endif +/* bpo-40521: Interned strings are shared by all interpreters. */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define INTERNED_STRINGS +#endif + /* This dictionary holds all interned unicode strings. Note that references to strings in this dictionary are *not* counted in the string's ob_refcnt. When the interned string reaches a refcnt of 0 the string deallocation @@ -206,7 +211,9 @@ extern "C" { Another way to look at this is that to say that the actual reference count of a string is: s->ob_refcnt + (s->state ? 2 : 0) */ +#ifdef INTERNED_STRINGS static PyObject *interned = NULL; +#endif /* The empty Unicode object is shared to improve performance. */ static PyObject *unicode_empty = NULL; @@ -281,9 +288,16 @@ unicode_decode_utf8(const char *s, Py_ssize_t size, /* List of static strings. */ static _Py_Identifier *static_strings = NULL; +/* bpo-40521: Latin1 singletons are shared by all interpreters. */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +# define LATIN1_SINGLETONS +#endif + +#ifdef LATIN1_SINGLETONS /* Single character Unicode strings in the Latin-1 range are being shared as well. */ static PyObject *unicode_latin1[256] = {NULL}; +#endif /* Fast detection of the most frequent whitespace characters */ const unsigned char _Py_ascii_whitespace[] = { @@ -449,7 +463,7 @@ unicode_check_encoding_errors(const char *encoding, const char *errors) /* Avoid calling _PyCodec_Lookup() and PyCodec_LookupError() before the codec registry is ready: before_PyUnicode_InitEncodings() is called. 
*/ - if (!interp->fs_codec.encoding) { + if (!interp->unicode.fs_codec.encoding) { return 0; } @@ -662,6 +676,7 @@ unicode_result_ready(PyObject *unicode) return unicode_empty; } +#ifdef LATIN1_SINGLETONS if (length == 1) { const void *data = PyUnicode_DATA(unicode); int kind = PyUnicode_KIND(unicode); @@ -683,6 +698,7 @@ unicode_result_ready(PyObject *unicode) } } } +#endif assert(_PyUnicode_CheckConsistency(unicode, 1)); return unicode; @@ -1913,10 +1929,12 @@ unicode_dealloc(PyObject *unicode) case SSTATE_INTERNED_MORTAL: /* revive dead object temporarily for DelItem */ Py_SET_REFCNT(unicode, 3); +#ifdef INTERNED_STRINGS if (PyDict_DelItem(interned, unicode) != 0) { _PyErr_WriteUnraisableMsg("deletion of interned string failed", NULL); } +#endif break; case SSTATE_INTERNED_IMMORTAL: @@ -1944,15 +1962,18 @@ unicode_dealloc(PyObject *unicode) static int unicode_is_singleton(PyObject *unicode) { - PyASCIIObject *ascii = (PyASCIIObject *)unicode; - if (unicode == unicode_empty) + if (unicode == unicode_empty) { return 1; + } +#ifdef LATIN1_SINGLETONS + PyASCIIObject *ascii = (PyASCIIObject *)unicode; if (ascii->state.kind != PyUnicode_WCHAR_KIND && ascii->length == 1) { Py_UCS4 ch = PyUnicode_READ_CHAR(unicode, 0); if (ch < 256 && unicode_latin1[ch] == unicode) return 1; } +#endif return 0; } #endif @@ -2094,16 +2115,28 @@ unicode_write_cstr(PyObject *unicode, Py_ssize_t index, static PyObject* get_latin1_char(unsigned char ch) { - PyObject *unicode = unicode_latin1[ch]; + PyObject *unicode; + +#ifdef LATIN1_SINGLETONS + unicode = unicode_latin1[ch]; + if (unicode) { + Py_INCREF(unicode); + return unicode; + } +#endif + + unicode = PyUnicode_New(1, ch); if (!unicode) { - unicode = PyUnicode_New(1, ch); - if (!unicode) - return NULL; - PyUnicode_1BYTE_DATA(unicode)[0] = ch; - assert(_PyUnicode_CheckConsistency(unicode, 1)); - unicode_latin1[ch] = unicode; + return NULL; } + + PyUnicode_1BYTE_DATA(unicode)[0] = ch; + assert(_PyUnicode_CheckConsistency(unicode, 1)); + +#ifdef LATIN1_SINGLETONS Py_INCREF(unicode); + unicode_latin1[ch] = unicode; +#endif return unicode; } @@ -2256,8 +2289,8 @@ _PyUnicode_FromId(_Py_Identifier *id) return id->object; } -void -_PyUnicode_ClearStaticStrings() +static void +unicode_clear_static_strings(void) { _Py_Identifier *tmp, *s = static_strings; while (s) { @@ -3617,16 +3650,17 @@ PyObject * PyUnicode_EncodeFSDefault(PyObject *unicode) { PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->fs_codec.utf8) { + struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec; + if (fs_codec->utf8) { return unicode_encode_utf8(unicode, - interp->fs_codec.error_handler, - interp->fs_codec.errors); + fs_codec->error_handler, + fs_codec->errors); } #ifndef _Py_FORCE_UTF8_FS_ENCODING - else if (interp->fs_codec.encoding) { + else if (fs_codec->encoding) { return PyUnicode_AsEncodedString(unicode, - interp->fs_codec.encoding, - interp->fs_codec.errors); + fs_codec->encoding, + fs_codec->errors); } #endif else { @@ -3853,17 +3887,18 @@ PyObject* PyUnicode_DecodeFSDefaultAndSize(const char *s, Py_ssize_t size) { PyInterpreterState *interp = _PyInterpreterState_GET(); - if (interp->fs_codec.utf8) { + struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec; + if (fs_codec->utf8) { return unicode_decode_utf8(s, size, - interp->fs_codec.error_handler, - interp->fs_codec.errors, + fs_codec->error_handler, + fs_codec->errors, NULL); } #ifndef _Py_FORCE_UTF8_FS_ENCODING - else if (interp->fs_codec.encoding) { + else if (fs_codec->encoding) { return 
PyUnicode_Decode(s, size, - interp->fs_codec.encoding, - interp->fs_codec.errors); + fs_codec->encoding, + fs_codec->errors); } #endif else { @@ -11270,7 +11305,6 @@ int _PyUnicode_EqualToASCIIId(PyObject *left, _Py_Identifier *right) { PyObject *right_uni; - Py_hash_t hash; assert(_PyUnicode_CHECK(left)); assert(right->string); @@ -11302,10 +11336,12 @@ _PyUnicode_EqualToASCIIId(PyObject *left, _Py_Identifier *right) if (PyUnicode_CHECK_INTERNED(left)) return 0; +#ifdef INTERNED_STRINGS assert(_PyUnicode_HASH(right_uni) != -1); - hash = _PyUnicode_HASH(left); + Py_hash_t hash = _PyUnicode_HASH(left); if (hash != -1 && hash != _PyUnicode_HASH(right_uni)) return 0; +#endif return unicode_compare_eq(left, right_uni); } @@ -12275,31 +12311,22 @@ unicode_isnumeric_impl(PyObject *self) Py_RETURN_TRUE; } -int -PyUnicode_IsIdentifier(PyObject *self) +Py_ssize_t +_PyUnicode_ScanIdentifier(PyObject *self) { Py_ssize_t i; - int ready = PyUnicode_IS_READY(self); + if (PyUnicode_READY(self) == -1) + return -1; - Py_ssize_t len = ready ? PyUnicode_GET_LENGTH(self) : PyUnicode_GET_SIZE(self); + Py_ssize_t len = PyUnicode_GET_LENGTH(self); if (len == 0) { /* an empty string is not a valid identifier */ return 0; } - int kind = 0; - const void *data = NULL; - const wchar_t *wstr = NULL; - Py_UCS4 ch; - if (ready) { - kind = PyUnicode_KIND(self); - data = PyUnicode_DATA(self); - ch = PyUnicode_READ(kind, data, 0); - } - else { - wstr = _PyUnicode_WSTR(self); - ch = wstr[0]; - } + int kind = PyUnicode_KIND(self); + const void *data = PyUnicode_DATA(self); + Py_UCS4 ch = PyUnicode_READ(kind, data, 0); /* PEP 3131 says that the first character must be in XID_Start and subsequent characters in XID_Continue, and for the ASCII range, the 2.x rules apply (i.e @@ -12313,17 +12340,62 @@ PyUnicode_IsIdentifier(PyObject *self) } for (i = 1; i < len; i++) { - if (ready) { - ch = PyUnicode_READ(kind, data, i); + ch = PyUnicode_READ(kind, data, i); + if (!_PyUnicode_IsXidContinue(ch)) { + return i; } - else { - ch = wstr[i]; + } + return i; +} + +int +PyUnicode_IsIdentifier(PyObject *self) +{ + if (PyUnicode_IS_READY(self)) { + Py_ssize_t i = _PyUnicode_ScanIdentifier(self); + Py_ssize_t len = PyUnicode_GET_LENGTH(self); + /* an empty string is not a valid identifier */ + return len && i == len; + } + else { + Py_ssize_t i = 0, len = PyUnicode_GET_SIZE(self); + if (len == 0) { + /* an empty string is not a valid identifier */ + return 0; } - if (!_PyUnicode_IsXidContinue(ch)) { + + const wchar_t *wstr = _PyUnicode_WSTR(self); + Py_UCS4 ch = wstr[i++]; +#if SIZEOF_WCHAR_T == 2 + if (Py_UNICODE_IS_HIGH_SURROGATE(ch) + && i < len + && Py_UNICODE_IS_LOW_SURROGATE(wstr[i])) + { + ch = Py_UNICODE_JOIN_SURROGATES(ch, wstr[i]); + i++; + } +#endif + if (!_PyUnicode_IsXidStart(ch) && ch != 0x5F /* LOW LINE */) { return 0; } + + while (i < len) { + ch = wstr[i++]; +#if SIZEOF_WCHAR_T == 2 + if (Py_UNICODE_IS_HIGH_SURROGATE(ch) + && i < len + && Py_UNICODE_IS_LOW_SURROGATE(wstr[i])) + { + ch = Py_UNICODE_JOIN_SURROGATES(ch, wstr[i]); + i++; + } +#endif + if (!_PyUnicode_IsXidContinue(ch)) { + return 0; + } + } + return 1; } - return 1; } /*[clinic input] @@ -15487,20 +15559,26 @@ void PyUnicode_InternInPlace(PyObject **p) { PyObject *s = *p; - PyObject *t; #ifdef Py_DEBUG assert(s != NULL); assert(_PyUnicode_CHECK(s)); #else - if (s == NULL || !PyUnicode_Check(s)) + if (s == NULL || !PyUnicode_Check(s)) { return; + } #endif + /* If it's a subclass, we don't really know what putting it in the interned dict might do. 
*/ - if (!PyUnicode_CheckExact(s)) + if (!PyUnicode_CheckExact(s)) { return; - if (PyUnicode_CHECK_INTERNED(s)) + } + + if (PyUnicode_CHECK_INTERNED(s)) { return; + } + +#ifdef INTERNED_STRINGS if (interned == NULL) { interned = PyDict_New(); if (interned == NULL) { @@ -15508,22 +15586,28 @@ PyUnicode_InternInPlace(PyObject **p) return; } } + + PyObject *t; Py_ALLOW_RECURSION t = PyDict_SetDefault(interned, s, s); Py_END_ALLOW_RECURSION + if (t == NULL) { PyErr_Clear(); return; } + if (t != s) { Py_INCREF(t); Py_SETREF(*p, t); return; } + /* The two references in interned are not counted by refcnt. The deallocator will take care of this */ Py_SET_REFCNT(s, Py_REFCNT(s) - 2); _PyUnicode_STATE(s).interned = SSTATE_INTERNED_MORTAL; +#endif } void @@ -15989,16 +16073,17 @@ init_fs_codec(PyInterpreterState *interp) return -1; } - PyMem_RawFree(interp->fs_codec.encoding); - interp->fs_codec.encoding = encoding; + struct _Py_unicode_fs_codec *fs_codec = &interp->unicode.fs_codec; + PyMem_RawFree(fs_codec->encoding); + fs_codec->encoding = encoding; /* encoding has been normalized by init_fs_encoding() */ - interp->fs_codec.utf8 = (strcmp(encoding, "utf-8") == 0); - PyMem_RawFree(interp->fs_codec.errors); - interp->fs_codec.errors = errors; - interp->fs_codec.error_handler = error_handler; + fs_codec->utf8 = (strcmp(encoding, "utf-8") == 0); + PyMem_RawFree(fs_codec->errors); + fs_codec->errors = errors; + fs_codec->error_handler = error_handler; #ifdef _Py_FORCE_UTF8_FS_ENCODING - assert(interp->fs_codec.utf8 == 1); + assert(fs_codec->utf8 == 1); #endif /* At this point, PyUnicode_EncodeFSDefault() and @@ -16007,8 +16092,8 @@ init_fs_codec(PyInterpreterState *interp) /* Set Py_FileSystemDefaultEncoding and Py_FileSystemDefaultEncodeErrors global configuration variables. 
*/ - if (_Py_SetFileSystemEncoding(interp->fs_codec.encoding, - interp->fs_codec.errors) < 0) { + if (_Py_SetFileSystemEncoding(fs_codec->encoding, + fs_codec->errors) < 0) { PyErr_NoMemory(); return -1; } @@ -16051,15 +16136,14 @@ _PyUnicode_InitEncodings(PyThreadState *tstate) static void -_PyUnicode_FiniEncodings(PyThreadState *tstate) +_PyUnicode_FiniEncodings(struct _Py_unicode_fs_codec *fs_codec) { - PyInterpreterState *interp = tstate->interp; - PyMem_RawFree(interp->fs_codec.encoding); - interp->fs_codec.encoding = NULL; - interp->fs_codec.utf8 = 0; - PyMem_RawFree(interp->fs_codec.errors); - interp->fs_codec.errors = NULL; - interp->fs_codec.error_handler = _Py_ERROR_UNKNOWN; + PyMem_RawFree(fs_codec->encoding); + fs_codec->encoding = NULL; + fs_codec->utf8 = 0; + PyMem_RawFree(fs_codec->errors); + fs_codec->errors = NULL; + fs_codec->error_handler = _Py_ERROR_UNKNOWN; } @@ -16109,13 +16193,15 @@ _PyUnicode_Fini(PyThreadState *tstate) Py_CLEAR(unicode_empty); +#ifdef LATIN1_SINGLETONS for (Py_ssize_t i = 0; i < 256; i++) { Py_CLEAR(unicode_latin1[i]); } - _PyUnicode_ClearStaticStrings(); +#endif + unicode_clear_static_strings(); } - _PyUnicode_FiniEncodings(tstate); + _PyUnicode_FiniEncodings(&tstate->interp->unicode.fs_codec); } diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c index 9640d93aaf2daf..313e8abab5a25f 100644 --- a/Objects/weakrefobject.c +++ b/Objects/weakrefobject.c @@ -665,10 +665,12 @@ proxy_iternext(PyWeakReference *proxy) WRAP_METHOD(proxy_bytes, __bytes__) +WRAP_METHOD(proxy_reversed, __reversed__) static PyMethodDef proxy_methods[] = { {"__bytes__", proxy_bytes, METH_NOARGS}, + {"__reversed__", proxy_reversed, METH_NOARGS}, {NULL, NULL} }; @@ -730,6 +732,21 @@ static PyMappingMethods proxy_as_mapping = { }; +static Py_hash_t +proxy_hash(PyObject *self) +{ + PyWeakReference *proxy = (PyWeakReference *)self; + if (!proxy_checkref(proxy)) { + return -1; + } + PyObject *obj = PyWeakref_GET_OBJECT(proxy); + Py_INCREF(obj); + Py_hash_t res = PyObject_Hash(obj); + Py_DECREF(obj); + return res; +} + + PyTypeObject _PyWeakref_ProxyType = { PyVarObject_HEAD_INIT(&PyType_Type, 0) @@ -746,7 +763,7 @@ _PyWeakref_ProxyType = { &proxy_as_number, /* tp_as_number */ &proxy_as_sequence, /* tp_as_sequence */ &proxy_as_mapping, /* tp_as_mapping */ - 0, /* tp_hash */ + proxy_hash, /* tp_hash */ 0, /* tp_call */ proxy_str, /* tp_str */ proxy_getattr, /* tp_getattro */ diff --git a/PCbuild/_uuid.vcxproj b/PCbuild/_uuid.vcxproj new file mode 100644 index 00000000000000..2437b7eb2d9399 --- /dev/null +++ b/PCbuild/_uuid.vcxproj @@ -0,0 +1,115 @@ + + + + + Debug + ARM + + + Debug + ARM64 + + + Debug + Win32 + + + Debug + x64 + + + PGInstrument + ARM + + + PGInstrument + ARM64 + + + PGInstrument + Win32 + + + PGInstrument + x64 + + + PGUpdate + ARM + + + PGUpdate + ARM64 + + + PGUpdate + Win32 + + + PGUpdate + x64 + + + Release + ARM + + + Release + ARM64 + + + Release + Win32 + + + Release + x64 + + + + {CB435430-EBB1-478B-8F4E-C256F6838F55} + _uuid + Win32Proj + false + + + + + DynamicLibrary + NotSet + + + + .pyd + + + + + + + + + + <_ProjectFileVersion>10.0.30319.1 + + + + rpcrt4.lib;%(AdditionalDependencies) + + + + + + + + + + + {cf7ac3d1-e2df-41d2-bea6-1e2556cdea26} + false + + + + + + \ No newline at end of file diff --git a/PCbuild/_uuid.vcxproj.filters b/PCbuild/_uuid.vcxproj.filters new file mode 100644 index 00000000000000..17949292314345 --- /dev/null +++ b/PCbuild/_uuid.vcxproj.filters @@ -0,0 +1,14 @@ + + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + 
cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + + + + Source Files + + + \ No newline at end of file diff --git a/PCbuild/pcbuild.proj b/PCbuild/pcbuild.proj index 22a9eed18d42bb..9c4d352b434488 100644 --- a/PCbuild/pcbuild.proj +++ b/PCbuild/pcbuild.proj @@ -51,7 +51,7 @@ - + diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index 6dc0139bc42af4..6d4c9506e5ec1a 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -1,6 +1,6 @@ Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio 15 -VisualStudioVersion = 15.0.27130.2024 +# Visual Studio Version 16 +VisualStudioVersion = 16.0.30028.174 MinimumVisualStudioVersion = 10.0.40219.1 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{553EC33E-9816-4996-A660-5D6186A0B0B3}" ProjectSection(SolutionItems) = preProject @@ -103,6 +103,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "venvwlauncher", "venvwlaunc EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pythonw_uwp", "pythonw_uwp.vcxproj", "{AB603547-1E2A-45B3-9E09-B04596006393}" EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_uuid", "_uuid.vcxproj", "{CB435430-EBB1-478B-8F4E-C256F6838F55}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|ARM = Debug|ARM @@ -1440,6 +1442,38 @@ Global {AB603547-1E2A-45B3-9E09-B04596006393}.Release|Win32.Build.0 = Release|Win32 {AB603547-1E2A-45B3-9E09-B04596006393}.Release|x64.ActiveCfg = Release|x64 {AB603547-1E2A-45B3-9E09-B04596006393}.Release|x64.Build.0 = Release|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM.ActiveCfg = Debug|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM.Build.0 = Debug|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM64.ActiveCfg = Debug|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|ARM64.Build.0 = Debug|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|Win32.ActiveCfg = Debug|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|Win32.Build.0 = Debug|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|x64.ActiveCfg = Debug|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Debug|x64.Build.0 = Debug|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM.ActiveCfg = PGInstrument|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM.Build.0 = PGInstrument|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM64.ActiveCfg = PGInstrument|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|ARM64.Build.0 = PGInstrument|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|Win32.ActiveCfg = PGInstrument|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|Win32.Build.0 = PGInstrument|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|x64.ActiveCfg = PGInstrument|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGInstrument|x64.Build.0 = PGInstrument|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM.ActiveCfg = PGUpdate|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM.Build.0 = PGUpdate|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM64.ActiveCfg = PGUpdate|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|ARM64.Build.0 = PGUpdate|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|Win32.ActiveCfg = PGUpdate|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|Win32.Build.0 = PGUpdate|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|x64.ActiveCfg = PGUpdate|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.PGUpdate|x64.Build.0 = PGUpdate|x64 + 
{CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM.ActiveCfg = Release|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM.Build.0 = Release|ARM + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM64.ActiveCfg = Release|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|ARM64.Build.0 = Release|ARM64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|Win32.ActiveCfg = Release|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|Win32.Build.0 = Release|Win32 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|x64.ActiveCfg = Release|x64 + {CB435430-EBB1-478B-8F4E-C256F6838F55}.Release|x64.Build.0 = Release|x64 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/PCbuild/python_uwp.vcxproj b/PCbuild/python_uwp.vcxproj index 5ff120a0da331a..fb27e9e71222e3 100644 --- a/PCbuild/python_uwp.vcxproj +++ b/PCbuild/python_uwp.vcxproj @@ -95,6 +95,7 @@ windowsapp.lib;%(AdditionalDependencies) Console + 2000000 diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 21b51bf5e6ddcf..b6b0cf3e991ba7 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -138,6 +138,7 @@ + @@ -180,6 +181,7 @@ + @@ -334,7 +336,6 @@ - @@ -461,6 +462,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index f5c76fa34eb946..10dfffba6113e5 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -111,6 +111,9 @@ Include + + Include + Include @@ -234,6 +237,9 @@ Include + + Include + Include @@ -1031,6 +1037,9 @@ Python + + Modules + Python @@ -1139,9 +1148,6 @@ Modules - - Modules - PC diff --git a/PCbuild/pythonw_uwp.vcxproj b/PCbuild/pythonw_uwp.vcxproj index 828d0d1ccac217..e21e46a1b722ed 100644 --- a/PCbuild/pythonw_uwp.vcxproj +++ b/PCbuild/pythonw_uwp.vcxproj @@ -95,6 +95,7 @@ windowsapp.lib;%(AdditionalDependencies) Windows + 2000000 diff --git a/Parser/Python.asdl b/Parser/Python.asdl index f789f1da456e91..889712b4b3d36e 100644 --- a/Parser/Python.asdl +++ b/Parser/Python.asdl @@ -1,5 +1,5 @@ --- ASDL's 5 builtin types are: --- identifier, int, string, object, constant +-- ASDL's 4 builtin types are: +-- identifier, int, string, constant module Python { diff --git a/Parser/asdl.py b/Parser/asdl.py index 5416377100c64a..7f509488b96ed3 100644 --- a/Parser/asdl.py +++ b/Parser/asdl.py @@ -33,8 +33,7 @@ # See the EBNF at the top of the file to understand the logical connection # between the various node types. 
-builtin_types = {'identifier', 'string', 'bytes', 'int', 'object', 'singleton', - 'constant'} +builtin_types = {'identifier', 'string', 'int', 'constant'} class AST: def __repr__(self): diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index c98f949042f306..59bf03ef8df3d3 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -323,7 +323,7 @@ def emit(s, depth=0, reflow=True): if not opt and argtype != "int": emit("if (!%s) {" % argname, 1) emit("PyErr_SetString(PyExc_ValueError,", 2) - msg = "field %s is required for %s" % (argname, name) + msg = "field '%s' is required for %s" % (argname, name) emit(' "%s");' % msg, 2, reflow=False) emit('return NULL;', 2) @@ -853,11 +853,9 @@ def visitModule(self, mod): Py_INCREF((PyObject*)o); return (PyObject*)o; } -#define ast2obj_singleton ast2obj_object #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object #define ast2obj_string ast2obj_object -#define ast2obj_bytes ast2obj_object static PyObject* ast2obj_int(long b) { @@ -1147,12 +1145,8 @@ def simpleSum(self, sum, name): self.emit("case %s:" % t.name, 2) self.emit("Py_INCREF(astmodulestate_global->%s_singleton);" % t.name, 3) self.emit("return astmodulestate_global->%s_singleton;" % t.name, 3) - self.emit("default:", 2) - self.emit('/* should never happen, but just in case ... */', 3) - code = "PyErr_Format(PyExc_SystemError, \"unknown %s found\");" % name - self.emit(code, 3, reflow=False) - self.emit("return NULL;", 3) self.emit("}", 1) + self.emit("Py_UNREACHABLE();", 1); self.emit("}", 0) def visitProduct(self, prod, name): diff --git a/Parser/node.c b/Parser/node.c index f1b70e0f6815be..8789e01e9b848c 100644 --- a/Parser/node.c +++ b/Parser/node.c @@ -14,6 +14,7 @@ PyNode_New(int type) n->n_str = NULL; n->n_lineno = 0; n->n_end_lineno = 0; + n->n_col_offset = 0; n->n_end_col_offset = -1; n->n_nchildren = 0; n->n_child = NULL; diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 55605d5770f1e6..f4c5692212768d 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -199,172 +199,176 @@ static KeywordToken *reserved_keywords[] = { #define star_targets_seq_type 1128 #define star_target_type 1129 #define star_atom_type 1130 -#define inside_paren_ann_assign_target_type 1131 -#define ann_assign_subscript_attribute_target_type 1132 +#define single_target_type 1131 +#define single_subscript_attribute_target_type 1132 #define del_targets_type 1133 #define del_target_type 1134 #define del_t_atom_type 1135 -#define targets_type 1136 -#define target_type 1137 -#define t_primary_type 1138 // Left-recursive -#define t_lookahead_type 1139 -#define t_atom_type 1140 -#define incorrect_arguments_type 1141 -#define invalid_named_expression_type 1142 -#define invalid_assignment_type 1143 -#define invalid_block_type 1144 -#define invalid_comprehension_type 1145 -#define invalid_parameters_type 1146 -#define invalid_star_etc_type 1147 -#define invalid_lambda_star_etc_type 1148 -#define invalid_double_type_comments_type 1149 -#define _loop0_1_type 1150 -#define _loop0_2_type 1151 -#define _loop0_4_type 1152 -#define _gather_3_type 1153 -#define _loop0_6_type 1154 -#define _gather_5_type 1155 -#define _loop0_8_type 1156 -#define _gather_7_type 1157 -#define _loop0_10_type 1158 -#define _gather_9_type 1159 -#define _loop1_11_type 1160 -#define _loop0_13_type 1161 -#define _gather_12_type 1162 -#define _tmp_14_type 1163 -#define _tmp_15_type 1164 -#define _tmp_16_type 1165 -#define _tmp_17_type 1166 -#define _tmp_18_type 1167 -#define _tmp_19_type 1168 -#define 
_tmp_20_type 1169 -#define _tmp_21_type 1170 -#define _loop1_22_type 1171 -#define _tmp_23_type 1172 -#define _tmp_24_type 1173 -#define _loop0_26_type 1174 -#define _gather_25_type 1175 -#define _loop0_28_type 1176 -#define _gather_27_type 1177 -#define _tmp_29_type 1178 -#define _loop0_30_type 1179 -#define _loop1_31_type 1180 -#define _loop0_33_type 1181 -#define _gather_32_type 1182 -#define _tmp_34_type 1183 -#define _loop0_36_type 1184 -#define _gather_35_type 1185 -#define _tmp_37_type 1186 -#define _loop0_39_type 1187 -#define _gather_38_type 1188 -#define _loop0_41_type 1189 -#define _gather_40_type 1190 -#define _loop0_43_type 1191 -#define _gather_42_type 1192 -#define _loop0_45_type 1193 -#define _gather_44_type 1194 -#define _tmp_46_type 1195 -#define _loop1_47_type 1196 -#define _tmp_48_type 1197 -#define _tmp_49_type 1198 -#define _tmp_50_type 1199 -#define _tmp_51_type 1200 -#define _tmp_52_type 1201 -#define _loop0_53_type 1202 -#define _loop0_54_type 1203 -#define _loop0_55_type 1204 -#define _loop1_56_type 1205 -#define _loop0_57_type 1206 -#define _loop1_58_type 1207 -#define _loop1_59_type 1208 -#define _loop1_60_type 1209 -#define _loop0_61_type 1210 -#define _loop1_62_type 1211 -#define _loop0_63_type 1212 -#define _loop1_64_type 1213 -#define _loop0_65_type 1214 -#define _loop1_66_type 1215 -#define _loop1_67_type 1216 -#define _tmp_68_type 1217 -#define _loop0_70_type 1218 -#define _gather_69_type 1219 -#define _loop1_71_type 1220 -#define _loop0_73_type 1221 -#define _gather_72_type 1222 -#define _loop1_74_type 1223 -#define _loop0_75_type 1224 -#define _loop0_76_type 1225 -#define _loop0_77_type 1226 -#define _loop1_78_type 1227 -#define _loop0_79_type 1228 -#define _loop1_80_type 1229 -#define _loop1_81_type 1230 -#define _loop1_82_type 1231 -#define _loop0_83_type 1232 -#define _loop1_84_type 1233 -#define _loop0_85_type 1234 -#define _loop1_86_type 1235 -#define _loop0_87_type 1236 -#define _loop1_88_type 1237 -#define _loop1_89_type 1238 -#define _loop1_90_type 1239 -#define _loop1_91_type 1240 -#define _tmp_92_type 1241 -#define _loop0_94_type 1242 -#define _gather_93_type 1243 -#define _tmp_95_type 1244 -#define _tmp_96_type 1245 -#define _tmp_97_type 1246 -#define _tmp_98_type 1247 -#define _loop1_99_type 1248 -#define _tmp_100_type 1249 -#define _tmp_101_type 1250 -#define _loop0_103_type 1251 -#define _gather_102_type 1252 -#define _loop1_104_type 1253 -#define _loop0_105_type 1254 -#define _loop0_106_type 1255 -#define _tmp_107_type 1256 -#define _tmp_108_type 1257 -#define _loop0_110_type 1258 -#define _gather_109_type 1259 -#define _loop0_112_type 1260 -#define _gather_111_type 1261 -#define _loop0_114_type 1262 -#define _gather_113_type 1263 -#define _loop0_116_type 1264 -#define _gather_115_type 1265 -#define _loop0_117_type 1266 -#define _loop0_119_type 1267 -#define _gather_118_type 1268 -#define _tmp_120_type 1269 -#define _loop0_122_type 1270 -#define _gather_121_type 1271 -#define _loop0_124_type 1272 -#define _gather_123_type 1273 -#define _tmp_125_type 1274 -#define _tmp_126_type 1275 -#define _tmp_127_type 1276 -#define _tmp_128_type 1277 -#define _tmp_129_type 1278 -#define _loop0_130_type 1279 -#define _tmp_131_type 1280 -#define _tmp_132_type 1281 -#define _tmp_133_type 1282 -#define _tmp_134_type 1283 -#define _tmp_135_type 1284 -#define _tmp_136_type 1285 -#define _tmp_137_type 1286 -#define _tmp_138_type 1287 -#define _tmp_139_type 1288 -#define _tmp_140_type 1289 -#define _tmp_141_type 1290 -#define _tmp_142_type 1291 -#define 
_tmp_143_type 1292 -#define _tmp_144_type 1293 -#define _loop1_145_type 1294 -#define _tmp_146_type 1295 -#define _tmp_147_type 1296 +#define del_target_end_type 1136 +#define targets_type 1137 +#define target_type 1138 +#define t_primary_type 1139 // Left-recursive +#define t_lookahead_type 1140 +#define t_atom_type 1141 +#define incorrect_arguments_type 1142 +#define invalid_kwarg_type 1143 +#define invalid_named_expression_type 1144 +#define invalid_assignment_type 1145 +#define invalid_block_type 1146 +#define invalid_comprehension_type 1147 +#define invalid_parameters_type 1148 +#define invalid_star_etc_type 1149 +#define invalid_lambda_star_etc_type 1150 +#define invalid_double_type_comments_type 1151 +#define invalid_del_target_type 1152 +#define _loop0_1_type 1153 +#define _loop0_2_type 1154 +#define _loop0_4_type 1155 +#define _gather_3_type 1156 +#define _loop0_6_type 1157 +#define _gather_5_type 1158 +#define _loop0_8_type 1159 +#define _gather_7_type 1160 +#define _loop0_10_type 1161 +#define _gather_9_type 1162 +#define _loop1_11_type 1163 +#define _loop0_13_type 1164 +#define _gather_12_type 1165 +#define _tmp_14_type 1166 +#define _tmp_15_type 1167 +#define _tmp_16_type 1168 +#define _tmp_17_type 1169 +#define _tmp_18_type 1170 +#define _tmp_19_type 1171 +#define _tmp_20_type 1172 +#define _tmp_21_type 1173 +#define _loop1_22_type 1174 +#define _tmp_23_type 1175 +#define _tmp_24_type 1176 +#define _loop0_26_type 1177 +#define _gather_25_type 1178 +#define _loop0_28_type 1179 +#define _gather_27_type 1180 +#define _tmp_29_type 1181 +#define _loop0_30_type 1182 +#define _loop1_31_type 1183 +#define _loop0_33_type 1184 +#define _gather_32_type 1185 +#define _tmp_34_type 1186 +#define _loop0_36_type 1187 +#define _gather_35_type 1188 +#define _tmp_37_type 1189 +#define _loop0_39_type 1190 +#define _gather_38_type 1191 +#define _loop0_41_type 1192 +#define _gather_40_type 1193 +#define _loop0_43_type 1194 +#define _gather_42_type 1195 +#define _loop0_45_type 1196 +#define _gather_44_type 1197 +#define _tmp_46_type 1198 +#define _loop1_47_type 1199 +#define _tmp_48_type 1200 +#define _tmp_49_type 1201 +#define _tmp_50_type 1202 +#define _tmp_51_type 1203 +#define _tmp_52_type 1204 +#define _loop0_53_type 1205 +#define _loop0_54_type 1206 +#define _loop0_55_type 1207 +#define _loop1_56_type 1208 +#define _loop0_57_type 1209 +#define _loop1_58_type 1210 +#define _loop1_59_type 1211 +#define _loop1_60_type 1212 +#define _loop0_61_type 1213 +#define _loop1_62_type 1214 +#define _loop0_63_type 1215 +#define _loop1_64_type 1216 +#define _loop0_65_type 1217 +#define _loop1_66_type 1218 +#define _loop1_67_type 1219 +#define _tmp_68_type 1220 +#define _loop0_70_type 1221 +#define _gather_69_type 1222 +#define _loop1_71_type 1223 +#define _loop0_73_type 1224 +#define _gather_72_type 1225 +#define _loop1_74_type 1226 +#define _loop0_75_type 1227 +#define _loop0_76_type 1228 +#define _loop0_77_type 1229 +#define _loop1_78_type 1230 +#define _loop0_79_type 1231 +#define _loop1_80_type 1232 +#define _loop1_81_type 1233 +#define _loop1_82_type 1234 +#define _loop0_83_type 1235 +#define _loop1_84_type 1236 +#define _loop0_85_type 1237 +#define _loop1_86_type 1238 +#define _loop0_87_type 1239 +#define _loop1_88_type 1240 +#define _loop1_89_type 1241 +#define _loop1_90_type 1242 +#define _loop1_91_type 1243 +#define _tmp_92_type 1244 +#define _loop0_94_type 1245 +#define _gather_93_type 1246 +#define _tmp_95_type 1247 +#define _tmp_96_type 1248 +#define _tmp_97_type 1249 +#define _tmp_98_type 1250 
+#define _loop1_99_type 1251 +#define _tmp_100_type 1252 +#define _tmp_101_type 1253 +#define _loop0_103_type 1254 +#define _gather_102_type 1255 +#define _loop1_104_type 1256 +#define _loop0_105_type 1257 +#define _loop0_106_type 1258 +#define _tmp_107_type 1259 +#define _tmp_108_type 1260 +#define _loop0_110_type 1261 +#define _gather_109_type 1262 +#define _loop0_112_type 1263 +#define _gather_111_type 1264 +#define _loop0_114_type 1265 +#define _gather_113_type 1266 +#define _loop0_116_type 1267 +#define _gather_115_type 1268 +#define _loop0_117_type 1269 +#define _loop0_119_type 1270 +#define _gather_118_type 1271 +#define _tmp_120_type 1272 +#define _loop0_122_type 1273 +#define _gather_121_type 1274 +#define _loop0_124_type 1275 +#define _gather_123_type 1276 +#define _tmp_125_type 1277 +#define _loop0_126_type 1278 +#define _tmp_127_type 1279 +#define _tmp_128_type 1280 +#define _tmp_129_type 1281 +#define _tmp_130_type 1282 +#define _loop0_131_type 1283 +#define _tmp_132_type 1284 +#define _tmp_133_type 1285 +#define _tmp_134_type 1286 +#define _tmp_135_type 1287 +#define _tmp_136_type 1288 +#define _tmp_137_type 1289 +#define _tmp_138_type 1290 +#define _tmp_139_type 1291 +#define _tmp_140_type 1292 +#define _tmp_141_type 1293 +#define _tmp_142_type 1294 +#define _tmp_143_type 1295 +#define _tmp_144_type 1296 +#define _tmp_145_type 1297 +#define _loop1_146_type 1298 +#define _tmp_147_type 1299 +#define _tmp_148_type 1300 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -378,7 +382,7 @@ static asdl_seq* statement_newline_rule(Parser *p); static asdl_seq* simple_stmt_rule(Parser *p); static stmt_ty small_stmt_rule(Parser *p); static stmt_ty compound_stmt_rule(Parser *p); -static void *assignment_rule(Parser *p); +static stmt_ty assignment_rule(Parser *p); static AugOperator* augassign_rule(Parser *p); static stmt_ty global_stmt_rule(Parser *p); static stmt_ty nonlocal_stmt_rule(Parser *p); @@ -497,17 +501,19 @@ static expr_ty star_targets_rule(Parser *p); static asdl_seq* star_targets_seq_rule(Parser *p); static expr_ty star_target_rule(Parser *p); static expr_ty star_atom_rule(Parser *p); -static expr_ty inside_paren_ann_assign_target_rule(Parser *p); -static expr_ty ann_assign_subscript_attribute_target_rule(Parser *p); +static expr_ty single_target_rule(Parser *p); +static expr_ty single_subscript_attribute_target_rule(Parser *p); static asdl_seq* del_targets_rule(Parser *p); static expr_ty del_target_rule(Parser *p); static expr_ty del_t_atom_rule(Parser *p); +static void *del_target_end_rule(Parser *p); static asdl_seq* targets_rule(Parser *p); static expr_ty target_rule(Parser *p); static expr_ty t_primary_rule(Parser *p); static void *t_lookahead_rule(Parser *p); static expr_ty t_atom_rule(Parser *p); static void *incorrect_arguments_rule(Parser *p); +static void *invalid_kwarg_rule(Parser *p); static void *invalid_named_expression_rule(Parser *p); static void *invalid_assignment_rule(Parser *p); static void *invalid_block_rule(Parser *p); @@ -516,6 +522,7 @@ static void *invalid_parameters_rule(Parser *p); static void *invalid_star_etc_rule(Parser *p); static void *invalid_lambda_star_etc_rule(Parser *p); static void *invalid_double_type_comments_rule(Parser *p); +static void *invalid_del_target_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); static asdl_seq *_loop0_4_rule(Parser *p); @@ -641,12 +648,12 @@ static asdl_seq *_gather_121_rule(Parser *p); static asdl_seq 
*_loop0_124_rule(Parser *p); static asdl_seq *_gather_123_rule(Parser *p); static void *_tmp_125_rule(Parser *p); -static void *_tmp_126_rule(Parser *p); +static asdl_seq *_loop0_126_rule(Parser *p); static void *_tmp_127_rule(Parser *p); static void *_tmp_128_rule(Parser *p); static void *_tmp_129_rule(Parser *p); -static asdl_seq *_loop0_130_rule(Parser *p); -static void *_tmp_131_rule(Parser *p); +static void *_tmp_130_rule(Parser *p); +static asdl_seq *_loop0_131_rule(Parser *p); static void *_tmp_132_rule(Parser *p); static void *_tmp_133_rule(Parser *p); static void *_tmp_134_rule(Parser *p); @@ -660,9 +667,10 @@ static void *_tmp_141_rule(Parser *p); static void *_tmp_142_rule(Parser *p); static void *_tmp_143_rule(Parser *p); static void *_tmp_144_rule(Parser *p); -static asdl_seq *_loop1_145_rule(Parser *p); -static void *_tmp_146_rule(Parser *p); +static void *_tmp_145_rule(Parser *p); +static asdl_seq *_loop1_146_rule(Parser *p); static void *_tmp_147_rule(Parser *p); +static void *_tmp_148_rule(Parser *p); // file: statements? $ @@ -672,29 +680,29 @@ file_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // statements? $ void *a; Token * endmarker_var; if ( - (a = statements_rule(p), 1) + (a = statements_rule(p), 1) // statements? && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = _PyPegen_make_module ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_module ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // interactive: statement_newline @@ -704,26 +712,26 @@ interactive_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // statement_newline asdl_seq* a; if ( - (a = statement_newline_rule(p)) + (a = statement_newline_rule(p)) // statement_newline ) { - res = Interactive ( a , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = Interactive ( a , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // eval: expressions NEWLINE* $ @@ -733,32 +741,32 @@ eval_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // expressions NEWLINE* $ asdl_seq * _loop0_1_var; expr_ty a; Token * endmarker_var; if ( - (a = expressions_rule(p)) + (a = expressions_rule(p)) // expressions && - (_loop0_1_var = _loop0_1_rule(p)) + (_loop0_1_var = _loop0_1_rule(p)) // NEWLINE* && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = Expression ( a , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = Expression ( a , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // func_type: '(' type_expressions? 
')' '->' expression NEWLINE* $ @@ -768,44 +776,44 @@ func_type_rule(Parser *p) if (p->error_indicator) { return NULL; } - mod_ty res = NULL; - int mark = p->mark; + mod_ty _res = NULL; + int _mark = p->mark; { // '(' type_expressions? ')' '->' expression NEWLINE* $ + Token * _literal; + Token * _literal_1; + Token * _literal_2; asdl_seq * _loop0_2_var; void *a; expr_ty b; Token * endmarker_var; - Token * literal; - Token * literal_1; - Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = type_expressions_rule(p), 1) + (a = type_expressions_rule(p), 1) // type_expressions? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 51)) + (_literal_2 = _PyPegen_expect_token(p, 51)) // token='->' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (_loop0_2_var = _loop0_2_rule(p)) + (_loop0_2_var = _loop0_2_rule(p)) // NEWLINE* && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = FunctionType ( a , b , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = FunctionType ( a , b , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // fstring: star_expressions @@ -815,22 +823,22 @@ fstring_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // type_expressions: @@ -847,166 +855,166 @@ type_expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.expression+ ',' '*' expression ',' '**' expression + Token * _literal; + Token * _literal_1; + Token * _literal_2; + Token * _literal_3; asdl_seq * a; expr_ty b; expr_ty c; - Token * literal; - Token * literal_1; - Token * literal_2; - Token * literal_3; if ( - (a = _gather_3_rule(p)) + (a = _gather_3_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (literal_2 = _PyPegen_expect_token(p, 12)) + (_literal_2 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_3 = _PyPegen_expect_token(p, 35)) + (_literal_3 = _PyPegen_expect_token(p, 35)) // token='**' && - (c = expression_rule(p)) + (c = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + 
p->mark = _mark; } { // ','.expression+ ',' '*' expression + Token * _literal; + Token * _literal_1; asdl_seq * a; expr_ty b; - Token * literal; - Token * literal_1; if ( - (a = _gather_5_rule(p)) + (a = _gather_5_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.expression+ ',' '**' expression + Token * _literal; + Token * _literal_1; asdl_seq * a; expr_ty b; - Token * literal; - Token * literal_1; if ( - (a = _gather_7_rule(p)) + (a = _gather_7_rule(p)) // ','.expression+ && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 35)) + (_literal_1 = _PyPegen_expect_token(p, 35)) // token='**' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' expression ',' '**' expression + Token * _literal; + Token * _literal_1; + Token * _literal_2; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; - Token * literal_2; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal_1 = _PyPegen_expect_token(p, 12)) + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_2 = _PyPegen_expect_token(p, 35)) + (_literal_2 = _PyPegen_expect_token(p, 35)) // token='**' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { - res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; 
return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.expression+ asdl_seq * _gather_9_var; if ( - (_gather_9_var = _gather_9_rule(p)) + (_gather_9_var = _gather_9_rule(p)) // ','.expression+ ) { - res = _gather_9_var; + _res = _gather_9_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // statements: statement+ @@ -1016,26 +1024,26 @@ statements_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // statement+ asdl_seq * a; if ( - (a = _loop1_11_rule(p)) + (a = _loop1_11_rule(p)) // statement+ ) { - res = _PyPegen_seq_flatten ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_flatten ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // statement: compound_stmt | simple_stmt @@ -1045,37 +1053,37 @@ statement_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // compound_stmt stmt_ty a; if ( - (a = compound_stmt_rule(p)) + (a = compound_stmt_rule(p)) // compound_stmt ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // simple_stmt asdl_seq* simple_stmt_var; if ( - (simple_stmt_var = simple_stmt_rule(p)) + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { - res = simple_stmt_var; + _res = simple_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // statement_newline: compound_stmt NEWLINE | simple_stmt | NEWLINE | $ @@ -1085,86 +1093,86 @@ statement_newline_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // compound_stmt NEWLINE stmt_ty a; Token * newline_var; if ( - (a = compound_stmt_rule(p)) + (a = compound_stmt_rule(p)) // compound_stmt && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // simple_stmt asdl_seq* simple_stmt_var; if ( - (simple_stmt_var = simple_stmt_rule(p)) + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { - res = simple_stmt_var; + _res = simple_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // NEWLINE Token * newline_var; if ( - 
(newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_singleton_seq ( p , CHECK ( _Py_Pass ( EXTRA ) ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_singleton_seq ( p , CHECK ( _Py_Pass ( EXTRA ) ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // $ Token * endmarker_var; if ( - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' ) { - res = _PyPegen_interactive_exit ( p ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_interactive_exit ( p ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // simple_stmt: small_stmt !';' NEWLINE | ';'.small_stmt+ ';'? NEWLINE @@ -1174,53 +1182,53 @@ simple_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // small_stmt !';' NEWLINE stmt_ty a; Token * newline_var; if ( - (a = small_stmt_rule(p)) + (a = small_stmt_rule(p)) // small_stmt && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 13) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 13) // token=';' && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = _PyPegen_singleton_seq ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ';'.small_stmt+ ';'? NEWLINE + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; Token * newline_var; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_12_rule(p)) + (a = _gather_12_rule(p)) // ';'.small_stmt+ && - (opt_var = _PyPegen_expect_token(p, 13), 1) + (_opt_var = _PyPegen_expect_token(p, 13), 1) // ';'? 
&& - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // small_stmt: @@ -1243,229 +1251,229 @@ small_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - if (_PyPegen_is_memoized(p, small_stmt_type, &res)) - return res; - int mark = p->mark; + stmt_ty _res = NULL; + if (_PyPegen_is_memoized(p, small_stmt_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // assignment - void *assignment_var; + stmt_ty assignment_var; if ( - (assignment_var = assignment_rule(p)) + (assignment_var = assignment_rule(p)) // assignment ) { - res = assignment_var; + _res = assignment_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty e; if ( - (e = star_expressions_rule(p)) + (e = star_expressions_rule(p)) // star_expressions ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Expr ( e , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Expr ( e , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &'return' return_stmt stmt_ty return_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) // token='return' && - (return_stmt_var = return_stmt_rule(p)) + (return_stmt_var = return_stmt_rule(p)) // return_stmt ) { - res = return_stmt_var; + _res = return_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('import' | 'from') import_stmt stmt_ty import_stmt_var; if ( _PyPegen_lookahead(1, _tmp_14_rule, p) && - (import_stmt_var = import_stmt_rule(p)) + (import_stmt_var = import_stmt_rule(p)) // import_stmt ) { - res = import_stmt_var; + _res = import_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'raise' raise_stmt stmt_ty raise_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) // token='raise' && - (raise_stmt_var = raise_stmt_rule(p)) + (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt ) { - res = raise_stmt_var; + _res = raise_stmt_var; goto done; } - p->mark = 
mark; + p->mark = _mark; } { // 'pass' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 502)) + (_keyword = _PyPegen_expect_token(p, 502)) // token='pass' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Pass ( EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Pass ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &'del' del_stmt stmt_ty del_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) // token='del' && - (del_stmt_var = del_stmt_rule(p)) + (del_stmt_var = del_stmt_rule(p)) // del_stmt ) { - res = del_stmt_var; + _res = del_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'yield' yield_stmt stmt_ty yield_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) // token='yield' && - (yield_stmt_var = yield_stmt_rule(p)) + (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt ) { - res = yield_stmt_var; + _res = yield_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'assert' assert_stmt stmt_ty assert_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) // token='assert' && - (assert_stmt_var = assert_stmt_rule(p)) + (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt ) { - res = assert_stmt_var; + _res = assert_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'break' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 506)) + (_keyword = _PyPegen_expect_token(p, 506)) // token='break' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Break ( EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Break ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'continue' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 507)) + (_keyword = _PyPegen_expect_token(p, 507)) // token='continue' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - 
UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Continue ( EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Continue ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &'global' global_stmt stmt_ty global_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) // token='global' && - (global_stmt_var = global_stmt_rule(p)) + (global_stmt_var = global_stmt_rule(p)) // global_stmt ) { - res = global_stmt_var; + _res = global_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'nonlocal' nonlocal_stmt stmt_ty nonlocal_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) // token='nonlocal' && - (nonlocal_stmt_var = nonlocal_stmt_rule(p)) + (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt ) { - res = nonlocal_stmt_var; + _res = nonlocal_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, small_stmt_type, res); - return res; + _PyPegen_insert_memo(p, _mark, small_stmt_type, _res); + return _res; } // compound_stmt: @@ -1482,262 +1490,262 @@ compound_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // &('def' | '@' | ASYNC) function_def stmt_ty function_def_var; if ( _PyPegen_lookahead(1, _tmp_15_rule, p) && - (function_def_var = function_def_rule(p)) + (function_def_var = function_def_rule(p)) // function_def ) { - res = function_def_var; + _res = function_def_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'if' if_stmt stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) // token='if' && - (if_stmt_var = if_stmt_rule(p)) + (if_stmt_var = if_stmt_rule(p)) // if_stmt ) { - res = if_stmt_var; + _res = if_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('class' | '@') class_def stmt_ty class_def_var; if ( _PyPegen_lookahead(1, _tmp_16_rule, p) && - (class_def_var = class_def_rule(p)) + (class_def_var = class_def_rule(p)) // class_def ) { - res = class_def_var; + _res = class_def_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('with' | ASYNC) with_stmt stmt_ty with_stmt_var; if ( _PyPegen_lookahead(1, _tmp_17_rule, p) && - (with_stmt_var = with_stmt_rule(p)) + (with_stmt_var = with_stmt_rule(p)) // with_stmt ) { - res = with_stmt_var; + _res = with_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &('for' | ASYNC) for_stmt stmt_ty for_stmt_var; if ( _PyPegen_lookahead(1, _tmp_18_rule, p) && - (for_stmt_var = for_stmt_rule(p)) + (for_stmt_var = for_stmt_rule(p)) // for_stmt ) { - res = for_stmt_var; + _res = for_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'try' try_stmt stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) 
// token='try' && - (try_stmt_var = try_stmt_rule(p)) + (try_stmt_var = try_stmt_rule(p)) // try_stmt ) { - res = try_stmt_var; + _res = try_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'while' while_stmt stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) // token='while' && - (while_stmt_var = while_stmt_rule(p)) + (while_stmt_var = while_stmt_rule(p)) // while_stmt ) { - res = while_stmt_var; + _res = while_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // assignment: // | NAME ':' expression ['=' annotated_rhs] -// | ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs] +// | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] // | ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? -// | target augassign (yield_expr | star_expressions) +// | single_target augassign (yield_expr | star_expressions) // | invalid_assignment -static void * +static stmt_ty assignment_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':' expression ['=' annotated_rhs] + Token * _literal; expr_ty a; expr_ty b; void *c; - Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (c = _tmp_19_rule(p), 1) + (c = _tmp_19_rule(p), 1) // ['=' annotated_rhs] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 6 , "Variable annotation syntax is" , _Py_AnnAssign ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , c , 1 , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 6 , "Variable annotation syntax is" , _Py_AnnAssign ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , c , 1 , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - { // ('(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target) ':' expression ['=' annotated_rhs] + { // ('(' single_target ')' | 
single_subscript_attribute_target) ':' expression ['=' annotated_rhs] + Token * _literal; void *a; expr_ty b; void *c; - Token * literal; if ( - (a = _tmp_20_rule(p)) + (a = _tmp_20_rule(p)) // '(' single_target ')' | single_subscript_attribute_target && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression && - (c = _tmp_21_rule(p), 1) + (c = _tmp_21_rule(p), 1) // ['=' annotated_rhs] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 6 , "Variable annotations syntax is" , _Py_AnnAssign ( a , b , c , 0 , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 6 , "Variable annotations syntax is" , _Py_AnnAssign ( a , b , c , 0 , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? asdl_seq * a; void *b; void *tc; if ( - (a = _loop1_22_rule(p)) + (a = _loop1_22_rule(p)) // ((star_targets '='))+ && - (b = _tmp_23_rule(p)) + (b = _tmp_23_rule(p)) // yield_expr | star_expressions && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Assign ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Assign ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - { // target augassign (yield_expr | star_expressions) + { // single_target augassign (yield_expr | star_expressions) expr_ty a; AugOperator* b; void *c; if ( - (a = target_rule(p)) + (a = single_target_rule(p)) // single_target && - (b = augassign_rule(p)) + (b = augassign_rule(p)) // augassign && - (c = _tmp_24_rule(p)) + (c = _tmp_24_rule(p)) // yield_expr | star_expressions ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_AugAssign ( a , b -> kind , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_AugAssign ( a , b -> kind , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_assignment void *invalid_assignment_var; if ( - (invalid_assignment_var = invalid_assignment_rule(p)) + (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment ) { - res = invalid_assignment_var; + _res = invalid_assignment_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // augassign: @@ -1760,206 +1768,206 @@ augassign_rule(Parser *p) if (p->error_indicator) { return NULL; } - AugOperator* res = NULL; - int mark = p->mark; + AugOperator* _res = NULL; + int _mark = p->mark; { // '+=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 36)) + (_literal = _PyPegen_expect_token(p, 36)) // token='+=' ) { - res = _PyPegen_augoperator ( p , Add ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Add ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '-=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 37)) + (_literal = _PyPegen_expect_token(p, 37)) // token='-=' ) { - res = _PyPegen_augoperator ( p , Sub ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Sub ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*=' - Token * 
literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 38)) + (_literal = _PyPegen_expect_token(p, 38)) // token='*=' ) { - res = _PyPegen_augoperator ( p , Mult ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Mult ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '@=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 50)) + (_literal = _PyPegen_expect_token(p, 50)) // token='@=' ) { - res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); - if (res == NULL && PyErr_Occurred()) { + _res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '/=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 39)) + (_literal = _PyPegen_expect_token(p, 39)) // token='/=' ) { - res = _PyPegen_augoperator ( p , Div ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Div ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '%=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 40)) + (_literal = _PyPegen_expect_token(p, 40)) // token='%=' ) { - res = _PyPegen_augoperator ( p , Mod ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Mod ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '&=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 41)) + (_literal = _PyPegen_expect_token(p, 41)) // token='&=' ) { - res = _PyPegen_augoperator ( p , BitAnd ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , BitAnd ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '|=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 42)) + (_literal = _PyPegen_expect_token(p, 42)) // token='|=' ) { - res = _PyPegen_augoperator ( p , BitOr ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , BitOr ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '^=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 43)) + (_literal = _PyPegen_expect_token(p, 43)) // token='^=' ) { - res = _PyPegen_augoperator ( p , BitXor ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , BitXor ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '<<=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 44)) + (_literal = _PyPegen_expect_token(p, 44)) // token='<<=' ) { - res = _PyPegen_augoperator ( p , LShift ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , LShift ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '>>=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 45)) + 
(_literal = _PyPegen_expect_token(p, 45)) // token='>>=' ) { - res = _PyPegen_augoperator ( p , RShift ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , RShift ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '**=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 46)) + (_literal = _PyPegen_expect_token(p, 46)) // token='**=' ) { - res = _PyPegen_augoperator ( p , Pow ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , Pow ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '//=' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 48)) + (_literal = _PyPegen_expect_token(p, 48)) // token='//=' ) { - res = _PyPegen_augoperator ( p , FloorDiv ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_augoperator ( p , FloorDiv ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // global_stmt: 'global' ','.NAME+ @@ -1969,45 +1977,45 @@ global_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'global' ','.NAME+ + Token * _keyword; asdl_seq * a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 508)) + (_keyword = _PyPegen_expect_token(p, 508)) // token='global' && - (a = _gather_25_rule(p)) + (a = _gather_25_rule(p)) // ','.NAME+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Global ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Global ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // nonlocal_stmt: 'nonlocal' ','.NAME+ @@ -2017,45 +2025,45 @@ nonlocal_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = 
p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'nonlocal' ','.NAME+ + Token * _keyword; asdl_seq * a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 509)) + (_keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal' && - (a = _gather_27_rule(p)) + (a = _gather_27_rule(p)) // ','.NAME+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Nonlocal ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Nonlocal ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // yield_stmt: yield_expr @@ -2065,42 +2073,42 @@ yield_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // yield_expr expr_ty y; if ( - (y = yield_expr_rule(p)) + (y = yield_expr_rule(p)) // yield_expr ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Expr ( y , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Expr ( y , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // assert_stmt: 'assert' expression [',' expression] @@ -2110,48 +2118,48 @@ assert_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int 
_mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'assert' expression [',' expression] + Token * _keyword; expr_ty a; void *b; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 505)) + (_keyword = _PyPegen_expect_token(p, 505)) // token='assert' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = _tmp_29_rule(p), 1) + (b = _tmp_29_rule(p), 1) // [',' expression] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Assert ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Assert ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // del_stmt: 'del' del_targets @@ -2161,45 +2169,45 @@ del_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'del' del_targets + Token * _keyword; asdl_seq* a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 503)) + (_keyword = _PyPegen_expect_token(p, 503)) // token='del' && - (a = del_targets_rule(p)) + (a = del_targets_rule(p)) // del_targets ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Delete ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Delete ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = 
mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_stmt: import_name | import_from @@ -2209,33 +2217,33 @@ import_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // import_name stmt_ty import_name_var; if ( - (import_name_var = import_name_rule(p)) + (import_name_var = import_name_rule(p)) // import_name ) { - res = import_name_var; + _res = import_name_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // import_from stmt_ty import_from_var; if ( - (import_from_var = import_from_rule(p)) + (import_from_var = import_from_rule(p)) // import_from ) { - res = import_from_var; + _res = import_from_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_name: 'import' dotted_as_names @@ -2245,45 +2253,45 @@ import_name_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'import' dotted_as_names + Token * _keyword; asdl_seq* a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) + (_keyword = _PyPegen_expect_token(p, 513)) // token='import' && - (a = dotted_as_names_rule(p)) + (a = dotted_as_names_rule(p)) // dotted_as_names ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Import ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Import ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from: @@ -2295,86 +2303,86 @@ import_from_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'from' (('.' 
| '...'))* dotted_name 'import' import_from_targets + Token * _keyword; + Token * _keyword_1; asdl_seq * a; expr_ty b; asdl_seq* c; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (a = _loop0_30_rule(p)) + (a = _loop0_30_rule(p)) // (('.' | '...'))* && - (b = dotted_name_rule(p)) + (b = dotted_name_rule(p)) // dotted_name && - (keyword_1 = _PyPegen_expect_token(p, 513)) + (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && - (c = import_from_targets_rule(p)) + (c = import_from_targets_rule(p)) // import_from_targets ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ImportFrom ( b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ImportFrom ( b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'from' (('.' | '...'))+ 'import' import_from_targets + Token * _keyword; + Token * _keyword_1; asdl_seq * a; asdl_seq* b; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (a = _loop1_31_rule(p)) + (a = _loop1_31_rule(p)) // (('.' | '...'))+ && - (keyword_1 = _PyPegen_expect_token(p, 513)) + (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && - (b = import_from_targets_rule(p)) + (b = import_from_targets_rule(p)) // import_from_targets ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from_targets: '(' import_from_as_names ','? ')' | import_from_as_names | '*' @@ -2384,62 +2392,62 @@ import_from_targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // '(' import_from_as_names ','? 
')' + Token * _literal; + Token * _literal_1; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq* a; - Token * literal; - Token * literal_1; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = import_from_as_names_rule(p)) + (a = import_from_as_names_rule(p)) // import_from_as_names && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // import_from_as_names asdl_seq* import_from_as_names_var; if ( - (import_from_as_names_var = import_from_as_names_rule(p)) + (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names ) { - res = import_from_as_names_var; + _res = import_from_as_names_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' ) { - res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from_as_names: ','.import_from_as_name+ @@ -2449,26 +2457,26 @@ import_from_as_names_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.import_from_as_name+ asdl_seq * a; if ( - (a = _gather_32_rule(p)) + (a = _gather_32_rule(p)) // ','.import_from_as_name+ ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // import_from_as_name: NAME ['as' NAME] @@ -2478,29 +2486,29 @@ import_from_as_name_rule(Parser *p) if (p->error_indicator) { return NULL; } - alias_ty res = NULL; - int mark = p->mark; + alias_ty _res = NULL; + int _mark = p->mark; { // NAME ['as' NAME] expr_ty a; void *b; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (b = _tmp_34_rule(p), 1) + (b = _tmp_34_rule(p), 1) // ['as' NAME] ) { - res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . 
id : NULL , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dotted_as_names: ','.dotted_as_name+ @@ -2510,26 +2518,26 @@ dotted_as_names_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.dotted_as_name+ asdl_seq * a; if ( - (a = _gather_35_rule(p)) + (a = _gather_35_rule(p)) // ','.dotted_as_name+ ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dotted_as_name: dotted_name ['as' NAME] @@ -2539,29 +2547,29 @@ dotted_as_name_rule(Parser *p) if (p->error_indicator) { return NULL; } - alias_ty res = NULL; - int mark = p->mark; + alias_ty _res = NULL; + int _mark = p->mark; { // dotted_name ['as' NAME] expr_ty a; void *b; if ( - (a = dotted_name_rule(p)) + (a = dotted_name_rule(p)) // dotted_name && - (b = _tmp_37_rule(p), 1) + (b = _tmp_37_rule(p), 1) // ['as' NAME] ) { - res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -2570,25 +2578,25 @@ static expr_ty dotted_name_raw(Parser *); static expr_ty dotted_name_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, dotted_name_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, dotted_name_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_0 = _PyPegen_update_memo(p, mark, dotted_name_type, res); + int tmpvar_0 = _PyPegen_update_memo(p, _mark, dotted_name_type, _res); if (tmpvar_0) { - return res; + return _res; } - p->mark = mark; - void *raw = dotted_name_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = dotted_name_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty dotted_name_raw(Parser *p) @@ -2596,43 +2604,43 @@ dotted_name_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // dotted_name '.' NAME + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = dotted_name_rule(p)) + (a = dotted_name_rule(p)) // dotted_name && - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' 
&& - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_join_names_with_dot ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_join_names_with_dot ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // NAME expr_ty name_var; if ( - (name_var = _PyPegen_name_token(p)) + (name_var = _PyPegen_name_token(p)) // NAME ) { - res = name_var; + _res = name_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // if_stmt: @@ -2644,89 +2652,89 @@ if_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'if' named_expression ':' block elif_stmt + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; stmt_ty c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = elif_stmt_rule(p)) + (c = elif_stmt_rule(p)) // elif_stmt ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'if' named_expression ':' block else_block? + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; void *c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = else_block_rule(p), 1) + (c = else_block_rule(p), 1) // else_block? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // elif_stmt: @@ -2738,89 +2746,89 @@ elif_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'elif' named_expression ':' block elif_stmt + Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; stmt_ty c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) + (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = elif_stmt_rule(p)) + (c = elif_stmt_rule(p)) // elif_stmt ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'elif' named_expression ':' block else_block? 
+ Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; void *c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 515)) + (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = else_block_rule(p), 1) + (c = else_block_rule(p), 1) // else_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_If ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // else_block: 'else' ':' block @@ -2830,32 +2838,32 @@ else_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // 'else' ':' block + Token * _keyword; + Token * _literal; asdl_seq* b; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 516)) + (_keyword = _PyPegen_expect_token(p, 516)) // token='else' && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { - res = b; - if (res == NULL && PyErr_Occurred()) { + _res = b; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // while_stmt: 'while' named_expression ':' block else_block? @@ -2865,54 +2873,54 @@ while_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'while' named_expression ':' block else_block? 
+ Token * _keyword; + Token * _literal; expr_ty a; asdl_seq* b; void *c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 512)) + (_keyword = _PyPegen_expect_token(p, 512)) // token='while' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (c = else_block_rule(p), 1) + (c = else_block_rule(p), 1) // else_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_While ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_While ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // for_stmt: @@ -2924,116 +2932,116 @@ for_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? + Token * _keyword; + Token * _keyword_1; + Token * _literal; asdl_seq* b; void *el; expr_ty ex; - Token * keyword; - Token * keyword_1; - Token * literal; expr_ty t; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 517)) + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (t = star_targets_rule(p)) + (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (ex = star_expressions_rule(p)) + (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (el = else_block_rule(p), 1) + (el = else_block_rule(p), 1) // else_block? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_For ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_For ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? + Token * _keyword; + Token * _keyword_1; + Token * _literal; Token * async_var; asdl_seq* b; void *el; expr_ty ex; - Token * keyword; - Token * keyword_1; - Token * literal; expr_ty t; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (t = star_targets_rule(p)) + (t = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (ex = star_expressions_rule(p)) + (ex = star_expressions_rule(p)) // star_expressions && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (el = else_block_rule(p), 1) + (el = else_block_rule(p), 1) // else_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async for loops are" , _Py_AsyncFor ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async for loops are" , _Py_AsyncFor ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // with_stmt: -// | 'with' '(' ','.with_item+ ')' ':' block +// | 'with' '(' ','.with_item+ ','? ')' ':' block // | 'with' ','.with_item+ ':' TYPE_COMMENT? block -// | ASYNC 'with' '(' ','.with_item+ ')' ':' block +// | ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block // | ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? 
block static stmt_ty with_stmt_rule(Parser *p) @@ -3041,171 +3049,179 @@ with_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro - { // 'with' '(' ','.with_item+ ')' ':' block + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'with' '(' ','.with_item+ ','? ')' ':' block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; if ( - (keyword = _PyPegen_expect_token(p, 519)) + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _gather_38_rule(p)) + (a = _gather_38_rule(p)) // ','.with_item+ && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? && - (literal_2 = _PyPegen_expect_token(p, 11)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (b = block_rule(p)) + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_With ( a , b , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_With ( a , b , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'with' ','.with_item+ ':' TYPE_COMMENT? block + Token * _keyword; + Token * _literal; asdl_seq * a; asdl_seq* b; - Token * keyword; - Token * literal; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 519)) + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (a = _gather_40_rule(p)) + (a = _gather_40_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
&& - (b = block_rule(p)) + (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_With ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_With ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - { // ASYNC 'with' '(' ','.with_item+ ')' ':' block + { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; Token * async_var; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (keyword = _PyPegen_expect_token(p, 519)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (literal = _PyPegen_expect_token(p, 7)) + (a = _gather_42_rule(p)) // ','.with_item+ && - (a = _gather_42_rule(p)) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (literal_2 = _PyPegen_expect_token(p, 11)) + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NULL , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NULL , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? 
block + Token * _keyword; + Token * _literal; asdl_seq * a; Token * async_var; asdl_seq* b; - Token * keyword; - Token * literal; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 519)) + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (a = _gather_44_rule(p)) + (a = _gather_44_rule(p)) // ','.with_item+ && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // with_item: expression ['as' target] @@ -3215,29 +3231,29 @@ with_item_rule(Parser *p) if (p->error_indicator) { return NULL; } - withitem_ty res = NULL; - int mark = p->mark; + withitem_ty _res = NULL; + int _mark = p->mark; { // expression ['as' target] expr_ty e; void *o; if ( - (e = expression_rule(p)) + (e = expression_rule(p)) // expression && - (o = _tmp_46_rule(p), 1) + (o = _tmp_46_rule(p), 1) // ['as' target] ) { - res = _Py_withitem ( e , o , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_withitem ( e , o , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // try_stmt: @@ -3249,175 +3265,175 @@ try_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'try' ':' block finally_block + Token * _keyword; + Token * _literal; asdl_seq* b; asdl_seq* f; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) + (_keyword = _PyPegen_expect_token(p, 511)) // token='try' && - (literal = _PyPegen_expect_token(p, 11)) + 
(_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (f = finally_block_rule(p)) + (f = finally_block_rule(p)) // finally_block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Try ( b , NULL , NULL , f , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Try ( b , NULL , NULL , f , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'try' ':' block except_block+ else_block? finally_block? + Token * _keyword; + Token * _literal; asdl_seq* b; void *el; asdl_seq * ex; void *f; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 511)) + (_keyword = _PyPegen_expect_token(p, 511)) // token='try' && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block && - (ex = _loop1_47_rule(p)) + (ex = _loop1_47_rule(p)) // except_block+ && - (el = else_block_rule(p), 1) + (el = else_block_rule(p), 1) // else_block? && - (f = finally_block_rule(p), 1) + (f = finally_block_rule(p), 1) // finally_block? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Try ( b , ex , el , f , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Try ( b , ex , el , f , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// except_block: 'except' expression ['as' target] ':' block | 'except' ':' block +// except_block: 'except' expression ['as' NAME] ':' block | 'except' ':' block static excepthandler_ty except_block_rule(Parser *p) { if (p->error_indicator) { return NULL; } - excepthandler_ty res = NULL; - int mark = p->mark; + excepthandler_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro - { // 'except' expression ['as' target] ':' block + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by 
EXTRA macro + { // 'except' expression ['as' NAME] ':' block + Token * _keyword; + Token * _literal; asdl_seq* b; expr_ty e; - Token * keyword; - Token * literal; void *t; if ( - (keyword = _PyPegen_expect_token(p, 520)) + (_keyword = _PyPegen_expect_token(p, 520)) // token='except' && - (e = expression_rule(p)) + (e = expression_rule(p)) // expression && - (t = _tmp_48_rule(p), 1) + (t = _tmp_48_rule(p), 1) // ['as' NAME] && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . id : NULL , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . id : NULL , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'except' ':' block + Token * _keyword; + Token * _literal; asdl_seq* b; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 520)) + (_keyword = _PyPegen_expect_token(p, 520)) // token='except' && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ExceptHandler ( NULL , NULL , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ExceptHandler ( NULL , NULL , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // finally_block: 'finally' ':' block @@ -3427,32 +3443,32 @@ finally_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // 'finally' ':' block + Token * _keyword; + Token * _literal; asdl_seq* a; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 521)) + (_keyword = _PyPegen_expect_token(p, 521)) // token='finally' && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (a = block_rule(p)) + (a = block_rule(p)) // block ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { 
p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // return_stmt: 'return' star_expressions? @@ -3462,45 +3478,45 @@ return_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'return' star_expressions? + Token * _keyword; void *a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 500)) + (_keyword = _PyPegen_expect_token(p, 500)) // token='return' && - (a = star_expressions_rule(p), 1) + (a = star_expressions_rule(p), 1) // star_expressions? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Return ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Return ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // raise_stmt: 'raise' expression ['from' expression] | 'raise' @@ -3510,71 +3526,71 @@ raise_stmt_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'raise' expression ['from' expression] + Token * _keyword; expr_ty a; void *b; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) + (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = _tmp_49_rule(p), 1) + (b = _tmp_49_rule(p), 1) // ['from' expression] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used 
by EXTRA macro - res = _Py_Raise ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Raise ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'raise' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 501)) + (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Raise ( NULL , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Raise ( NULL , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // function_def: decorators function_def_raw | function_def_raw @@ -3584,40 +3600,40 @@ function_def_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // decorators function_def_raw asdl_seq* d; stmt_ty f; if ( - (d = decorators_rule(p)) + (d = decorators_rule(p)) // decorators && - (f = function_def_raw_rule(p)) + (f = function_def_raw_rule(p)) // function_def_raw ) { - res = _PyPegen_function_def_decorators ( p , d , f ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_function_def_decorators ( p , d , f ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // function_def_raw stmt_ty function_def_raw_var; if ( - (function_def_raw_var = function_def_raw_rule(p)) + (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw ) { - res = function_def_raw_var; + _res = function_def_raw_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // function_def_raw: @@ -3629,116 +3645,116 @@ function_def_raw_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? 
block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; void *a; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; expr_ty n; void *params; void *tc; if ( - (keyword = _PyPegen_expect_token(p, 522)) + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' && - (n = _PyPegen_name_token(p)) + (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (params = params_rule(p), 1) + (params = params_rule(p), 1) // params? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_50_rule(p), 1) + (a = _tmp_50_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = func_type_comment_rule(p), 1) + (tc = func_type_comment_rule(p), 1) // func_type_comment? && - (b = block_rule(p)) + (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_FunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_FunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; void *a; Token * async_var; asdl_seq* b; - Token * keyword; - Token * literal; - Token * literal_1; - Token * literal_2; expr_ty n; void *params; void *tc; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 522)) + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' && - (n = _PyPegen_name_token(p)) + (n = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (params = params_rule(p), 1) + (params = params_rule(p), 1) // params? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_51_rule(p), 1) + (a = _tmp_51_rule(p), 1) // ['->' expression] && - (literal_2 = _PyPegen_expect_token(p, 11)) + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && - (tc = func_type_comment_rule(p), 1) + (tc = func_type_comment_rule(p), 1) // func_type_comment? 
&& - (b = block_rule(p)) + (b = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Async functions are" , _Py_AsyncFunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async functions are" , _Py_AsyncFunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // func_type_comment: @@ -3751,53 +3767,53 @@ func_type_comment_rule(Parser *p) if (p->error_indicator) { return NULL; } - Token* res = NULL; - int mark = p->mark; + Token* _res = NULL; + int _mark = p->mark; { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) Token * newline_var; Token * t; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (t = _PyPegen_expect_token(p, TYPE_COMMENT)) + (t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && _PyPegen_lookahead(1, _tmp_52_rule, p) ) { - res = t; - if (res == NULL && PyErr_Occurred()) { + _res = t; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_double_type_comments void *invalid_double_type_comments_var; if ( - (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) + (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments ) { - res = invalid_double_type_comments_var; + _res = invalid_double_type_comments_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // TYPE_COMMENT Token * type_comment_var; if ( - (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' ) { - res = type_comment_var; + _res = type_comment_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // params: invalid_parameters | parameters @@ -3807,33 +3823,33 @@ params_rule(Parser *p) if (p->error_indicator) { return NULL; } - arguments_ty res = NULL; - int mark = p->mark; + arguments_ty _res = NULL; + int _mark = p->mark; { // invalid_parameters void *invalid_parameters_var; if ( - (invalid_parameters_var = invalid_parameters_rule(p)) + (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters ) { - res = invalid_parameters_var; + _res = invalid_parameters_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // parameters arguments_ty parameters_var; if ( - (parameters_var = parameters_rule(p)) + (parameters_var = parameters_rule(p)) // parameters ) { - res = 
parameters_var; + _res = parameters_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // parameters: @@ -3848,110 +3864,110 @@ parameters_rule(Parser *p) if (p->error_indicator) { return NULL; } - arguments_ty res = NULL; - int mark = p->mark; + arguments_ty _res = NULL; + int _mark = p->mark; { // slash_no_default param_no_default* param_with_default* star_etc? asdl_seq* a; asdl_seq * b; asdl_seq * c; void *d; if ( - (a = slash_no_default_rule(p)) + (a = slash_no_default_rule(p)) // slash_no_default && - (b = _loop0_53_rule(p)) + (b = _loop0_53_rule(p)) // param_no_default* && - (c = _loop0_54_rule(p)) + (c = _loop0_54_rule(p)) // param_with_default* && - (d = star_etc_rule(p), 1) + (d = star_etc_rule(p), 1) // star_etc? ) { - res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // slash_with_default param_with_default* star_etc? SlashWithDefault* a; asdl_seq * b; void *c; if ( - (a = slash_with_default_rule(p)) + (a = slash_with_default_rule(p)) // slash_with_default && - (b = _loop0_55_rule(p)) + (b = _loop0_55_rule(p)) // param_with_default* && - (c = star_etc_rule(p), 1) + (c = star_etc_rule(p), 1) // star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_no_default+ param_with_default* star_etc? asdl_seq * a; asdl_seq * b; void *c; if ( - (a = _loop1_56_rule(p)) + (a = _loop1_56_rule(p)) // param_no_default+ && - (b = _loop0_57_rule(p)) + (b = _loop0_57_rule(p)) // param_with_default* && - (c = star_etc_rule(p), 1) + (c = star_etc_rule(p), 1) // star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_with_default+ star_etc? asdl_seq * a; void *b; if ( - (a = _loop1_58_rule(p)) + (a = _loop1_58_rule(p)) // param_with_default+ && - (b = star_etc_rule(p), 1) + (b = star_etc_rule(p), 1) // star_etc? 
) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_etc StarEtc* a; if ( - (a = star_etc_rule(p)) + (a = star_etc_rule(p)) // star_etc ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slash_no_default: param_no_default+ '/' ',' | param_no_default+ '/' &')' @@ -3961,52 +3977,52 @@ slash_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // param_no_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; - Token * literal; - Token * literal_1; if ( - (a = _loop1_59_rule(p)) + (a = _loop1_59_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_no_default+ '/' &')' + Token * _literal; asdl_seq * a; - Token * literal; if ( - (a = _loop1_60_rule(p)) + (a = _loop1_60_rule(p)) // param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slash_with_default: @@ -4018,58 +4034,58 @@ slash_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - SlashWithDefault* res = NULL; - int mark = p->mark; + SlashWithDefault* _res = NULL; + int _mark = p->mark; { // param_no_default* param_with_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; asdl_seq * b; - Token * literal; - Token * literal_1; if ( - (a = _loop0_61_rule(p)) + (a = _loop0_61_rule(p)) // param_no_default* && - (b = _loop1_62_rule(p)) + (b = _loop1_62_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param_no_default* param_with_default+ '/' &')' + Token * _literal; asdl_seq * a; asdl_seq * b; - Token * literal; if ( - (a = _loop0_63_rule(p)) + (a = 
_loop0_63_rule(p)) // param_no_default* && - (b = _loop1_64_rule(p)) + (b = _loop1_64_rule(p)) // param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_etc: @@ -4083,85 +4099,85 @@ star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - StarEtc* res = NULL; - int mark = p->mark; + StarEtc* _res = NULL; + int _mark = p->mark; { // '*' param_no_default param_maybe_default* kwds? + Token * _literal; arg_ty a; asdl_seq * b; void *c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = param_no_default_rule(p)) + (a = param_no_default_rule(p)) // param_no_default && - (b = _loop0_65_rule(p)) + (b = _loop0_65_rule(p)) // param_maybe_default* && - (c = kwds_rule(p), 1) + (c = kwds_rule(p), 1) // kwds? ) { - res = _PyPegen_star_etc ( p , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' ',' param_maybe_default+ kwds? + Token * _literal; + Token * _literal_1; asdl_seq * b; void *c; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_66_rule(p)) + (b = _loop1_66_rule(p)) // param_maybe_default+ && - (c = kwds_rule(p), 1) + (c = kwds_rule(p), 1) // kwds? 
) { - res = _PyPegen_star_etc ( p , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // kwds arg_ty a; if ( - (a = kwds_rule(p)) + (a = kwds_rule(p)) // kwds ) { - res = _PyPegen_star_etc ( p , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_star_etc void *invalid_star_etc_var; if ( - (invalid_star_etc_var = invalid_star_etc_rule(p)) + (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc ) { - res = invalid_star_etc_var; + _res = invalid_star_etc_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwds: '**' param_no_default @@ -4171,29 +4187,29 @@ kwds_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // '**' param_no_default + Token * _literal; arg_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = param_no_default_rule(p)) + (a = param_no_default_rule(p)) // param_no_default ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param_no_default: param ',' TYPE_COMMENT? | param TYPE_COMMENT? &')' @@ -4203,52 +4219,52 @@ param_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // param ',' TYPE_COMMENT? + Token * _literal; arg_ty a; - Token * literal; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param TYPE_COMMENT? &')' arg_ty a; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param_with_default: param default ',' TYPE_COMMENT? | param default TYPE_COMMENT? 
&')' @@ -4258,58 +4274,58 @@ param_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // param default ',' TYPE_COMMENT? + Token * _literal; arg_ty a; expr_ty c; - Token * literal; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param default TYPE_COMMENT? &')' arg_ty a; expr_ty c; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param_maybe_default: @@ -4321,58 +4337,58 @@ param_maybe_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // param default? ',' TYPE_COMMENT? + Token * _literal; arg_ty a; void *c; - Token * literal; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // param default? TYPE_COMMENT? &')' arg_ty a; void *c; void *tc; if ( - (a = param_rule(p)) + (a = param_rule(p)) // param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? 
&& - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // param: NAME annotation? @@ -4382,45 +4398,45 @@ param_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME annotation? expr_ty a; void *b; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (b = annotation_rule(p), 1) + (b = annotation_rule(p), 1) // annotation? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_arg ( a -> v . Name . id , b , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_arg ( a -> v . Name . 
id , b , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // annotation: ':' expression @@ -4430,29 +4446,29 @@ annotation_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // ':' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // default: '=' expression @@ -4462,29 +4478,29 @@ default_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // '=' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // decorators: (('@' named_expression NEWLINE))+ @@ -4494,26 +4510,26 @@ decorators_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // (('@' named_expression NEWLINE))+ asdl_seq * a; if ( - (a = _loop1_67_rule(p)) + (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // class_def: decorators class_def_raw | class_def_raw @@ -4523,40 +4539,40 @@ class_def_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; { // decorators class_def_raw asdl_seq* a; stmt_ty b; if ( - (a = decorators_rule(p)) + (a = decorators_rule(p)) // decorators && - (b = class_def_raw_rule(p)) + (b = class_def_raw_rule(p)) // class_def_raw ) { - res = _PyPegen_class_def_decorators ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_class_def_decorators ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // class_def_raw stmt_ty class_def_raw_var; if ( - (class_def_raw_var = class_def_raw_rule(p)) + (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw ) { - res = class_def_raw_var; + _res = class_def_raw_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // class_def_raw: 'class' NAME ['(' arguments? 
')'] ':' block @@ -4566,54 +4582,54 @@ class_def_raw_rule(Parser *p) if (p->error_indicator) { return NULL; } - stmt_ty res = NULL; - int mark = p->mark; + stmt_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'class' NAME ['(' arguments? ')'] ':' block + Token * _keyword; + Token * _literal; expr_ty a; void *b; asdl_seq* c; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 523)) + (_keyword = _PyPegen_expect_token(p, 523)) // token='class' && - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (b = _tmp_68_rule(p), 1) + (b = _tmp_68_rule(p), 1) // ['(' arguments? ')'] && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (c = block_rule(p)) + (c = block_rule(p)) // block ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ClassDef ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , c , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ClassDef ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , c , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // block: NEWLINE INDENT statements DEDENT | simple_stmt | invalid_block @@ -4623,60 +4639,60 @@ block_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - if (_PyPegen_is_memoized(p, block_type, &res)) - return res; - int mark = p->mark; + asdl_seq* _res = NULL; + if (_PyPegen_is_memoized(p, block_type, &_res)) + return _res; + int _mark = p->mark; { // NEWLINE INDENT statements DEDENT asdl_seq* a; Token * dedent_var; Token * indent_var; Token * newline_var; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (indent_var = _PyPegen_expect_token(p, INDENT)) + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' && - (a = statements_rule(p)) + (a = statements_rule(p)) // statements && - (dedent_var = _PyPegen_expect_token(p, DEDENT)) + (dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // simple_stmt asdl_seq* simple_stmt_var; if ( - (simple_stmt_var = simple_stmt_rule(p)) + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt ) { - res = simple_stmt_var; + _res = simple_stmt_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_block void *invalid_block_var; if ( - (invalid_block_var = invalid_block_rule(p)) + (invalid_block_var = invalid_block_rule(p)) // invalid_block ) { - res = invalid_block_var; + _res = invalid_block_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, block_type, res); - return res; + _PyPegen_insert_memo(p, _mark, block_type, _res); + return _res; } // expressions_list: ','.star_expression+ ','? @@ -4686,30 +4702,30 @@ expressions_list_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.star_expression+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_69_rule(p)) + (a = _gather_69_rule(p)) // ','.star_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_expressions: @@ -4722,86 +4738,86 @@ star_expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_expression ((',' star_expression))+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; asdl_seq * b; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = star_expression_rule(p)) + (a = star_expression_rule(p)) // star_expression && - (b = _loop1_71_rule(p)) + (b = _loop1_71_rule(p)) // ((',' star_expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expression ',' + Token * _literal; expr_ty a; - Token * literal; if ( - (a = star_expression_rule(p)) + (a = star_expression_rule(p)) // star_expression && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + 
p->mark = _mark; } { // star_expression expr_ty star_expression_var; if ( - (star_expression_var = star_expression_rule(p)) + (star_expression_var = star_expression_rule(p)) // star_expression ) { - res = star_expression_var; + _res = star_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_expression: '*' bitwise_or | expression @@ -4811,59 +4827,59 @@ star_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, star_expression_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, star_expression_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression expr_ty expression_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression ) { - res = expression_var; + _res = expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, star_expression_type, res); - return res; + _PyPegen_insert_memo(p, _mark, star_expression_type, _res); + return _res; } // star_named_expressions: ','.star_named_expression+ ','? @@ -4873,30 +4889,30 @@ star_named_expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.star_named_expression+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_72_rule(p)) + (a = _gather_72_rule(p)) // ','.star_named_expression+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_named_expression: '*' bitwise_or | named_expression @@ -4906,56 +4922,56 @@ star_named_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // named_expression expr_ty named_expression_var; if ( - (named_expression_var = named_expression_rule(p)) + (named_expression_var = named_expression_rule(p)) // named_expression ) { - res = named_expression_var; + _res = named_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // named_expression: NAME ':=' expression | expression !':=' | invalid_named_expression @@ -4965,72 +4981,72 @@ named_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME ':=' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 53)) + (_literal = _PyPegen_expect_token(p, 53)) // token=':=' && - (b = 
expression_rule(p)) + (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_NamedExpr ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_NamedExpr ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression !':=' expr_ty expression_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' ) { - res = expression_var; + _res = expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_named_expression void *invalid_named_expression_var; if ( - (invalid_named_expression_var = invalid_named_expression_rule(p)) + (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression ) { - res = invalid_named_expression_var; + _res = invalid_named_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // annotated_rhs: yield_expr | star_expressions @@ -5040,33 +5056,33 @@ annotated_rhs_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // expressions: expression ((',' expression))+ ','? | expression ',' | expression @@ -5076,86 +5092,86 @@ expressions_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression ((',' expression))+ ','? 
+ void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; asdl_seq * b; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = _loop1_74_rule(p)) + (b = _loop1_74_rule(p)) // ((',' expression))+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ',' + Token * _literal; expr_ty a; - Token * literal; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression expr_ty expression_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression ) { - res = expression_var; + _res = expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // expression: disjunction 'if' disjunction 'else' expression | disjunction | lambdef @@ -5165,79 +5181,79 @@ expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, expression_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, expression_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = 
p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // disjunction 'if' disjunction 'else' expression + Token * _keyword; + Token * _keyword_1; expr_ty a; expr_ty b; expr_ty c; - Token * keyword; - Token * keyword_1; if ( - (a = disjunction_rule(p)) + (a = disjunction_rule(p)) // disjunction && - (keyword = _PyPegen_expect_token(p, 510)) + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (b = disjunction_rule(p)) + (b = disjunction_rule(p)) // disjunction && - (keyword_1 = _PyPegen_expect_token(p, 516)) + (_keyword_1 = _PyPegen_expect_token(p, 516)) // token='else' && - (c = expression_rule(p)) + (c = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_IfExp ( b , a , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_IfExp ( b , a , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // disjunction expr_ty disjunction_var; if ( - (disjunction_var = disjunction_rule(p)) + (disjunction_var = disjunction_rule(p)) // disjunction ) { - res = disjunction_var; + _res = disjunction_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // lambdef expr_ty lambdef_var; if ( - (lambdef_var = lambdef_rule(p)) + (lambdef_var = lambdef_rule(p)) // lambdef ) { - res = lambdef_var; + _res = lambdef_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, expression_type, res); - return res; + _PyPegen_insert_memo(p, _mark, expression_type, _res); + return _res; } // lambdef: 'lambda' lambda_parameters? ':' expression @@ -5247,51 +5263,51 @@ lambdef_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'lambda' lambda_parameters? ':' expression + Token * _keyword; + Token * _literal; void *a; expr_ty b; - Token * keyword; - Token * literal; if ( - (keyword = _PyPegen_expect_token(p, 524)) + (_keyword = _PyPegen_expect_token(p, 524)) // token='lambda' && - (a = lambda_parameters_rule(p), 1) + (a = lambda_parameters_rule(p), 1) // lambda_parameters? 
&& - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Lambda ( ( a ) ? a : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Lambda ( ( a ) ? a : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_parameters: @@ -5306,110 +5322,110 @@ lambda_parameters_rule(Parser *p) if (p->error_indicator) { return NULL; } - arguments_ty res = NULL; - int mark = p->mark; + arguments_ty _res = NULL; + int _mark = p->mark; { // lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? asdl_seq* a; asdl_seq * b; asdl_seq * c; void *d; if ( - (a = lambda_slash_no_default_rule(p)) + (a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default && - (b = _loop0_75_rule(p)) + (b = _loop0_75_rule(p)) // lambda_param_no_default* && - (c = _loop0_76_rule(p)) + (c = _loop0_76_rule(p)) // lambda_param_with_default* && - (d = lambda_star_etc_rule(p), 1) + (d = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { - res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? SlashWithDefault* a; asdl_seq * b; void *c; if ( - (a = lambda_slash_with_default_rule(p)) + (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default && - (b = _loop0_77_rule(p)) + (b = _loop0_77_rule(p)) // lambda_param_with_default* && - (c = lambda_star_etc_rule(p), 1) + (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? asdl_seq * a; asdl_seq * b; void *c; if ( - (a = _loop1_78_rule(p)) + (a = _loop1_78_rule(p)) // lambda_param_no_default+ && - (b = _loop0_79_rule(p)) + (b = _loop0_79_rule(p)) // lambda_param_with_default* && - (c = lambda_star_etc_rule(p), 1) + (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
) { - res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_with_default+ lambda_star_etc? asdl_seq * a; void *b; if ( - (a = _loop1_80_rule(p)) + (a = _loop1_80_rule(p)) // lambda_param_with_default+ && - (b = lambda_star_etc_rule(p), 1) + (b = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_star_etc StarEtc* a; if ( - (a = lambda_star_etc_rule(p)) + (a = lambda_star_etc_rule(p)) // lambda_star_etc ) { - res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_slash_no_default: @@ -5421,52 +5437,52 @@ lambda_slash_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // lambda_param_no_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; - Token * literal; - Token * literal_1; if ( - (a = _loop1_81_rule(p)) + (a = _loop1_81_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_no_default+ '/' &':' + Token * _literal; asdl_seq * a; - Token * literal; if ( - (a = _loop1_82_rule(p)) + (a = _loop1_82_rule(p)) // lambda_param_no_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_slash_with_default: @@ -5478,58 +5494,58 @@ lambda_slash_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - SlashWithDefault* res = NULL; - int mark = p->mark; + SlashWithDefault* _res = NULL; + int _mark = p->mark; { // lambda_param_no_default* lambda_param_with_default+ '/' ',' + Token * _literal; + Token * _literal_1; asdl_seq * a; asdl_seq * b; - Token * literal; - Token * literal_1; if ( - (a = _loop0_83_rule(p)) + (a = _loop0_83_rule(p)) // lambda_param_no_default* && - (b = _loop1_84_rule(p)) + (b = _loop1_84_rule(p)) // lambda_param_with_default+ && - (literal = 
_PyPegen_expect_token(p, 17)) + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param_no_default* lambda_param_with_default+ '/' &':' + Token * _literal; asdl_seq * a; asdl_seq * b; - Token * literal; if ( - (a = _loop0_85_rule(p)) + (a = _loop0_85_rule(p)) // lambda_param_no_default* && - (b = _loop1_86_rule(p)) + (b = _loop1_86_rule(p)) // lambda_param_with_default+ && - (literal = _PyPegen_expect_token(p, 17)) + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = _PyPegen_slash_with_default ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_star_etc: @@ -5543,85 +5559,85 @@ lambda_star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - StarEtc* res = NULL; - int mark = p->mark; + StarEtc* _res = NULL; + int _mark = p->mark; { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? + Token * _literal; arg_ty a; asdl_seq * b; void *c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = lambda_param_no_default_rule(p)) + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default && - (b = _loop0_87_rule(p)) + (b = _loop0_87_rule(p)) // lambda_param_maybe_default* && - (c = lambda_kwds_rule(p), 1) + (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) { - res = _PyPegen_star_etc ( p , a , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '*' ',' lambda_param_maybe_default+ lambda_kwds? + Token * _literal; + Token * _literal_1; asdl_seq * b; void *c; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (literal_1 = _PyPegen_expect_token(p, 12)) + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_88_rule(p)) + (b = _loop1_88_rule(p)) // lambda_param_maybe_default+ && - (c = lambda_kwds_rule(p), 1) + (c = lambda_kwds_rule(p), 1) // lambda_kwds? 
) { - res = _PyPegen_star_etc ( p , NULL , b , c ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_kwds arg_ty a; if ( - (a = lambda_kwds_rule(p)) + (a = lambda_kwds_rule(p)) // lambda_kwds ) { - res = _PyPegen_star_etc ( p , NULL , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_star_etc ( p , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_lambda_star_etc void *invalid_lambda_star_etc_var; if ( - (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) + (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc ) { - res = invalid_lambda_star_etc_var; + _res = invalid_lambda_star_etc_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_kwds: '**' lambda_param_no_default @@ -5631,29 +5647,29 @@ lambda_kwds_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // '**' lambda_param_no_default + Token * _literal; arg_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = lambda_param_no_default_rule(p)) + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param_no_default: lambda_param ',' | lambda_param &':' @@ -5663,46 +5679,46 @@ lambda_param_no_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; { // lambda_param ',' + Token * _literal; arg_ty a; - Token * literal; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param &':' arg_ty a; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param_with_default: lambda_param default ',' | lambda_param default &':' @@ -5712,52 +5728,52 @@ lambda_param_with_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // lambda_param default ',' + Token * _literal; arg_ty a; expr_ty c; - Token * literal; if ( - (a = lambda_param_rule(p)) + (a = 
lambda_param_rule(p)) // lambda_param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param default &':' arg_ty a; expr_ty c; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p)) + (c = default_rule(p)) // default && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param_maybe_default: lambda_param default? ',' | lambda_param default? &':' @@ -5767,52 +5783,52 @@ lambda_param_maybe_default_rule(Parser *p) if (p->error_indicator) { return NULL; } - NameDefaultPair* res = NULL; - int mark = p->mark; + NameDefaultPair* _res = NULL; + int _mark = p->mark; { // lambda_param default? ',' + Token * _literal; arg_ty a; void *c; - Token * literal; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // lambda_param default? &':' arg_ty a; void *c; if ( - (a = lambda_param_rule(p)) + (a = lambda_param_rule(p)) // lambda_param && - (c = default_rule(p), 1) + (c = default_rule(p), 1) // default? 
&& - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' ) { - res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lambda_param: NAME @@ -5822,42 +5838,42 @@ lambda_param_rule(Parser *p) if (p->error_indicator) { return NULL; } - arg_ty res = NULL; - int mark = p->mark; + arg_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_arg ( a -> v . Name . id , NULL , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_arg ( a -> v . Name . 
id , NULL , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // disjunction: conjunction (('or' conjunction))+ | conjunction @@ -5867,59 +5883,59 @@ disjunction_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, disjunction_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, disjunction_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // conjunction (('or' conjunction))+ expr_ty a; asdl_seq * b; if ( - (a = conjunction_rule(p)) + (a = conjunction_rule(p)) // conjunction && - (b = _loop1_89_rule(p)) + (b = _loop1_89_rule(p)) // (('or' conjunction))+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BoolOp ( Or , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BoolOp ( Or , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // conjunction expr_ty conjunction_var; if ( - (conjunction_var = conjunction_rule(p)) + (conjunction_var = conjunction_rule(p)) // conjunction ) { - res = conjunction_var; + _res = conjunction_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, disjunction_type, res); - return res; + _PyPegen_insert_memo(p, _mark, disjunction_type, _res); + return _res; } // conjunction: inversion (('and' inversion))+ | inversion @@ -5929,59 +5945,59 @@ conjunction_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, conjunction_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, conjunction_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + 
UNUSED(_start_col_offset); // Only used by EXTRA macro { // inversion (('and' inversion))+ expr_ty a; asdl_seq * b; if ( - (a = inversion_rule(p)) + (a = inversion_rule(p)) // inversion && - (b = _loop1_90_rule(p)) + (b = _loop1_90_rule(p)) // (('and' inversion))+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BoolOp ( And , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BoolOp ( And , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // inversion expr_ty inversion_var; if ( - (inversion_var = inversion_rule(p)) + (inversion_var = inversion_rule(p)) // inversion ) { - res = inversion_var; + _res = inversion_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, conjunction_type, res); - return res; + _PyPegen_insert_memo(p, _mark, conjunction_type, _res); + return _res; } // inversion: 'not' inversion | comparison @@ -5991,59 +6007,59 @@ inversion_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, inversion_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, inversion_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'not' inversion + Token * _keyword; expr_ty a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 525)) + (_keyword = _PyPegen_expect_token(p, 525)) // token='not' && - (a = inversion_rule(p)) + (a = inversion_rule(p)) // inversion ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( Not , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( Not , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; 
} { // comparison expr_ty comparison_var; if ( - (comparison_var = comparison_rule(p)) + (comparison_var = comparison_rule(p)) // comparison ) { - res = comparison_var; + _res = comparison_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, inversion_type, res); - return res; + _PyPegen_insert_memo(p, _mark, inversion_type, _res); + return _res; } // comparison: bitwise_or compare_op_bitwise_or_pair+ | bitwise_or @@ -6053,56 +6069,56 @@ comparison_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or compare_op_bitwise_or_pair+ expr_ty a; asdl_seq * b; if ( - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or && - (b = _loop1_91_rule(p)) + (b = _loop1_91_rule(p)) // compare_op_bitwise_or_pair+ ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Compare ( a , CHECK ( _PyPegen_get_cmpops ( p , b ) ) , CHECK ( _PyPegen_get_exprs ( p , b ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Compare ( a , CHECK ( _PyPegen_get_cmpops ( p , b ) ) , CHECK ( _PyPegen_get_exprs ( p , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // bitwise_or expr_ty bitwise_or_var; if ( - (bitwise_or_var = bitwise_or_rule(p)) + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or ) { - res = bitwise_or_var; + _res = bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // compare_op_bitwise_or_pair: @@ -6122,121 +6138,121 @@ compare_op_bitwise_or_pair_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // eq_bitwise_or CmpopExprPair* eq_bitwise_or_var; if ( - (eq_bitwise_or_var = eq_bitwise_or_rule(p)) + (eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or ) { - res = eq_bitwise_or_var; + _res = eq_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // noteq_bitwise_or CmpopExprPair* noteq_bitwise_or_var; if ( - (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) + (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or ) { - res = noteq_bitwise_or_var; + _res = noteq_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // lte_bitwise_or 
CmpopExprPair* lte_bitwise_or_var; if ( - (lte_bitwise_or_var = lte_bitwise_or_rule(p)) + (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or ) { - res = lte_bitwise_or_var; + _res = lte_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // lt_bitwise_or CmpopExprPair* lt_bitwise_or_var; if ( - (lt_bitwise_or_var = lt_bitwise_or_rule(p)) + (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or ) { - res = lt_bitwise_or_var; + _res = lt_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // gte_bitwise_or CmpopExprPair* gte_bitwise_or_var; if ( - (gte_bitwise_or_var = gte_bitwise_or_rule(p)) + (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or ) { - res = gte_bitwise_or_var; + _res = gte_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // gt_bitwise_or CmpopExprPair* gt_bitwise_or_var; if ( - (gt_bitwise_or_var = gt_bitwise_or_rule(p)) + (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or ) { - res = gt_bitwise_or_var; + _res = gt_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // notin_bitwise_or CmpopExprPair* notin_bitwise_or_var; if ( - (notin_bitwise_or_var = notin_bitwise_or_rule(p)) + (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or ) { - res = notin_bitwise_or_var; + _res = notin_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // in_bitwise_or CmpopExprPair* in_bitwise_or_var; if ( - (in_bitwise_or_var = in_bitwise_or_rule(p)) + (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or ) { - res = in_bitwise_or_var; + _res = in_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // isnot_bitwise_or CmpopExprPair* isnot_bitwise_or_var; if ( - (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) + (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or ) { - res = isnot_bitwise_or_var; + _res = isnot_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // is_bitwise_or CmpopExprPair* is_bitwise_or_var; if ( - (is_bitwise_or_var = is_bitwise_or_rule(p)) + (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or ) { - res = is_bitwise_or_var; + _res = is_bitwise_or_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // eq_bitwise_or: '==' bitwise_or @@ -6246,29 +6262,29 @@ eq_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '==' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 27)) + (_literal = _PyPegen_expect_token(p, 27)) // token='==' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // noteq_bitwise_or: ('!=') bitwise_or @@ -6278,29 +6294,29 @@ noteq_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // ('!=') bitwise_or void *_tmp_92_var; expr_ty a; if ( - (_tmp_92_var = _tmp_92_rule(p)) + (_tmp_92_var = _tmp_92_rule(p)) // '!=' && - (a = 
bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lte_bitwise_or: '<=' bitwise_or @@ -6310,29 +6326,29 @@ lte_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '<=' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 29)) + (_literal = _PyPegen_expect_token(p, 29)) // token='<=' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // lt_bitwise_or: '<' bitwise_or @@ -6342,29 +6358,29 @@ lt_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '<' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 20)) + (_literal = _PyPegen_expect_token(p, 20)) // token='<' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // gte_bitwise_or: '>=' bitwise_or @@ -6374,29 +6390,29 @@ gte_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '>=' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 30)) + (_literal = _PyPegen_expect_token(p, 30)) // token='>=' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // gt_bitwise_or: '>' bitwise_or @@ -6406,29 +6422,29 @@ gt_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // '>' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 21)) + (_literal = _PyPegen_expect_token(p, 21)) // token='>' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); + if (_res == NULL && PyErr_Occurred()) { 
p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // notin_bitwise_or: 'not' 'in' bitwise_or @@ -6438,32 +6454,32 @@ notin_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'not' 'in' bitwise_or + Token * _keyword; + Token * _keyword_1; expr_ty a; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 525)) + (_keyword = _PyPegen_expect_token(p, 525)) // token='not' && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // in_bitwise_or: 'in' bitwise_or @@ -6473,29 +6489,29 @@ in_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'in' bitwise_or + Token * _keyword; expr_ty a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 518)) + (_keyword = _PyPegen_expect_token(p, 518)) // token='in' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , In , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , In , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // isnot_bitwise_or: 'is' 'not' bitwise_or @@ -6505,32 +6521,32 @@ isnot_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'is' 'not' bitwise_or + Token * _keyword; + Token * _keyword_1; expr_ty a; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 526)) + (_keyword = _PyPegen_expect_token(p, 526)) // token='is' && - (keyword_1 = _PyPegen_expect_token(p, 525)) + (_keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // is_bitwise_or: 'is' bitwise_or @@ -6540,29 +6556,29 @@ is_bitwise_or_rule(Parser *p) if (p->error_indicator) { return NULL; } - CmpopExprPair* res = NULL; - int mark = p->mark; + CmpopExprPair* _res = NULL; + int _mark = p->mark; { // 'is' bitwise_or + Token * _keyword; expr_ty a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 526)) + (_keyword = _PyPegen_expect_token(p, 526)) // token='is' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_cmpop_expr_pair ( p , Is , a ); - if (res == NULL && 
PyErr_Occurred()) { + _res = _PyPegen_cmpop_expr_pair ( p , Is , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6571,25 +6587,25 @@ static expr_ty bitwise_or_raw(Parser *); static expr_ty bitwise_or_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, bitwise_or_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_or_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_1 = _PyPegen_update_memo(p, mark, bitwise_or_type, res); + int tmpvar_1 = _PyPegen_update_memo(p, _mark, bitwise_or_type, _res); if (tmpvar_1) { - return res; + return _res; } - p->mark = mark; - void *raw = bitwise_or_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = bitwise_or_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty bitwise_or_raw(Parser *p) @@ -6597,59 +6613,59 @@ bitwise_or_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_or '|' bitwise_xor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or && - (literal = _PyPegen_expect_token(p, 18)) + (_literal = _PyPegen_expect_token(p, 18)) // token='|' && - (b = bitwise_xor_rule(p)) + (b = bitwise_xor_rule(p)) // bitwise_xor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , BitOr , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitOr , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // bitwise_xor expr_ty bitwise_xor_var; if ( - (bitwise_xor_var = bitwise_xor_rule(p)) + (bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor ) { - res = bitwise_xor_var; + _res = bitwise_xor_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6658,25 +6674,25 @@ static expr_ty bitwise_xor_raw(Parser *); static expr_ty 
bitwise_xor_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, bitwise_xor_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_xor_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_2 = _PyPegen_update_memo(p, mark, bitwise_xor_type, res); + int tmpvar_2 = _PyPegen_update_memo(p, _mark, bitwise_xor_type, _res); if (tmpvar_2) { - return res; + return _res; } - p->mark = mark; - void *raw = bitwise_xor_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = bitwise_xor_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty bitwise_xor_raw(Parser *p) @@ -6684,59 +6700,59 @@ bitwise_xor_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_xor '^' bitwise_and + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = bitwise_xor_rule(p)) + (a = bitwise_xor_rule(p)) // bitwise_xor && - (literal = _PyPegen_expect_token(p, 32)) + (_literal = _PyPegen_expect_token(p, 32)) // token='^' && - (b = bitwise_and_rule(p)) + (b = bitwise_and_rule(p)) // bitwise_and ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , BitXor , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitXor , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // bitwise_and expr_ty bitwise_and_var; if ( - (bitwise_and_var = bitwise_and_rule(p)) + (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and ) { - res = bitwise_and_var; + _res = bitwise_and_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6745,25 +6761,25 @@ static expr_ty bitwise_and_raw(Parser *); static expr_ty bitwise_and_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, bitwise_and_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_and_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_3 = 
_PyPegen_update_memo(p, mark, bitwise_and_type, res); + int tmpvar_3 = _PyPegen_update_memo(p, _mark, bitwise_and_type, _res); if (tmpvar_3) { - return res; + return _res; } - p->mark = mark; - void *raw = bitwise_and_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = bitwise_and_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty bitwise_and_raw(Parser *p) @@ -6771,59 +6787,59 @@ bitwise_and_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // bitwise_and '&' shift_expr + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = bitwise_and_rule(p)) + (a = bitwise_and_rule(p)) // bitwise_and && - (literal = _PyPegen_expect_token(p, 19)) + (_literal = _PyPegen_expect_token(p, 19)) // token='&' && - (b = shift_expr_rule(p)) + (b = shift_expr_rule(p)) // shift_expr ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , BitAnd , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitAnd , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // shift_expr expr_ty shift_expr_var; if ( - (shift_expr_var = shift_expr_rule(p)) + (shift_expr_var = shift_expr_rule(p)) // shift_expr ) { - res = shift_expr_var; + _res = shift_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6832,25 +6848,25 @@ static expr_ty shift_expr_raw(Parser *); static expr_ty shift_expr_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, shift_expr_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, shift_expr_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_4 = _PyPegen_update_memo(p, mark, shift_expr_type, res); + int tmpvar_4 = _PyPegen_update_memo(p, _mark, shift_expr_type, _res); if (tmpvar_4) { - return res; + return _res; } - p->mark = mark; - void *raw = shift_expr_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = shift_expr_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - 
resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty shift_expr_raw(Parser *p) @@ -6858,88 +6874,88 @@ shift_expr_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // shift_expr '<<' sum + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = shift_expr_rule(p)) + (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 33)) + (_literal = _PyPegen_expect_token(p, 33)) // token='<<' && - (b = sum_rule(p)) + (b = sum_rule(p)) // sum ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , LShift , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , LShift , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // shift_expr '>>' sum + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = shift_expr_rule(p)) + (a = shift_expr_rule(p)) // shift_expr && - (literal = _PyPegen_expect_token(p, 34)) + (_literal = _PyPegen_expect_token(p, 34)) // token='>>' && - (b = sum_rule(p)) + (b = sum_rule(p)) // sum ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , RShift , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , RShift , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // sum expr_ty sum_var; if ( - (sum_var = sum_rule(p)) + (sum_var = sum_rule(p)) // sum ) { - res = sum_var; + _res = sum_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -6948,25 +6964,25 @@ static expr_ty sum_raw(Parser *); static expr_ty sum_rule(Parser *p) { - expr_ty 
res = NULL; - if (_PyPegen_is_memoized(p, sum_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, sum_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_5 = _PyPegen_update_memo(p, mark, sum_type, res); + int tmpvar_5 = _PyPegen_update_memo(p, _mark, sum_type, _res); if (tmpvar_5) { - return res; + return _res; } - p->mark = mark; - void *raw = sum_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = sum_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty sum_raw(Parser *p) @@ -6974,88 +6990,88 @@ sum_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // sum '+' term + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = sum_rule(p)) + (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 14)) + (_literal = _PyPegen_expect_token(p, 14)) // token='+' && - (b = term_rule(p)) + (b = term_rule(p)) // term ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Add , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Add , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // sum '-' term + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = sum_rule(p)) + (a = sum_rule(p)) // sum && - (literal = _PyPegen_expect_token(p, 15)) + (_literal = _PyPegen_expect_token(p, 15)) // token='-' && - (b = term_rule(p)) + (b = term_rule(p)) // term ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Sub , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + 
_res = _Py_BinOp ( a , Sub , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term expr_ty term_var; if ( - (term_var = term_rule(p)) + (term_var = term_rule(p)) // term ) { - res = term_var; + _res = term_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // Left-recursive @@ -7070,25 +7086,25 @@ static expr_ty term_raw(Parser *); static expr_ty term_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, term_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, term_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_6 = _PyPegen_update_memo(p, mark, term_type, res); + int tmpvar_6 = _PyPegen_update_memo(p, _mark, term_type, _res); if (tmpvar_6) { - return res; + return _res; } - p->mark = mark; - void *raw = term_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = term_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty term_raw(Parser *p) @@ -7096,175 +7112,175 @@ term_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // term '*' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Mult , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Mult , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '/' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 17)) + (_literal = _PyPegen_expect_token(p, 17)) // token='/' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); 
- if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Div , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Div , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '//' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 47)) + (_literal = _PyPegen_expect_token(p, 47)) // token='//' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , FloorDiv , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , FloorDiv , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '%' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 24)) + (_literal = _PyPegen_expect_token(p, 24)) // token='%' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Mod , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Mod , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // term '@' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = term_rule(p)) + (a = term_rule(p)) // term && - (literal = _PyPegen_expect_token(p, 49)) + (_literal = _PyPegen_expect_token(p, 49)) // token='@' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = 
token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "The '@' operator is" , _Py_BinOp ( a , MatMult , b , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "The '@' operator is" , _Py_BinOp ( a , MatMult , b , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // factor expr_ty factor_var; if ( - (factor_var = factor_rule(p)) + (factor_var = factor_rule(p)) // factor ) { - res = factor_var; + _res = factor_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // factor: '+' factor | '-' factor | '~' factor | power @@ -7274,111 +7290,111 @@ factor_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, factor_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, factor_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '+' factor + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 14)) + (_literal = _PyPegen_expect_token(p, 14)) // token='+' && - (a = factor_rule(p)) + (a = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( UAdd , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( UAdd , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '-' factor + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 15)) + (_literal = _PyPegen_expect_token(p, 15)) // token='-' && - (a = factor_rule(p)) + (a = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = 
_Py_UnaryOp ( USub , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( USub , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '~' factor + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 31)) + (_literal = _PyPegen_expect_token(p, 31)) // token='~' && - (a = factor_rule(p)) + (a = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_UnaryOp ( Invert , a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( Invert , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // power expr_ty power_var; if ( - (power_var = power_rule(p)) + (power_var = power_rule(p)) // power ) { - res = power_var; + _res = power_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, factor_type, res); - return res; + _PyPegen_insert_memo(p, _mark, factor_type, _res); + return _res; } // power: await_primary '**' factor | await_primary @@ -7388,59 +7404,59 @@ power_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // await_primary '**' factor + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = await_primary_rule(p)) + (a = await_primary_rule(p)) // await_primary && - (literal = _PyPegen_expect_token(p, 35)) + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && - (b = factor_rule(p)) + (b = factor_rule(p)) // factor ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_BinOp ( a , Pow , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = 
_token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Pow , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // await_primary expr_ty await_primary_var; if ( - (await_primary_var = await_primary_rule(p)) + (await_primary_var = await_primary_rule(p)) // await_primary ) { - res = await_primary_var; + _res = await_primary_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // await_primary: AWAIT primary | primary @@ -7450,59 +7466,59 @@ await_primary_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, await_primary_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, await_primary_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // AWAIT primary expr_ty a; Token * await_var; if ( - (await_var = _PyPegen_expect_token(p, AWAIT)) + (await_var = _PyPegen_expect_token(p, AWAIT)) // token='AWAIT' && - (a = primary_rule(p)) + (a = primary_rule(p)) // primary ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = CHECK_VERSION ( 5 , "Await expressions are" , _Py_Await ( a , EXTRA ) ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Await expressions are" , _Py_Await ( a , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary expr_ty primary_var; if ( - (primary_var = primary_rule(p)) + (primary_var = primary_rule(p)) // primary ) { - res = primary_var; + _res = primary_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, await_primary_type, res); - return res; + _PyPegen_insert_memo(p, _mark, await_primary_type, _res); + return _res; } // Left-recursive @@ -7516,25 +7532,25 @@ static expr_ty primary_raw(Parser *); static expr_ty primary_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, primary_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, primary_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_7 = _PyPegen_update_memo(p, mark, primary_type, res); + int tmpvar_7 = _PyPegen_update_memo(p, _mark, primary_type, 
_res); if (tmpvar_7) { - return res; + return _res; } - p->mark = mark; - void *raw = primary_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = primary_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty primary_raw(Parser *p) @@ -7542,149 +7558,149 @@ primary_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // primary '.' NAME + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary genexp expr_ty a; expr_ty b; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (b = genexp_rule(p)) + (b = genexp_rule(p)) // genexp ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary '(' arguments? 
')' + Token * _literal; + Token * _literal_1; expr_ty a; void *b; - Token * literal; - Token * literal_1; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = arguments_rule(p), 1) + (b = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // primary '[' slices ']' + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( - (a = primary_rule(p)) + (a = primary_rule(p)) // primary && - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // atom expr_ty atom_var; if ( - (atom_var = atom_rule(p)) + (atom_var = atom_rule(p)) // atom ) { - res = atom_var; + _res = atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slices: slice !',' | ','.slice+ ','? 
@@ -7694,63 +7710,63 @@ slices_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // slice !',' expr_ty a; if ( - (a = slice_rule(p)) + (a = slice_rule(p)) // slice && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.slice+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_93_rule(p)) + (a = _gather_93_rule(p)) // ','.slice+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // slice: expression? ':' expression? [':' expression?] | expression @@ -7760,66 +7776,66 @@ slice_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // expression? ':' expression? [':' expression?] + Token * _literal; void *a; void *b; void *c; - Token * literal; if ( - (a = expression_rule(p), 1) + (a = expression_rule(p), 1) // expression? && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p), 1) + (b = expression_rule(p), 1) // expression? 
&& - (c = _tmp_95_rule(p), 1) + (c = _tmp_95_rule(p), 1) // [':' expression?] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Slice ( a , b , c , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Slice ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression expr_ty a; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // atom: @@ -7840,200 +7856,200 @@ atom_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty name_var; if ( - (name_var = _PyPegen_name_token(p)) + (name_var = _PyPegen_name_token(p)) // NAME ) { - res = name_var; + _res = name_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'True' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 527)) + (_keyword = _PyPegen_expect_token(p, 527)) // token='True' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_True , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_True , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'False' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 528)) + (_keyword = _PyPegen_expect_token(p, 528)) // token='False' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno 
= token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_False , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_False , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'None' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 529)) + (_keyword = _PyPegen_expect_token(p, 529)) // token='None' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_None , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_None , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '__new_parser__' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 530)) + (_keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' ) { - res = RAISE_SYNTAX_ERROR ( "You found it!" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "You found it!" 
); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // &STRING strings expr_ty strings_var; if ( _PyPegen_lookahead(1, _PyPegen_string_token, p) && - (strings_var = strings_rule(p)) + (strings_var = strings_rule(p)) // strings ) { - res = strings_var; + _res = strings_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // NUMBER expr_ty number_var; if ( - (number_var = _PyPegen_number_token(p)) + (number_var = _PyPegen_number_token(p)) // NUMBER ) { - res = number_var; + _res = number_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'(' (tuple | group | genexp) void *_tmp_96_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='(' && - (_tmp_96_var = _tmp_96_rule(p)) + (_tmp_96_var = _tmp_96_rule(p)) // tuple | group | genexp ) { - res = _tmp_96_var; + _res = _tmp_96_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'[' (list | listcomp) void *_tmp_97_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='[' && - (_tmp_97_var = _tmp_97_rule(p)) + (_tmp_97_var = _tmp_97_rule(p)) // list | listcomp ) { - res = _tmp_97_var; + _res = _tmp_97_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // &'{' (dict | set | dictcomp | setcomp) void *_tmp_98_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{' && - (_tmp_98_var = _tmp_98_rule(p)) + (_tmp_98_var = _tmp_98_rule(p)) // dict | set | dictcomp | setcomp ) { - res = _tmp_98_var; + _res = _tmp_98_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // '...' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 52)) + (_literal = _PyPegen_expect_token(p, 52)) // token='...' 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Constant ( Py_Ellipsis , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_Ellipsis , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // strings: STRING+ @@ -8043,29 +8059,29 @@ strings_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, strings_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, strings_type, &_res)) + return _res; + int _mark = p->mark; { // STRING+ asdl_seq * a; if ( - (a = _loop1_99_rule(p)) + (a = _loop1_99_rule(p)) // STRING+ ) { - res = _PyPegen_concatenate_strings ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_concatenate_strings ( p , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, strings_type, res); - return res; + _PyPegen_insert_memo(p, _mark, strings_type, _res); + return _res; } // list: '[' star_named_expressions? ']' @@ -8075,48 +8091,48 @@ list_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' star_named_expressions? ']' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = star_named_expressions_rule(p), 1) + (a = star_named_expressions_rule(p), 1) // star_named_expressions? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // listcomp: '[' named_expression for_if_clauses ']' | invalid_comprehension @@ -8126,62 +8142,62 @@ listcomp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '[' named_expression for_if_clauses ']' + Token * _literal; + Token * _literal_1; expr_ty a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_ListComp ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ListComp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_comprehension void *invalid_comprehension_var; if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { - res = invalid_comprehension_var; + _res = invalid_comprehension_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // tuple: '(' 
[star_named_expression ',' star_named_expressions?] ')' @@ -8191,48 +8207,48 @@ tuple_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' [star_named_expression ',' star_named_expressions?] ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_100_rule(p), 1) + (a = _tmp_100_rule(p), 1) // [star_named_expression ',' star_named_expressions?] && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // group: '(' (yield_expr | named_expression) ')' @@ -8242,32 +8258,32 @@ group_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; { // '(' (yield_expr | named_expression) ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_101_rule(p)) + (a = _tmp_101_rule(p)) // yield_expr | named_expression && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // genexp: '(' expression for_if_clauses ')' | invalid_comprehension @@ -8277,62 +8293,62 @@ genexp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - 
UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '(' expression for_if_clauses ')' + Token * _literal; + Token * _literal_1; expr_ty a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_GeneratorExp ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_GeneratorExp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_comprehension void *invalid_comprehension_var; if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { - res = invalid_comprehension_var; + _res = invalid_comprehension_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // set: '{' expressions_list '}' @@ -8342,48 +8358,48 @@ set_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expressions_list '}' + Token * _literal; + Token * _literal_1; asdl_seq* a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = expressions_list_rule(p)) + (a = expressions_list_rule(p)) // expressions_list && - (literal_1 = _PyPegen_expect_token(p, 26)) + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = 
_Py_Set ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Set ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // setcomp: '{' expression for_if_clauses '}' | invalid_comprehension @@ -8393,62 +8409,62 @@ setcomp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' expression for_if_clauses '}' + Token * _literal; + Token * _literal_1; expr_ty a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_SetComp ( a , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_SetComp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // invalid_comprehension void *invalid_comprehension_var; if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension ) { - res = invalid_comprehension_var; + _res = invalid_comprehension_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dict: '{' kvpairs? 
'}' @@ -8458,48 +8474,48 @@ dict_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' kvpairs? '}' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = kvpairs_rule(p), 1) + (a = kvpairs_rule(p), 1) // kvpairs? && - (literal_1 = _PyPegen_expect_token(p, 26)) + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Dict ( CHECK ( _PyPegen_get_keys ( p , a ) ) , CHECK ( _PyPegen_get_values ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Dict ( CHECK ( _PyPegen_get_keys ( p , a ) ) , CHECK ( _PyPegen_get_values ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // dictcomp: '{' kvpair for_if_clauses '}' @@ -8509,51 +8525,51 @@ dictcomp_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '{' kvpair for_if_clauses '}' + Token * _literal; + Token * _literal_1; KeyValuePair* a; asdl_seq* b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 25)) + (_literal = _PyPegen_expect_token(p, 25)) // token='{' && - (a = kvpair_rule(p)) + (a = kvpair_rule(p)) // kvpair && - (b = for_if_clauses_rule(p)) + (b = for_if_clauses_rule(p)) // for_if_clauses && - (literal_1 = _PyPegen_expect_token(p, 26)) + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int 
end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_DictComp ( a -> key , a -> value , b , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_DictComp ( a -> key , a -> value , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kvpairs: ','.kvpair+ ','? @@ -8563,30 +8579,30 @@ kvpairs_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.kvpair+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_102_rule(p)) + (a = _gather_102_rule(p)) // ','.kvpair+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kvpair: '**' bitwise_or | expression ':' expression @@ -8596,50 +8612,50 @@ kvpair_rule(Parser *p) if (p->error_indicator) { return NULL; } - KeyValuePair* res = NULL; - int mark = p->mark; + KeyValuePair* _res = NULL; + int _mark = p->mark; { // '**' bitwise_or + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = bitwise_or_rule(p)) + (a = bitwise_or_rule(p)) // bitwise_or ) { - res = _PyPegen_key_value_pair ( p , NULL , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_key_value_pair ( p , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ':' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { - res = _PyPegen_key_value_pair ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_key_value_pair ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // for_if_clauses: for_if_clause+ @@ -8649,22 +8665,22 @@ for_if_clauses_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // for_if_clause+ asdl_seq * _loop1_104_var; if ( - (_loop1_104_var = _loop1_104_rule(p)) + (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ ) { - res = _loop1_104_var; + _res = _loop1_104_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return 
_res; } // for_if_clause: @@ -8676,68 +8692,68 @@ for_if_clause_rule(Parser *p) if (p->error_indicator) { return NULL; } - comprehension_ty res = NULL; - int mark = p->mark; + comprehension_ty _res = NULL; + int _mark = p->mark; { // ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* + Token * _keyword; + Token * _keyword_1; expr_ty a; Token * async_var; expr_ty b; asdl_seq * c; - Token * keyword; - Token * keyword_1; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (keyword = _PyPegen_expect_token(p, 517)) + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (a = star_targets_rule(p)) + (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (b = disjunction_rule(p)) + (b = disjunction_rule(p)) // disjunction && - (c = _loop0_105_rule(p)) + (c = _loop0_105_rule(p)) // (('if' disjunction))* ) { - res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); - if (res == NULL && PyErr_Occurred()) { + _res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'for' star_targets 'in' disjunction (('if' disjunction))* + Token * _keyword; + Token * _keyword_1; expr_ty a; expr_ty b; asdl_seq * c; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 517)) + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' && - (a = star_targets_rule(p)) + (a = star_targets_rule(p)) // star_targets && - (keyword_1 = _PyPegen_expect_token(p, 518)) + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && - (b = disjunction_rule(p)) + (b = disjunction_rule(p)) // disjunction && - (c = _loop0_106_rule(p)) + (c = _loop0_106_rule(p)) // (('if' disjunction))* ) { - res = _Py_comprehension ( a , b , c , 0 , p -> arena ); - if (res == NULL && PyErr_Occurred()) { + _res = _Py_comprehension ( a , b , c , 0 , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // yield_expr: 'yield' 'from' expression | 'yield' star_expressions? 
@@ -8747,74 +8763,74 @@ yield_expr_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // 'yield' 'from' expression + Token * _keyword; + Token * _keyword_1; expr_ty a; - Token * keyword; - Token * keyword_1; if ( - (keyword = _PyPegen_expect_token(p, 504)) + (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' && - (keyword_1 = _PyPegen_expect_token(p, 514)) + (_keyword_1 = _PyPegen_expect_token(p, 514)) // token='from' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_YieldFrom ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_YieldFrom ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // 'yield' star_expressions? + Token * _keyword; void *a; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 504)) + (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' && - (a = star_expressions_rule(p), 1) + (a = star_expressions_rule(p), 1) // star_expressions? ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Yield ( a , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Yield ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // arguments: args ','? &')' | incorrect_arguments @@ -8824,46 +8840,46 @@ arguments_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, arguments_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, arguments_type, &_res)) + return _res; + int _mark = p->mark; { // args ','? 
&')' + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = args_rule(p)) + (a = args_rule(p)) // args && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // incorrect_arguments void *incorrect_arguments_var; if ( - (incorrect_arguments_var = incorrect_arguments_rule(p)) + (incorrect_arguments_var = incorrect_arguments_rule(p)) // incorrect_arguments ) { - res = incorrect_arguments_var; + _res = incorrect_arguments_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, arguments_type, res); - return res; + _PyPegen_insert_memo(p, _mark, arguments_type, _res); + return _res; } // args: starred_expression [',' args] | kwargs | named_expression [',' args] @@ -8873,94 +8889,94 @@ args_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // starred_expression [',' args] expr_ty a; void *b; if ( - (a = starred_expression_rule(p)) + (a = starred_expression_rule(p)) // starred_expression && - (b = _tmp_107_rule(p), 1) + (b = _tmp_107_rule(p), 1) // [',' args] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // kwargs asdl_seq* a; if ( - (a = kwargs_rule(p)) + (a = kwargs_rule(p)) // kwargs ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( _PyPegen_dummy_name ( p ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_extract_starred_exprs ( p , a ) ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_delete_starred_exprs ( p , a ) ) , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_extract_starred_exprs ( p , a ) ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_delete_starred_exprs ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // named_expression [',' args] expr_ty a; void *b; if ( - (a = named_expression_rule(p)) + (a = named_expression_rule(p)) // named_expression && - (b = _tmp_108_rule(p), 1) + (b = _tmp_108_rule(p), 1) // [',' args] ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . 
keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // kwargs: @@ -8973,54 +8989,54 @@ kwargs_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ + Token * _literal; asdl_seq * a; asdl_seq * b; - Token * literal; if ( - (a = _gather_109_rule(p)) + (a = _gather_109_rule(p)) // ','.kwarg_or_starred+ && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _gather_111_rule(p)) + (b = _gather_111_rule(p)) // ','.kwarg_or_double_starred+ ) { - res = _PyPegen_join_sequences ( p , a , b ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_join_sequences ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.kwarg_or_starred+ asdl_seq * _gather_113_var; if ( - (_gather_113_var = _gather_113_rule(p)) + (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ ) { - res = _gather_113_var; + _res = _gather_113_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // ','.kwarg_or_double_starred+ asdl_seq * _gather_115_var; if ( - (_gather_115_var = _gather_115_rule(p)) + (_gather_115_var = _gather_115_rule(p)) // ','.kwarg_or_double_starred+ ) { - res = _gather_115_var; + _res = _gather_115_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // starred_expression: '*' expression @@ -9030,188 +9046,210 @@ starred_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( a , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - 
res = NULL; + _res = NULL; done: - return res; + return _res; } -// kwarg_or_starred: NAME '=' expression | starred_expression +// kwarg_or_starred: NAME '=' expression | starred_expression | invalid_kwarg static KeywordOrStarred* kwarg_or_starred_rule(Parser *p) { if (p->error_indicator) { return NULL; } - KeywordOrStarred* res = NULL; - int mark = p->mark; + KeywordOrStarred* _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . id , b , EXTRA ) ) , 1 ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // starred_expression expr_ty a; if ( - (a = starred_expression_rule(p)) + (a = starred_expression_rule(p)) // starred_expression ) { - res = _PyPegen_keyword_or_starred ( p , a , 0 ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_keyword_or_starred ( p , a , 0 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; + } + { // invalid_kwarg + void *invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg + ) + { + _res = invalid_kwarg_var; + goto done; + } + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// kwarg_or_double_starred: NAME '=' expression | '**' expression +// kwarg_or_double_starred: NAME '=' expression | '**' expression | invalid_kwarg static KeywordOrStarred* kwarg_or_double_starred_rule(Parser *p) { if (p->error_indicator) { return NULL; } - KeywordOrStarred* res = NULL; - int mark = p->mark; + KeywordOrStarred* _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME '=' expression + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME && - (literal = _PyPegen_expect_token(p, 22)) + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && - (b = expression_rule(p)) + (b = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . id , b , EXTRA ) ) , 1 ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' expression + Token * _literal; expr_ty a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (_literal = _PyPegen_expect_token(p, 35)) // token='**' && - (a = expression_rule(p)) + (a = expression_rule(p)) // expression ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( NULL , a , EXTRA ) ) , 1 ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( NULL , a , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; + } + { // invalid_kwarg + void *invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg + ) + { + _res = invalid_kwarg_var; + goto done; + } + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_targets: star_target !',' | star_target ((',' star_target))* ','? @@ -9221,66 +9259,66 @@ star_targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // star_target !',' expr_ty a; if ( - (a = star_target_rule(p)) + (a = star_target_rule(p)) // star_target && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_target ((',' star_target))* ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings expr_ty a; asdl_seq * b; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = star_target_rule(p)) + (a = star_target_rule(p)) // star_target && - (b = _loop0_117_rule(p)) + (b = _loop0_117_rule(p)) // ((',' star_target))* && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_targets_seq: ','.star_target+ ','? @@ -9290,30 +9328,30 @@ star_targets_seq_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.star_target+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_118_rule(p)) + (a = _gather_118_rule(p)) // ','.star_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // star_target: @@ -9327,124 +9365,124 @@ star_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, star_target_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, star_target_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // '*' (!'*' star_target) + Token * _literal; void *a; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = _tmp_120_rule(p)) + (a = _tmp_120_rule(p)) // !'*' star_target ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Starred ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used 
by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // star_atom expr_ty star_atom_var; if ( - (star_atom_var = star_atom_rule(p)) + (star_atom_var = star_atom_rule(p)) // star_atom ) { - res = star_atom_var; + _res = star_atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, star_target_type, res); - return res; + _PyPegen_insert_memo(p, _mark, star_target_type, _res); + return _res; } // star_atom: @@ -9458,266 +9496,263 @@ star_atom_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + 
expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' star_target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = star_target_rule(p)) + (a = star_target_rule(p)) // star_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' star_targets_seq? ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = star_targets_seq_rule(p), 1) + (a = star_targets_seq_rule(p), 1) // star_targets_seq? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' star_targets_seq? ']' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = star_targets_seq_rule(p), 1) + (a = star_targets_seq_rule(p), 1) // star_targets_seq? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( a , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// inside_paren_ann_assign_target: -// | ann_assign_subscript_attribute_target -// | NAME -// | '(' inside_paren_ann_assign_target ')' +// single_target: single_subscript_attribute_target | NAME | '(' single_target ')' static expr_ty -inside_paren_ann_assign_target_rule(Parser *p) +single_target_rule(Parser *p) { if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; - { // ann_assign_subscript_attribute_target - expr_ty ann_assign_subscript_attribute_target_var; + expr_ty _res = NULL; + int _mark = p->mark; + { // single_subscript_attribute_target + expr_ty single_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target ) { - res = ann_assign_subscript_attribute_target_var; + _res = single_subscript_attribute_target_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - { // '(' inside_paren_ann_assign_target ')' + { // '(' single_target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = inside_paren_ann_assign_target_rule(p)) + (a = single_target_rule(p)) // single_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// ann_assign_subscript_attribute_target: +// single_subscript_attribute_target: // | t_primary '.' 
NAME !t_lookahead // | t_primary '[' slices ']' !t_lookahead static expr_ty -ann_assign_subscript_attribute_target_rule(Parser *p) +single_subscript_attribute_target_rule(Parser *p) { if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . 
id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // del_targets: ','.del_target+ ','? @@ -9727,35 +9762,35 @@ del_targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.del_target+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_121_rule(p)) + (a = _gather_121_rule(p)) // ','.del_target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // del_target: -// | t_primary '.' NAME !t_lookahead -// | t_primary '[' slices ']' !t_lookahead +// | t_primary '.' NAME &del_target_end +// | t_primary '[' slices ']' &del_target_end // | del_t_atom static expr_ty del_target_rule(Parser *p) @@ -9763,214 +9798,301 @@ del_target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, del_target_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, del_target_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro - { // t_primary '.' 
NAME !t_lookahead + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // t_primary '.' NAME &del_target_end + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && - _PyPegen_lookahead(0, t_lookahead_rule, p) + _PyPegen_lookahead(1, del_target_end_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - { // t_primary '[' slices ']' !t_lookahead + { // t_primary '[' slices ']' &del_target_end + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && - _PyPegen_lookahead(0, t_lookahead_rule, p) + _PyPegen_lookahead(1, del_target_end_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // del_t_atom expr_ty del_t_atom_var; if ( - (del_t_atom_var = del_t_atom_rule(p)) + (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom ) { - res = del_t_atom_var; + _res = del_t_atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, del_target_type, res); - return res; + _PyPegen_insert_memo(p, _mark, del_target_type, _res); + return _res; } -// del_t_atom: NAME | '(' del_target ')' | '(' del_targets? ')' | '[' del_targets? 
']' +// del_t_atom: +// | NAME &del_target_end +// | '(' del_target ')' +// | '(' del_targets? ')' +// | '[' del_targets? ']' +// | invalid_del_target static expr_ty del_t_atom_rule(Parser *p) { if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro - { // NAME + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME &del_target_end expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME + && + _PyPegen_lookahead(1, del_target_end_rule, p) ) { - res = _PyPegen_set_expr_context ( p , a , Del ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Del ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' del_target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = del_target_rule(p)) + (a = del_target_rule(p)) // del_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = _PyPegen_set_expr_context ( p , a , Del ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Del ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' del_targets? ')' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = del_targets_rule(p), 1) + (a = del_targets_rule(p), 1) // del_targets? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( a , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' del_targets? ']' + Token * _literal; + Token * _literal_1; void *a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (a = del_targets_rule(p), 1) + (a = del_targets_rule(p), 1) // del_targets? 
&& - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( a , Del , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; + } + { // invalid_del_target + void *invalid_del_target_var; + if ( + (invalid_del_target_var = invalid_del_target_rule(p)) // invalid_del_target + ) + { + _res = invalid_del_target_var; + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; +} + +// del_target_end: ')' | ']' | ',' | ';' | NEWLINE +static void * +del_target_end_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ')' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // ']' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // ',' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // ';' + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 13)) // token=';' + ) + { + _res = _literal; + goto done; + } + p->mark = _mark; + } + { // NEWLINE + Token * newline_var; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = newline_var; + goto done; + } + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // targets: ','.target+ ','? @@ -9980,30 +10102,30 @@ targets_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq* res = NULL; - int mark = p->mark; + asdl_seq* _res = NULL; + int _mark = p->mark; { // ','.target+ ','? + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (a = _gather_123_rule(p)) + (a = _gather_123_rule(p)) // ','.target+ && - (opt_var = _PyPegen_expect_token(p, 12), 1) + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // target: @@ -10016,98 +10138,98 @@ target_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, target_type, &res)) - return res; - int mark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, target_type, &_res)) + return _res; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME !t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . 
id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' !t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_atom expr_ty t_atom_var; if ( - (t_atom_var = t_atom_rule(p)) + (t_atom_var = t_atom_rule(p)) // t_atom ) { - res = t_atom_var; + _res = t_atom_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - _PyPegen_insert_memo(p, mark, target_type, res); - return res; + _PyPegen_insert_memo(p, _mark, target_type, _res); + return _res; } // Left-recursive @@ -10121,25 +10243,25 @@ static expr_ty t_primary_raw(Parser *); static expr_ty t_primary_rule(Parser *p) { - expr_ty res = NULL; - if (_PyPegen_is_memoized(p, t_primary_type, &res)) - return res; - int mark = p->mark; - int resmark = p->mark; + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, t_primary_type, &_res)) + return _res; + int _mark = p->mark; + int _resmark = p->mark; while (1) { - int tmpvar_8 = _PyPegen_update_memo(p, mark, t_primary_type, res); + int tmpvar_8 = _PyPegen_update_memo(p, _mark, t_primary_type, _res); if (tmpvar_8) { - return res; + return _res; } - p->mark = mark; - void *raw = t_primary_raw(p); - if (raw == NULL || p->mark <= resmark) + p->mark = _mark; + void *_raw = t_primary_raw(p); + if (_raw == NULL || p->mark <= _resmark) break; - resmark = p->mark; - res = raw; + _resmark = p->mark; + _res = _raw; } - p->mark = resmark; - return res; + p->mark = _resmark; + return _res; } static expr_ty t_primary_raw(Parser *p) @@ -10147,163 +10269,163 @@ t_primary_raw(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + 
UNUSED(_start_col_offset); // Only used by EXTRA macro { // t_primary '.' NAME &t_lookahead + Token * _literal; expr_ty a; expr_ty b; - Token * literal; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' && - (b = _PyPegen_name_token(p)) + (b = _PyPegen_name_token(p)) // NAME && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '[' slices ']' &t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; expr_ty b; - Token * literal; - Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = slices_rule(p)) + (b = slices_rule(p)) // slices && - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Subscript ( a , b , Load , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary genexp &t_lookahead expr_ty a; expr_ty b; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (b = genexp_rule(p)) + (b = genexp_rule(p)) // genexp && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = 
_token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // t_primary '(' arguments? ')' &t_lookahead + Token * _literal; + Token * _literal_1; expr_ty a; void *b; - Token * literal; - Token * literal_1; if ( - (a = t_primary_rule(p)) + (a = t_primary_rule(p)) // t_primary && - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = arguments_rule(p), 1) + (b = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // atom &t_lookahead expr_ty a; if ( - (a = atom_rule(p)) + (a = atom_rule(p)) // atom && _PyPegen_lookahead(1, t_lookahead_rule, p) ) { - res = a; - if (res == NULL && PyErr_Occurred()) { + _res = a; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // t_lookahead: '(' | '[' | '.' @@ -10313,44 +10435,44 @@ t_lookahead_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '(' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '.' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // t_atom: NAME | '(' target ')' | '(' targets? ')' | '[' targets? 
']' @@ -10360,113 +10482,113 @@ t_atom_rule(Parser *p) if (p->error_indicator) { return NULL; } - expr_ty res = NULL; - int mark = p->mark; + expr_ty _res = NULL; + int _mark = p->mark; if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { p->error_indicator = 1; return NULL; } - int start_lineno = p->tokens[mark]->lineno; - UNUSED(start_lineno); // Only used by EXTRA macro - int start_col_offset = p->tokens[mark]->col_offset; - UNUSED(start_col_offset); // Only used by EXTRA macro + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro { // NAME expr_ty a; if ( - (a = _PyPegen_name_token(p)) + (a = _PyPegen_name_token(p)) // NAME ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' target ')' + Token * _literal; + Token * _literal_1; expr_ty a; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = target_rule(p)) + (a = target_rule(p)) // target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = _PyPegen_set_expr_context ( p , a , Store ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' targets? ')' + Token * _literal; + Token * _literal_1; void *b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = targets_rule(p), 1) + (b = targets_rule(p), 1) // targets? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_Tuple ( b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // '[' targets? ']' + Token * _literal; + Token * _literal_1; void *b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // token='[' && - (b = targets_rule(p), 1) + (b = targets_rule(p), 1) // targets? 
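/* Editorial aside (not generated code): t_primary is left-recursive, so its
 * t_primary_rule() wrapper earlier in this hunk series cannot simply call
 * itself.  Instead it "grows the seed": it runs t_primary_raw() repeatedly,
 * answering the recursive reference from a memo of the best parse so far,
 * and stops as soon as a new attempt no longer consumes more input.  Below
 * is a minimal stand-alone sketch of that loop for the toy left-recursive
 * grammar  expr: expr '-' DIGIT | DIGIT  over a fixed single-digit input;
 * all names and types here (ToyResult, expr_raw) are invented for the
 * illustration. */
#include <stdio.h>

static const char *input = "1-2-3-4";

typedef struct { int ok; int value; int pos; } ToyResult;
static ToyResult memo;                  /* best result for expr at position 0 */

static ToyResult expr_raw(void)
{
    /* The recursive "expr" reference reads the memo instead of recursing, so
     * on the first pass only the plain DIGIT alternative (the seed) matches. */
    if (memo.ok && input[memo.pos] == '-') {
        char d = input[memo.pos + 1];
        if (d >= '0' && d <= '9') {
            ToyResult r = { 1, memo.value - (d - '0'), memo.pos + 2 };
            return r;
        }
    }
    if (input[0] >= '0' && input[0] <= '9') {
        ToyResult r = { 1, input[0] - '0', 1 };
        return r;
    }
    return (ToyResult){ 0, 0, 0 };
}

int main(void)
{
    /* Grow the seed, mirroring the while (1) loop in the generated wrapper:
     * keep a new result only while it consumes more input than the memo. */
    for (;;) {
        ToyResult r = expr_raw();
        if (!r.ok || (memo.ok && r.pos <= memo.pos))
            break;
        memo = r;
    }
    printf("consumed %d chars, value %d\n", memo.pos, memo.value);  /* 7, -8 */
    return 0;
}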
&& - (literal_1 = _PyPegen_expect_token(p, 10)) + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - Token *token = _PyPegen_get_last_nonnwhitespace_token(p); - if (token == NULL) { + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { return NULL; } - int end_lineno = token->end_lineno; - UNUSED(end_lineno); // Only used by EXTRA macro - int end_col_offset = token->end_col_offset; - UNUSED(end_col_offset); // Only used by EXTRA macro - res = _Py_List ( b , Store , EXTRA ); - if (res == NULL && PyErr_Occurred()) { + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // incorrect_arguments: @@ -10479,78 +10601,110 @@ incorrect_arguments_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // args ',' '*' + Token * _literal; + Token * _literal_1; expr_ty args_var; - Token * literal; - Token * literal_1; if ( - (args_var = args_rule(p)) + (args_var = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal_1 = _PyPegen_expect_token(p, 16)) + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' ) { - res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression for_if_clauses ',' [args | expression for_if_clauses] - expr_ty expression_var; + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; asdl_seq* for_if_clauses_var; - Token * literal; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (expression_var = expression_rule(p)) + (a = expression_rule(p)) // expression && - (for_if_clauses_var = for_if_clauses_rule(p)) + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (opt_var = _tmp_125_rule(p), 1) + (_opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] ) { - res = RAISE_SYNTAX_ERROR ( "Generator expression must be parenthesized" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // args ',' args + Token * _literal; expr_ty a; expr_ty args_var; - Token * literal; if ( - (a = args_rule(p)) + (a = args_rule(p)) // args && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (args_var = args_rule(p)) + (args_var = args_rule(p)) // args ) { - res = _PyPegen_arguments_parsing_error ( p , a ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_arguments_parsing_error ( p , a ); + if (_res == NULL && 
PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; +} + +// invalid_kwarg: expression '=' +static void * +invalid_kwarg_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // expression '=' + Token * _literal; + expr_ty a; + if ( + (a = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "expression cannot contain assignment, perhaps you meant \"==\"?" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; } // invalid_named_expression: expression ':=' expression @@ -10560,132 +10714,179 @@ invalid_named_expression_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // expression ':=' expression + Token * _literal; expr_ty a; expr_ty expression_var; - Token * literal; if ( - (a = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 53)) + (_literal = _PyPegen_expect_token(p, 53)) // token=':=' && - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression ) { - res = RAISE_SYNTAX_ERROR ( "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_assignment: // | list ':' // | tuple ':' +// | star_named_expression ',' star_named_expressions* ':' // | expression ':' expression ['=' annotated_rhs] -// | expression ('=' | augassign) (yield_expr | star_expressions) +// | star_expressions '=' (yield_expr | star_expressions) +// | star_expressions augassign (yield_expr | star_expressions) static void * invalid_assignment_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // list ':' - expr_ty list_var; - Token * literal; + Token * _literal; + expr_ty a; if ( - (list_var = list_rule(p)) + (a = list_rule(p)) // list && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = RAISE_SYNTAX_ERROR ( "only single target (not list) can be annotated" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not list) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // tuple ':' - Token * literal; - expr_ty tuple_var; + Token * _literal; + expr_ty a; + if ( + (a = tuple_rule(p)) // tuple + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + { // star_named_expression ',' star_named_expressions* 
':' + Token * _literal; + Token * _literal_1; + asdl_seq * _loop0_126_var; + expr_ty a; if ( - (tuple_var = tuple_rule(p)) + (a = star_named_expression_rule(p)) // star_named_expression + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (literal = _PyPegen_expect_token(p, 11)) + (_loop0_126_var = _loop0_126_rule(p)) // star_named_expressions* + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = RAISE_SYNTAX_ERROR ( "only single target (not tuple) can be annotated" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } { // expression ':' expression ['=' annotated_rhs] + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; expr_ty expression_var; - expr_ty expression_var_1; - Token * literal; - void *opt_var; - UNUSED(opt_var); // Silence compiler warnings if ( - (expression_var = expression_rule(p)) + (a = expression_rule(p)) // expression && - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (expression_var_1 = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (opt_var = _tmp_126_rule(p), 1) + (_opt_var = _tmp_127_rule(p), 1) // ['=' annotated_rhs] ) { - res = RAISE_SYNTAX_ERROR ( "illegal target for annotation" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "illegal target for annotation" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - { // expression ('=' | augassign) (yield_expr | star_expressions) - void *_tmp_127_var; + { // star_expressions '=' (yield_expr | star_expressions) + Token * _literal; void *_tmp_128_var; expr_ty a; if ( - (a = expression_rule(p)) + (a = star_expressions_rule(p)) // star_expressions + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions + ) + { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( _PyPegen_get_invalid_target ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( _PyPegen_get_invalid_target ( a ) ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + { // star_expressions augassign (yield_expr | star_expressions) + void *_tmp_129_var; + expr_ty a; + AugOperator* augassign_var; + if ( + (a = star_expressions_rule(p)) // star_expressions && - (_tmp_127_var = _tmp_127_rule(p)) + (augassign_var = augassign_rule(p)) // augassign && - (_tmp_128_var = _tmp_128_rule(p)) + (_tmp_129_var = _tmp_129_rule(p)) // yield_expr | star_expressions ) { - res = RAISE_SYNTAX_ERROR_NO_COL_OFFSET ( "cannot assign to %s" , _PyPegen_get_expr_name ( a ) ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "'%s' is an illegal expression for augmented assignment" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_block: NEWLINE !INDENT @@ -10695,66 +10896,63 @@ invalid_block_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; 
+ void * _res = NULL; + int _mark = p->mark; { // NEWLINE !INDENT Token * newline_var; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT ) { - res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// invalid_comprehension: ('[' | '(' | '{') '*' expression for_if_clauses +// invalid_comprehension: ('[' | '(' | '{') starred_expression for_if_clauses static void * invalid_comprehension_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; - { // ('[' | '(' | '{') '*' expression for_if_clauses - void *_tmp_129_var; - expr_ty expression_var; + void * _res = NULL; + int _mark = p->mark; + { // ('[' | '(' | '{') starred_expression for_if_clauses + void *_tmp_130_var; + expr_ty a; asdl_seq* for_if_clauses_var; - Token * literal; if ( - (_tmp_129_var = _tmp_129_rule(p)) + (_tmp_130_var = _tmp_130_rule(p)) // '[' | '(' | '{' && - (literal = _PyPegen_expect_token(p, 16)) + (a = starred_expression_rule(p)) // starred_expression && - (expression_var = expression_rule(p)) - && - (for_if_clauses_var = for_if_clauses_rule(p)) + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - res = RAISE_SYNTAX_ERROR ( "iterable unpacking cannot be used in comprehension" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "iterable unpacking cannot be used in comprehension" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_parameters: @@ -10765,32 +10963,32 @@ invalid_parameters_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // param_no_default* (slash_with_default | param_with_default+) param_no_default - asdl_seq * _loop0_130_var; - void *_tmp_131_var; + asdl_seq * _loop0_131_var; + void *_tmp_132_var; arg_ty param_no_default_var; if ( - (_loop0_130_var = _loop0_130_rule(p)) + (_loop0_131_var = _loop0_131_rule(p)) // param_no_default* && - (_tmp_131_var = _tmp_131_rule(p)) + (_tmp_132_var = _tmp_132_rule(p)) // slash_with_default | param_with_default+ && - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_star_etc: '*' (')' | ',' (')' | '**')) @@ -10800,29 +10998,29 @@ invalid_star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '*' 
(')' | ',' (')' | '**')) - void *_tmp_132_var; - Token * literal; + Token * _literal; + void *_tmp_133_var; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_132_var = _tmp_132_rule(p)) + (_tmp_133_var = _tmp_133_rule(p)) // ')' | ',' (')' | '**') ) { - res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_lambda_star_etc: '*' (':' | ',' (':' | '**')) @@ -10832,29 +11030,29 @@ invalid_lambda_star_etc_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '*' (':' | ',' (':' | '**')) - void *_tmp_133_var; - Token * literal; + Token * _literal; + void *_tmp_134_var; if ( - (literal = _PyPegen_expect_token(p, 16)) + (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_133_var = _tmp_133_rule(p)) + (_tmp_134_var = _tmp_134_rule(p)) // ':' | ',' (':' | '**') ) { - res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); - if (res == NULL && PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // invalid_double_type_comments: TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT @@ -10864,8 +11062,8 @@ invalid_double_type_comments_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT Token * indent_var; Token * newline_var; @@ -10873,29 +11071,60 @@ invalid_double_type_comments_rule(Parser *p) Token * type_comment_var; Token * type_comment_var_1; if ( - (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (type_comment_var_1 = _PyPegen_expect_token(p, TYPE_COMMENT)) + (type_comment_var_1 = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && - (newline_var_1 = _PyPegen_expect_token(p, NEWLINE)) + (newline_var_1 = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + && + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' + ) + { + _res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + return NULL; + } + goto done; + } + p->mark = _mark; + } + _res = NULL; + done: + return _res; +} + +// invalid_del_target: star_expression &del_target_end +static void * +invalid_del_target_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_expression &del_target_end + expr_ty a; + if ( + (a = star_expression_rule(p)) // star_expression && - (indent_var = _PyPegen_expect_token(p, INDENT)) + _PyPegen_lookahead(1, del_target_end_rule, p) ) { - res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); - if (res == NULL && 
PyErr_Occurred()) { + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_1: NEWLINE @@ -10905,46 +11134,46 @@ _loop0_1_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // NEWLINE Token * newline_var; while ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = newline_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = newline_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_1"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_1_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_1_type, _seq); + return _seq; } // _loop0_2: NEWLINE @@ -10954,46 +11183,46 @@ _loop0_2_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // NEWLINE Token * newline_var; while ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = newline_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = newline_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = 
_Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_2"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_2_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_2_type, _seq); + return _seq; } // _loop0_4: ',' expression @@ -11003,54 +11232,54 @@ _loop0_4_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_4"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_4_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_4_type, _seq); + return _seq; } // _gather_3: expression _loop0_4 @@ -11060,25 +11289,25 @@ _gather_3_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_4 expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_4_rule(p)) + (seq = _loop0_4_rule(p)) // _loop0_4 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_6: ',' expression @@ -11088,54 +11317,54 @@ _loop0_6_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = 
NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_6"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_6_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_6_type, _seq); + return _seq; } // _gather_5: expression _loop0_6 @@ -11145,25 +11374,25 @@ _gather_5_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_6 expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_6_rule(p)) + (seq = _loop0_6_rule(p)) // _loop0_6 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_8: ',' expression @@ -11173,54 +11402,54 @@ _loop0_8_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == 
children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_8"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_8_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_8_type, _seq); + return _seq; } // _gather_7: expression _loop0_8 @@ -11230,25 +11459,25 @@ _gather_7_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_8 expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_8_rule(p)) + (seq = _loop0_8_rule(p)) // _loop0_8 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_10: ',' expression @@ -11258,54 +11487,54 @@ _loop0_10_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_10"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) 
asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_10_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_10_type, _seq); + return _seq; } // _gather_9: expression _loop0_10 @@ -11315,25 +11544,25 @@ _gather_9_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // expression _loop0_10 expr_ty elem; asdl_seq * seq; if ( - (elem = expression_rule(p)) + (elem = expression_rule(p)) // expression && - (seq = _loop0_10_rule(p)) + (seq = _loop0_10_rule(p)) // _loop0_10 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_11: statement @@ -11343,50 +11572,50 @@ _loop1_11_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // statement asdl_seq* statement_var; while ( - (statement_var = statement_rule(p)) + (statement_var = statement_rule(p)) // statement ) { - res = statement_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = statement_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_11"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_11_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_11_type, _seq); + return _seq; } // _loop0_13: ';' small_stmt @@ -11396,54 +11625,54 @@ _loop0_13_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ';' small_stmt + Token * _literal; 
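/* Editorial aside (not generated code): every _loop0_* / _loop1_* helper in
 * this stretch of the file follows the same collection pattern: grow a heap
 * buffer of children by doubling its capacity whenever it fills up, then copy
 * the children into an arena-backed asdl_seq and memoize it; the matching
 * _gather_* rule simply prepends the first element to the loop's result.
 * A minimal stand-alone sketch of the doubling step, using plain
 * malloc/realloc/free instead of PyMem and asdl_seq (and, unlike the
 * generated code, keeping the old pointer if realloc fails): */
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    size_t capacity = 1, n = 0;
    int *children = malloc(capacity * sizeof(int));
    if (!children)
        return 1;

    for (int value = 0; value < 10; value++) {      /* one "match" per pass */
        if (n == capacity) {                        /* same doubling step as */
            capacity *= 2;                          /* the _loop0 rules      */
            int *tmp = realloc(children, capacity * sizeof(int));
            if (!tmp) {
                free(children);
                return 1;
            }
            children = tmp;
        }
        children[n++] = value;
    }

    printf("collected %zu items, capacity %zu\n", n, capacity);  /* 10, 16 */
    free(children);
    return 0;
}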
stmt_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 13)) + (_literal = _PyPegen_expect_token(p, 13)) // token=';' && - (elem = small_stmt_rule(p)) + (elem = small_stmt_rule(p)) // small_stmt ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_13"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_13_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_13_type, _seq); + return _seq; } // _gather_12: small_stmt _loop0_13 @@ -11453,25 +11682,25 @@ _gather_12_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // small_stmt _loop0_13 stmt_ty elem; asdl_seq * seq; if ( - (elem = small_stmt_rule(p)) + (elem = small_stmt_rule(p)) // small_stmt && - (seq = _loop0_13_rule(p)) + (seq = _loop0_13_rule(p)) // _loop0_13 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_14: 'import' | 'from' @@ -11481,33 +11710,33 @@ _tmp_14_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'import' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 513)) + (_keyword = _PyPegen_expect_token(p, 513)) // token='import' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // 'from' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_15: 'def' | '@' | ASYNC @@ -11517,44 +11746,44 @@ _tmp_15_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'def' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 522)) + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // '@' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 49)) + (_literal = _PyPegen_expect_token(p, 49)) 
// token='@' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC Token * async_var; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - res = async_var; + _res = async_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_16: 'class' | '@' @@ -11564,33 +11793,33 @@ _tmp_16_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'class' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 523)) + (_keyword = _PyPegen_expect_token(p, 523)) // token='class' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // '@' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 49)) + (_literal = _PyPegen_expect_token(p, 49)) // token='@' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_17: 'with' | ASYNC @@ -11600,33 +11829,33 @@ _tmp_17_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'with' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 519)) + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC Token * async_var; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - res = async_var; + _res = async_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_18: 'for' | ASYNC @@ -11636,33 +11865,33 @@ _tmp_18_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'for' - Token * keyword; + Token * _keyword; if ( - (keyword = _PyPegen_expect_token(p, 517)) + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' ) { - res = keyword; + _res = _keyword; goto done; } - p->mark = mark; + p->mark = _mark; } { // ASYNC Token * async_var; if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' ) { - res = async_var; + _res = async_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_19: '=' annotated_rhs @@ -11672,75 +11901,75 @@ _tmp_19_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' annotated_rhs + Token * _literal; expr_ty d; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && - (d = annotated_rhs_rule(p)) + (d = annotated_rhs_rule(p)) // annotated_rhs ) { - res = d; - if (res == NULL && PyErr_Occurred()) { + _res = d; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_20: '(' inside_paren_ann_assign_target ')' | ann_assign_subscript_attribute_target +// 
_tmp_20: '(' single_target ')' | single_subscript_attribute_target static void * _tmp_20_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; - { // '(' inside_paren_ann_assign_target ')' + void * _res = NULL; + int _mark = p->mark; + { // '(' single_target ')' + Token * _literal; + Token * _literal_1; expr_ty b; - Token * literal; - Token * literal_1; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (b = inside_paren_ann_assign_target_rule(p)) + (b = single_target_rule(p)) // single_target && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = b; - if (res == NULL && PyErr_Occurred()) { + _res = b; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - { // ann_assign_subscript_attribute_target - expr_ty ann_assign_subscript_attribute_target_var; + { // single_subscript_attribute_target + expr_ty single_subscript_attribute_target_var; if ( - (ann_assign_subscript_attribute_target_var = ann_assign_subscript_attribute_target_rule(p)) + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target ) { - res = ann_assign_subscript_attribute_target_var; + _res = single_subscript_attribute_target_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_21: '=' annotated_rhs @@ -11750,29 +11979,29 @@ _tmp_21_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' annotated_rhs + Token * _literal; expr_ty d; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && - (d = annotated_rhs_rule(p)) + (d = annotated_rhs_rule(p)) // annotated_rhs ) { - res = d; - if (res == NULL && PyErr_Occurred()) { + _res = d; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_22: (star_targets '=') @@ -11782,50 +12011,50 @@ _loop1_22_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (star_targets '=') - void *_tmp_134_var; + void *_tmp_135_var; while ( - (_tmp_134_var = _tmp_134_rule(p)) + (_tmp_135_var = _tmp_135_rule(p)) // star_targets '=' ) { - res = _tmp_134_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_135_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + 
p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_22"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_22_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_22_type, _seq); + return _seq; } // _tmp_23: yield_expr | star_expressions @@ -11835,33 +12064,33 @@ _tmp_23_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_24: yield_expr | star_expressions @@ -11871,33 +12100,33 @@ _tmp_24_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_26: ',' NAME @@ -11907,54 +12136,54 @@ _loop0_26_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' NAME + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if 
(!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_26"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_26_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_26_type, _seq); + return _seq; } // _gather_25: NAME _loop0_26 @@ -11964,25 +12193,25 @@ _gather_25_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // NAME _loop0_26 expr_ty elem; asdl_seq * seq; if ( - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME && - (seq = _loop0_26_rule(p)) + (seq = _loop0_26_rule(p)) // _loop0_26 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_28: ',' NAME @@ -11992,54 +12221,54 @@ _loop0_28_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' NAME + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_28"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_28_type, seq); - return seq; + for (int 
i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_28_type, _seq); + return _seq; } // _gather_27: NAME _loop0_28 @@ -12049,25 +12278,25 @@ _gather_27_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // NAME _loop0_28 expr_ty elem; asdl_seq * seq; if ( - (elem = _PyPegen_name_token(p)) + (elem = _PyPegen_name_token(p)) // NAME && - (seq = _loop0_28_rule(p)) + (seq = _loop0_28_rule(p)) // _loop0_28 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_29: ',' expression @@ -12077,29 +12306,29 @@ _tmp_29_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' expression - Token * literal; + Token * _literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_30: ('.' | '...') @@ -12109,46 +12338,46 @@ _loop0_30_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('.' | '...') - void *_tmp_135_var; + void *_tmp_136_var; while ( - (_tmp_135_var = _tmp_135_rule(p)) + (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' ) { - res = _tmp_135_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_136_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_30"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_30_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_30_type, _seq); + return _seq; } // _loop1_31: ('.' 
| '...') @@ -12158,50 +12387,50 @@ _loop1_31_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('.' | '...') - void *_tmp_136_var; + void *_tmp_137_var; while ( - (_tmp_136_var = _tmp_136_rule(p)) + (_tmp_137_var = _tmp_137_rule(p)) // '.' | '...' ) { - res = _tmp_136_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_137_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_31"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_31_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_31_type, _seq); + return _seq; } // _loop0_33: ',' import_from_as_name @@ -12211,54 +12440,54 @@ _loop0_33_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' import_from_as_name + Token * _literal; alias_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = import_from_as_name_rule(p)) + (elem = import_from_as_name_rule(p)) // import_from_as_name ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } 
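/*
 * Illustrative sketch, not part of the generated parse.c: the _loop0_* /
 * _gather_* pairs in this hunk implement "elem (',' elem)*".  The loop half
 * (like _loop0_33 here) collects everything that follows the separators, and
 * the gather half parses the first elem and prepends it with
 * _PyPegen_seq_insert_in_front.  The toy types and functions below are
 * invented stand-ins for the pegen machinery, shown only to make that split
 * visible.
 */
#include <stdio.h>
#include <string.h>

#define TOY_MAX 16

typedef struct { const char **tokens; int mark; } ToyParser;

static const char *toy_name(ToyParser *p)           /* NAME            */
{
    const char *tok = p->tokens[p->mark];
    if (tok && strcmp(tok, ",") != 0) { p->mark++; return tok; }
    return NULL;
}

static int toy_comma(ToyParser *p)                   /* token=','       */
{
    const char *tok = p->tokens[p->mark];
    if (tok && strcmp(tok, ",") == 0) { p->mark++; return 1; }
    return 0;
}

/* _loop0-style helper: zero or more "',' NAME", keeping only the NAMEs. */
static int toy_loop0(ToyParser *p, const char **out)
{
    int n = 0;
    int mark = p->mark;
    const char *elem;
    while (n < TOY_MAX && toy_comma(p) && (elem = toy_name(p)) != NULL) {
        out[n++] = elem;
        mark = p->mark;              /* remember the last success        */
    }
    p->mark = mark;                  /* back out of a dangling ','       */
    return n;
}

/* _gather-style helper: NAME _loop0, first element inserted in front. */
static int toy_gather(ToyParser *p, const char **out)
{
    const char *first = toy_name(p);
    if (first == NULL) return 0;
    int rest = toy_loop0(p, out + 1);
    out[0] = first;
    return rest + 1;
}

int main(void)
{
    const char *toks[] = {"a", ",", "b", ",", "c", NULL};
    const char *names[TOY_MAX + 1];
    ToyParser p = {toks, 0};
    int n = toy_gather(&p, names);
    for (int i = 0; i < n; i++) printf("%s%s", names[i], i + 1 < n ? " " : "\n");
    return 0;
}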
- asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_33"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_33_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_33_type, _seq); + return _seq; } // _gather_32: import_from_as_name _loop0_33 @@ -12268,25 +12497,25 @@ _gather_32_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // import_from_as_name _loop0_33 alias_ty elem; asdl_seq * seq; if ( - (elem = import_from_as_name_rule(p)) + (elem = import_from_as_name_rule(p)) // import_from_as_name && - (seq = _loop0_33_rule(p)) + (seq = _loop0_33_rule(p)) // _loop0_33 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_34: 'as' NAME @@ -12296,29 +12525,29 @@ _tmp_34_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' NAME - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = _PyPegen_name_token(p)) + (z = _PyPegen_name_token(p)) // NAME ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_36: ',' dotted_as_name @@ -12328,54 +12557,54 @@ _loop0_36_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' dotted_as_name + Token * _literal; alias_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = dotted_as_name_rule(p)) + (elem = dotted_as_name_rule(p)) // dotted_as_name ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + 
p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_36"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_36_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_36_type, _seq); + return _seq; } // _gather_35: dotted_as_name _loop0_36 @@ -12385,25 +12614,25 @@ _gather_35_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // dotted_as_name _loop0_36 alias_ty elem; asdl_seq * seq; if ( - (elem = dotted_as_name_rule(p)) + (elem = dotted_as_name_rule(p)) // dotted_as_name && - (seq = _loop0_36_rule(p)) + (seq = _loop0_36_rule(p)) // _loop0_36 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_37: 'as' NAME @@ -12413,29 +12642,29 @@ _tmp_37_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' NAME - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = _PyPegen_name_token(p)) + (z = _PyPegen_name_token(p)) // NAME ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_39: ',' with_item @@ -12445,54 +12674,54 @@ _loop0_39_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq 
= _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_39"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_39_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_39_type, _seq); + return _seq; } // _gather_38: with_item _loop0_39 @@ -12502,25 +12731,25 @@ _gather_38_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_39 withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_39_rule(p)) + (seq = _loop0_39_rule(p)) // _loop0_39 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_41: ',' with_item @@ -12530,54 +12759,54 @@ _loop0_41_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_41"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_41_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_41_type, _seq); + return _seq; } // _gather_40: with_item _loop0_41 @@ -12587,25 +12816,25 @@ _gather_40_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = 
NULL; + int _mark = p->mark; { // with_item _loop0_41 withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_41_rule(p)) + (seq = _loop0_41_rule(p)) // _loop0_41 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_43: ',' with_item @@ -12615,54 +12844,54 @@ _loop0_43_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_43"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_43_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_43_type, _seq); + return _seq; } // _gather_42: with_item _loop0_43 @@ -12672,25 +12901,25 @@ _gather_42_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_43 withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_43_rule(p)) + (seq = _loop0_43_rule(p)) // _loop0_43 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_45: ',' with_item @@ -12700,54 +12929,54 @@ _loop0_45_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + 
int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' with_item + Token * _literal; withitem_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_45"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_45_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_45_type, _seq); + return _seq; } // _gather_44: with_item _loop0_45 @@ -12757,25 +12986,25 @@ _gather_44_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // with_item _loop0_45 withitem_ty elem; asdl_seq * seq; if ( - (elem = with_item_rule(p)) + (elem = with_item_rule(p)) // with_item && - (seq = _loop0_45_rule(p)) + (seq = _loop0_45_rule(p)) // _loop0_45 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_46: 'as' target @@ -12785,29 +13014,29 @@ _tmp_46_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'as' target - Token * keyword; + Token * _keyword; expr_ty t; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (t = target_rule(p)) + (t = target_rule(p)) // target ) { - res = t; - if (res == NULL && PyErr_Occurred()) { + _res = t; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_47: except_block @@ -12817,82 +13046,82 @@ _loop1_47_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = 
PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // except_block excepthandler_ty except_block_var; while ( - (except_block_var = except_block_rule(p)) + (except_block_var = except_block_rule(p)) // except_block ) { - res = except_block_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = except_block_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_47"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_47_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_47_type, _seq); + return _seq; } -// _tmp_48: 'as' target +// _tmp_48: 'as' NAME static void * _tmp_48_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; - { // 'as' target - Token * keyword; + void * _res = NULL; + int _mark = p->mark; + { // 'as' NAME + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 531)) + (_keyword = _PyPegen_expect_token(p, 531)) // token='as' && - (z = target_rule(p)) + (z = _PyPegen_name_token(p)) // NAME ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_49: 'from' expression @@ -12902,29 +13131,29 @@ _tmp_49_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'from' expression - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 514)) + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_50: '->' expression @@ -12934,29 +13163,29 @@ _tmp_50_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '->' expression - Token * literal; + Token * _literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) + (_literal = _PyPegen_expect_token(p, 51)) // token='->' && - (z = expression_rule(p)) + (z = 
expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_51: '->' expression @@ -12966,29 +13195,29 @@ _tmp_51_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '->' expression - Token * literal; + Token * _literal; expr_ty z; if ( - (literal = _PyPegen_expect_token(p, 51)) + (_literal = _PyPegen_expect_token(p, 51)) // token='->' && - (z = expression_rule(p)) + (z = expression_rule(p)) // expression ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_52: NEWLINE INDENT @@ -12998,25 +13227,25 @@ _tmp_52_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // NEWLINE INDENT Token * indent_var; Token * newline_var; if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' && - (indent_var = _PyPegen_expect_token(p, INDENT)) + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { - res = _PyPegen_dummy_name(p, newline_var, indent_var); + _res = _PyPegen_dummy_name(p, newline_var, indent_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_53: param_no_default @@ -13026,46 +13255,46 @@ _loop0_53_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_53"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_53_type, seq); - return seq; + for (int i = 0; i < _n; i++) 
asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_53_type, _seq); + return _seq; } // _loop0_54: param_with_default @@ -13075,46 +13304,46 @@ _loop0_54_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_54"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_54_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_54_type, _seq); + return _seq; } // _loop0_55: param_with_default @@ -13124,46 +13353,46 @@ _loop0_55_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, 
p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_55"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_55_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_55_type, _seq); + return _seq; } // _loop1_56: param_no_default @@ -13173,50 +13402,50 @@ _loop1_56_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_56"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_56_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_56_type, _seq); + return _seq; } // _loop0_57: param_with_default @@ -13226,46 +13455,46 @@ _loop0_57_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == 
_children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_57"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_57_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_57_type, _seq); + return _seq; } // _loop1_58: param_with_default @@ -13275,50 +13504,50 @@ _loop1_58_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_58"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_58_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_58_type, _seq); + return _seq; } // _loop1_59: param_no_default @@ -13328,50 +13557,50 @@ _loop1_59_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + 
ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_59"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_59_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_59_type, _seq); + return _seq; } // _loop1_60: param_no_default @@ -13381,50 +13610,50 @@ _loop1_60_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_60"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_60_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_60_type, _seq); + return _seq; } // _loop0_61: param_no_default @@ -13434,46 +13663,46 
@@ _loop0_61_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_61"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_61_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_61_type, _seq); + return _seq; } // _loop1_62: param_with_default @@ -13483,50 +13712,50 @@ _loop1_62_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_62"); - PyMem_Free(children); + 
PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_62_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_62_type, _seq); + return _seq; } // _loop0_63: param_no_default @@ -13536,46 +13765,46 @@ _loop0_63_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_63"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_63_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_63_type, _seq); + return _seq; } // _loop1_64: param_with_default @@ -13585,50 +13814,50 @@ _loop1_64_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = 
p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_64"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_64_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_64_type, _seq); + return _seq; } // _loop0_65: param_maybe_default @@ -13638,46 +13867,46 @@ _loop0_65_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( - (param_maybe_default_var = param_maybe_default_rule(p)) + (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default ) { - res = param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_65"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_65_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_65_type, _seq); + return _seq; } // _loop1_66: param_maybe_default @@ -13687,50 +13916,50 @@ _loop1_66_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_maybe_default NameDefaultPair* param_maybe_default_var; while ( - (param_maybe_default_var = param_maybe_default_rule(p)) + (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default ) 
{ - res = param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_66"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_66_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_66_type, _seq); + return _seq; } // _loop1_67: ('@' named_expression NEWLINE) @@ -13740,50 +13969,50 @@ _loop1_67_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('@' named_expression NEWLINE) - void *_tmp_137_var; + void *_tmp_138_var; while ( - (_tmp_137_var = _tmp_137_rule(p)) + (_tmp_138_var = _tmp_138_rule(p)) // '@' named_expression NEWLINE ) { - res = _tmp_137_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_138_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_67"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_67_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_67_type, _seq); + return _seq; } // _tmp_68: '(' arguments? ')' @@ -13793,32 +14022,32 @@ _tmp_68_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '(' arguments? 
')' - Token * literal; - Token * literal_1; + Token * _literal; + Token * _literal_1; void *z; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (z = arguments_rule(p), 1) + (z = arguments_rule(p), 1) // arguments? && - (literal_1 = _PyPegen_expect_token(p, 8)) + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_70: ',' star_expression @@ -13828,54 +14057,54 @@ _loop0_70_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' star_expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = star_expression_rule(p)) + (elem = star_expression_rule(p)) // star_expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_70"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_70_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_70_type, _seq); + return _seq; } // _gather_69: star_expression _loop0_70 @@ -13885,25 +14114,25 @@ _gather_69_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // star_expression _loop0_70 expr_ty elem; asdl_seq * seq; if ( - (elem = star_expression_rule(p)) + (elem = star_expression_rule(p)) // star_expression && - (seq = _loop0_70_rule(p)) + (seq = _loop0_70_rule(p)) // _loop0_70 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_71: (',' star_expression) @@ -13913,50 +14142,50 @@ 
_loop1_71_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (',' star_expression) - void *_tmp_138_var; + void *_tmp_139_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) + (_tmp_139_var = _tmp_139_rule(p)) // ',' star_expression ) { - res = _tmp_138_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_139_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_71"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_71_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_71_type, _seq); + return _seq; } // _loop0_73: ',' star_named_expression @@ -13966,54 +14195,54 @@ _loop0_73_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' star_named_expression + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = star_named_expression_rule(p)) + (elem = star_named_expression_rule(p)) // star_named_expression ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - 
asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_73"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_73_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_73_type, _seq); + return _seq; } // _gather_72: star_named_expression _loop0_73 @@ -14023,25 +14252,25 @@ _gather_72_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // star_named_expression _loop0_73 expr_ty elem; asdl_seq * seq; if ( - (elem = star_named_expression_rule(p)) + (elem = star_named_expression_rule(p)) // star_named_expression && - (seq = _loop0_73_rule(p)) + (seq = _loop0_73_rule(p)) // _loop0_73 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_74: (',' expression) @@ -14051,50 +14280,50 @@ _loop1_74_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (',' expression) - void *_tmp_139_var; + void *_tmp_140_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) + (_tmp_140_var = _tmp_140_rule(p)) // ',' expression ) { - res = _tmp_139_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_140_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_74"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_74_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_74_type, _seq); + return _seq; } // _loop0_75: lambda_param_no_default @@ -14104,46 +14333,46 @@ _loop0_75_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if 
(!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_75"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_75_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_75_type, _seq); + return _seq; } // _loop0_76: lambda_param_with_default @@ -14153,46 +14382,46 @@ _loop0_76_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_76"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_76_type, 
seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_76_type, _seq); + return _seq; } // _loop0_77: lambda_param_with_default @@ -14202,46 +14431,46 @@ _loop0_77_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_77"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_77_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_77_type, _seq); + return _seq; } // _loop1_78: lambda_param_no_default @@ -14251,50 +14480,50 @@ _loop1_78_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = 
p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_78"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_78_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_78_type, _seq); + return _seq; } // _loop0_79: lambda_param_with_default @@ -14304,46 +14533,46 @@ _loop0_79_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_79"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_79_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_79_type, _seq); + return _seq; } // _loop1_80: lambda_param_with_default @@ -14353,50 +14582,50 @@ _loop1_80_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = 
lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_80"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_80_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_80_type, _seq); + return _seq; } // _loop1_81: lambda_param_no_default @@ -14406,50 +14635,50 @@ _loop1_81_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_81"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_81_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_81_type, _seq); + return _seq; } // _loop1_82: lambda_param_no_default @@ -14459,50 +14688,50 @@ _loop1_82_rule(Parser *p) if 
(p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_82"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_82_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_82_type, _seq); + return _seq; } // _loop0_83: lambda_param_no_default @@ -14512,46 +14741,46 @@ _loop0_83_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { 
PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_83"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_83_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_83_type, _seq); + return _seq; } // _loop1_84: lambda_param_with_default @@ -14561,50 +14790,50 @@ _loop1_84_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_84"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_84_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_84_type, _seq); + return _seq; } // _loop0_85: lambda_param_no_default @@ -14614,46 +14843,46 @@ _loop0_85_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_no_default arg_ty lambda_param_no_default_var; while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) { - res = lambda_param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, 
children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_85"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_85_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_85_type, _seq); + return _seq; } // _loop1_86: lambda_param_with_default @@ -14663,50 +14892,50 @@ _loop1_86_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_with_default NameDefaultPair* lambda_param_with_default_var; while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default ) { - res = lambda_param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_86"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_86_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_86_type, _seq); + return _seq; } // _loop0_87: lambda_param_maybe_default @@ -14716,46 +14945,46 @@ _loop0_87_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void 
*)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { - res = lambda_param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_87"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_87_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_87_type, _seq); + return _seq; } // _loop1_88: lambda_param_maybe_default @@ -14765,50 +14994,50 @@ _loop1_88_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // lambda_param_maybe_default NameDefaultPair* lambda_param_maybe_default_var; while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default ) { - res = lambda_param_maybe_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = lambda_param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_88"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, 
start_mark, _loop1_88_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_88_type, _seq); + return _seq; } // _loop1_89: ('or' conjunction) @@ -14818,50 +15047,50 @@ _loop1_89_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('or' conjunction) - void *_tmp_140_var; + void *_tmp_141_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) + (_tmp_141_var = _tmp_141_rule(p)) // 'or' conjunction ) { - res = _tmp_140_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_141_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_89"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_89_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_89_type, _seq); + return _seq; } // _loop1_90: ('and' inversion) @@ -14871,50 +15100,50 @@ _loop1_90_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('and' inversion) - void *_tmp_141_var; + void *_tmp_142_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) + (_tmp_142_var = _tmp_142_rule(p)) // 'and' inversion ) { - res = _tmp_141_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_142_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - 
PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_90"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_90_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_90_type, _seq); + return _seq; } // _loop1_91: compare_op_bitwise_or_pair @@ -14924,50 +15153,50 @@ _loop1_91_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // compare_op_bitwise_or_pair CmpopExprPair* compare_op_bitwise_or_pair_var; while ( - (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) + (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair ) { - res = compare_op_bitwise_or_pair_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = compare_op_bitwise_or_pair_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_91"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_91_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_91_type, _seq); + return _seq; } // _tmp_92: '!=' @@ -14977,26 +15206,26 @@ _tmp_92_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '!=' Token * tok; if ( - (tok = _PyPegen_expect_token(p, 28)) + (tok = _PyPegen_expect_token(p, 28)) // token='!=' ) { - res = _PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_check_barry_as_flufl ( p ) ? 
NULL : tok; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_94: ',' slice @@ -15006,54 +15235,54 @@ _loop0_94_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' slice + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = slice_rule(p)) + (elem = slice_rule(p)) // slice ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_94"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_94_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_94_type, _seq); + return _seq; } // _gather_93: slice _loop0_94 @@ -15063,25 +15292,25 @@ _gather_93_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // slice _loop0_94 expr_ty elem; asdl_seq * seq; if ( - (elem = slice_rule(p)) + (elem = slice_rule(p)) // slice && - (seq = _loop0_94_rule(p)) + (seq = _loop0_94_rule(p)) // _loop0_94 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_95: ':' expression? @@ -15091,29 +15320,29 @@ _tmp_95_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ':' expression? + Token * _literal; void *d; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' && - (d = expression_rule(p), 1) + (d = expression_rule(p), 1) // expression? 
) { - res = d; - if (res == NULL && PyErr_Occurred()) { + _res = d; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_96: tuple | group | genexp @@ -15123,44 +15352,44 @@ _tmp_96_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // tuple expr_ty tuple_var; if ( - (tuple_var = tuple_rule(p)) + (tuple_var = tuple_rule(p)) // tuple ) { - res = tuple_var; + _res = tuple_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // group expr_ty group_var; if ( - (group_var = group_rule(p)) + (group_var = group_rule(p)) // group ) { - res = group_var; + _res = group_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // genexp expr_ty genexp_var; if ( - (genexp_var = genexp_rule(p)) + (genexp_var = genexp_rule(p)) // genexp ) { - res = genexp_var; + _res = genexp_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_97: list | listcomp @@ -15170,33 +15399,33 @@ _tmp_97_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // list expr_ty list_var; if ( - (list_var = list_rule(p)) + (list_var = list_rule(p)) // list ) { - res = list_var; + _res = list_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // listcomp expr_ty listcomp_var; if ( - (listcomp_var = listcomp_rule(p)) + (listcomp_var = listcomp_rule(p)) // listcomp ) { - res = listcomp_var; + _res = listcomp_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_98: dict | set | dictcomp | setcomp @@ -15206,55 +15435,55 @@ _tmp_98_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // dict expr_ty dict_var; if ( - (dict_var = dict_rule(p)) + (dict_var = dict_rule(p)) // dict ) { - res = dict_var; + _res = dict_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // set expr_ty set_var; if ( - (set_var = set_rule(p)) + (set_var = set_rule(p)) // set ) { - res = set_var; + _res = set_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // dictcomp expr_ty dictcomp_var; if ( - (dictcomp_var = dictcomp_rule(p)) + (dictcomp_var = dictcomp_rule(p)) // dictcomp ) { - res = dictcomp_var; + _res = dictcomp_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // setcomp expr_ty setcomp_var; if ( - (setcomp_var = setcomp_rule(p)) + (setcomp_var = setcomp_rule(p)) // setcomp ) { - res = setcomp_var; + _res = setcomp_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_99: STRING @@ -15264,50 +15493,50 @@ _loop1_99_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // STRING expr_ty string_var; while 
( - (string_var = _PyPegen_string_token(p)) + (string_var = _PyPegen_string_token(p)) // STRING ) { - res = string_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = string_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_99"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_99_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_99_type, _seq); + return _seq; } // _tmp_100: star_named_expression ',' star_named_expressions? @@ -15317,32 +15546,32 @@ _tmp_100_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // star_named_expression ',' star_named_expressions? - Token * literal; + Token * _literal; expr_ty y; void *z; if ( - (y = star_named_expression_rule(p)) + (y = star_named_expression_rule(p)) // star_named_expression && - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (z = star_named_expressions_rule(p), 1) + (z = star_named_expressions_rule(p), 1) // star_named_expressions? 
) { - res = _PyPegen_seq_insert_in_front ( p , y , z ); - if (res == NULL && PyErr_Occurred()) { + _res = _PyPegen_seq_insert_in_front ( p , y , z ); + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_101: yield_expr | named_expression @@ -15352,33 +15581,33 @@ _tmp_101_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // named_expression expr_ty named_expression_var; if ( - (named_expression_var = named_expression_rule(p)) + (named_expression_var = named_expression_rule(p)) // named_expression ) { - res = named_expression_var; + _res = named_expression_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_103: ',' kvpair @@ -15388,54 +15617,54 @@ _loop0_103_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kvpair + Token * _literal; KeyValuePair* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kvpair_rule(p)) + (elem = kvpair_rule(p)) // kvpair ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_103"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_103_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_103_type, _seq); + return _seq; } // _gather_102: kvpair _loop0_103 @@ -15445,25 +15674,25 @@ _gather_102_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kvpair _loop0_103 
KeyValuePair* elem; asdl_seq * seq; if ( - (elem = kvpair_rule(p)) + (elem = kvpair_rule(p)) // kvpair && - (seq = _loop0_103_rule(p)) + (seq = _loop0_103_rule(p)) // _loop0_103 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop1_104: for_if_clause @@ -15473,50 +15702,50 @@ _loop1_104_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // for_if_clause comprehension_ty for_if_clause_var; while ( - (for_if_clause_var = for_if_clause_rule(p)) + (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause ) { - res = for_if_clause_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = for_if_clause_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_104"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_104_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_104_type, _seq); + return _seq; } // _loop0_105: ('if' disjunction) @@ -15526,46 +15755,46 @@ _loop0_105_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('if' disjunction) - void *_tmp_142_var; + void *_tmp_143_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) + (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction ) { - res = _tmp_142_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_143_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { 
PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_105"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_105_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_105_type, _seq); + return _seq; } // _loop0_106: ('if' disjunction) @@ -15575,46 +15804,46 @@ _loop0_106_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ('if' disjunction) - void *_tmp_143_var; + void *_tmp_144_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) + (_tmp_144_var = _tmp_144_rule(p)) // 'if' disjunction ) { - res = _tmp_143_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_144_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_106"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_106_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_106_type, _seq); + return _seq; } // _tmp_107: ',' args @@ -15624,29 +15853,29 @@ _tmp_107_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' args + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = args_rule(p)) + (c = args_rule(p)) // args ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_108: ',' args @@ -15656,29 +15885,29 @@ _tmp_108_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark 
= p->mark; { // ',' args + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = args_rule(p)) + (c = args_rule(p)) // args ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_110: ',' kwarg_or_starred @@ -15688,54 +15917,54 @@ _loop0_110_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_110"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_110_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_110_type, _seq); + return _seq; } // _gather_109: kwarg_or_starred _loop0_110 @@ -15745,25 +15974,25 @@ _gather_109_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_starred _loop0_110 KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_110_rule(p)) + (seq = _loop0_110_rule(p)) // _loop0_110 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_112: ',' kwarg_or_double_starred @@ -15773,54 +16002,54 @@ _loop0_112_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = 
p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_double_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_112"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_112_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_112_type, _seq); + return _seq; } // _gather_111: kwarg_or_double_starred _loop0_112 @@ -15830,25 +16059,25 @@ _gather_111_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_double_starred _loop0_112 KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_112_rule(p)) + (seq = _loop0_112_rule(p)) // _loop0_112 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_114: ',' kwarg_or_starred @@ -15858,54 +16087,54 @@ _loop0_114_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // 
token=',' && - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_114"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_114_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_114_type, _seq); + return _seq; } // _gather_113: kwarg_or_starred _loop0_114 @@ -15915,25 +16144,25 @@ _gather_113_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_starred _loop0_114 KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_starred_rule(p)) + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_114_rule(p)) + (seq = _loop0_114_rule(p)) // _loop0_114 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_116: ',' kwarg_or_double_starred @@ -15943,54 +16172,54 @@ _loop0_116_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' kwarg_or_double_starred + Token * _literal; KeywordOrStarred* elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { 
PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_116"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_116_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_116_type, _seq); + return _seq; } // _gather_115: kwarg_or_double_starred _loop0_116 @@ -16000,25 +16229,25 @@ _gather_115_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // kwarg_or_double_starred _loop0_116 KeywordOrStarred* elem; asdl_seq * seq; if ( - (elem = kwarg_or_double_starred_rule(p)) + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_116_rule(p)) + (seq = _loop0_116_rule(p)) // _loop0_116 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_117: (',' star_target) @@ -16028,46 +16257,46 @@ _loop0_117_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // (',' star_target) - void *_tmp_144_var; + void *_tmp_145_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) + (_tmp_145_var = _tmp_145_rule(p)) // ',' star_target ) { - res = _tmp_144_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = _tmp_145_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_117"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_117_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_117_type, _seq); + return _seq; } // _loop0_119: ',' star_target @@ -16077,54 +16306,54 @@ _loop0_119_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = 
NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' star_target + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = star_target_rule(p)) + (elem = star_target_rule(p)) // star_target ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_119"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_119_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_119_type, _seq); + return _seq; } // _gather_118: star_target _loop0_119 @@ -16134,25 +16363,25 @@ _gather_118_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // star_target _loop0_119 expr_ty elem; asdl_seq * seq; if ( - (elem = star_target_rule(p)) + (elem = star_target_rule(p)) // star_target && - (seq = _loop0_119_rule(p)) + (seq = _loop0_119_rule(p)) // _loop0_119 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_120: !'*' star_target @@ -16162,24 +16391,24 @@ _tmp_120_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // !'*' star_target expr_ty star_target_var; if ( - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' && - (star_target_var = star_target_rule(p)) + (star_target_var = star_target_rule(p)) // star_target ) { - res = star_target_var; + _res = star_target_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_122: ',' del_target @@ -16189,54 +16418,54 @@ _loop0_122_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void 
**children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' del_target + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = del_target_rule(p)) + (elem = del_target_rule(p)) // del_target ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_122"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_122_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_122_type, _seq); + return _seq; } // _gather_121: del_target _loop0_122 @@ -16246,25 +16475,25 @@ _gather_121_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // del_target _loop0_122 expr_ty elem; asdl_seq * seq; if ( - (elem = del_target_rule(p)) + (elem = del_target_rule(p)) // del_target && - (seq = _loop0_122_rule(p)) + (seq = _loop0_122_rule(p)) // _loop0_122 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _loop0_124: ',' target @@ -16274,54 +16503,54 @@ _loop0_124_rule(Parser *p) if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // ',' target + Token * _literal; expr_ty elem; - Token * literal; while ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (elem = target_rule(p)) + (elem = target_rule(p)) // target ) { - res = elem; - if (res == NULL && PyErr_Occurred()) { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 
1; - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_124"); - PyMem_Free(children); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_124_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_124_type, _seq); + return _seq; } // _gather_123: target _loop0_124 @@ -16331,25 +16560,25 @@ _gather_123_rule(Parser *p) if (p->error_indicator) { return NULL; } - asdl_seq * res = NULL; - int mark = p->mark; + asdl_seq * _res = NULL; + int _mark = p->mark; { // target _loop0_124 expr_ty elem; asdl_seq * seq; if ( - (elem = target_rule(p)) + (elem = target_rule(p)) // target && - (seq = _loop0_124_rule(p)) + (seq = _loop0_124_rule(p)) // _loop0_124 ) { - res = _PyPegen_seq_insert_in_front(p, elem, seq); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } // _tmp_125: args | expression for_if_clauses @@ -16359,834 +16588,883 @@ _tmp_125_rule(Parser *p) if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // args expr_ty args_var; if ( - (args_var = args_rule(p)) + (args_var = args_rule(p)) // args ) { - res = args_var; + _res = args_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // expression for_if_clauses expr_ty expression_var; asdl_seq* for_if_clauses_var; if ( - (expression_var = expression_rule(p)) + (expression_var = expression_rule(p)) // expression && - (for_if_clauses_var = for_if_clauses_rule(p)) + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); + _res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; +} + +// _loop0_126: star_named_expressions +static asdl_seq * +_loop0_126_rule(Parser *p) +{ + if (p->error_indicator) { + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + PyErr_Format(PyExc_MemoryError, "Parser out of memory"); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // star_named_expressions + asdl_seq* star_named_expressions_var; + while ( + (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions + ) + { + _res = star_named_expressions_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if 
(!_children) { + PyErr_Format(PyExc_MemoryError, "realloc None"); + return NULL; + } + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_126"); + PyMem_Free(_children); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_126_type, _seq); + return _seq; } -// _tmp_126: '=' annotated_rhs +// _tmp_127: '=' annotated_rhs static void * -_tmp_126_rule(Parser *p) +_tmp_127_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '=' annotated_rhs + Token * _literal; expr_ty annotated_rhs_var; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 22)) + (_literal = _PyPegen_expect_token(p, 22)) // token='=' && - (annotated_rhs_var = annotated_rhs_rule(p)) + (annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs ) { - res = _PyPegen_dummy_name(p, literal, annotated_rhs_var); + _res = _PyPegen_dummy_name(p, _literal, annotated_rhs_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_127: '=' | augassign +// _tmp_128: yield_expr | star_expressions static void * -_tmp_127_rule(Parser *p) +_tmp_128_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; - { // '=' - Token * literal; + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + expr_ty yield_expr_var; if ( - (literal = _PyPegen_expect_token(p, 22)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = literal; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } - { // augassign - AugOperator* augassign_var; + { // star_expressions + expr_ty star_expressions_var; if ( - (augassign_var = augassign_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = augassign_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_128: yield_expr | star_expressions +// _tmp_129: yield_expr | star_expressions static void * -_tmp_128_rule(Parser *p) +_tmp_129_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // yield_expr expr_ty yield_expr_var; if ( - (yield_expr_var = yield_expr_rule(p)) + (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - res = yield_expr_var; + _res = yield_expr_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // star_expressions expr_ty star_expressions_var; if ( - (star_expressions_var = star_expressions_rule(p)) + (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - res = star_expressions_var; + _res = star_expressions_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_129: '[' | '(' | '{' +// _tmp_130: '[' | '(' | '{' static void * -_tmp_129_rule(Parser *p) +_tmp_130_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '[' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 9)) + (_literal = _PyPegen_expect_token(p, 9)) // 
token='[' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '(' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 7)) + (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '{' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 25)) + (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _loop0_130: param_no_default +// _loop0_131: param_no_default static asdl_seq * -_loop0_130_rule(Parser *p) +_loop0_131_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_no_default arg_ty param_no_default_var; while ( - (param_no_default_var = param_no_default_rule(p)) + (param_no_default_var = param_no_default_rule(p)) // param_no_default ) { - res = param_no_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_130"); - PyMem_Free(children); + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop0_131"); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop0_130_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_131_type, _seq); + return _seq; } -// _tmp_131: slash_with_default | param_with_default+ +// _tmp_132: slash_with_default | param_with_default+ static void * -_tmp_131_rule(Parser *p) +_tmp_132_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // slash_with_default SlashWithDefault* slash_with_default_var; if ( - (slash_with_default_var = slash_with_default_rule(p)) + (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - res = slash_with_default_var; + _res = slash_with_default_var; goto done; } - p->mark = mark; + p->mark = _mark; } { // param_with_default+ - asdl_seq * _loop1_145_var; + asdl_seq * _loop1_146_var; if ( - (_loop1_145_var = _loop1_145_rule(p)) + (_loop1_146_var = _loop1_146_rule(p)) // param_with_default+ ) { - res = _loop1_145_var; + _res 
= _loop1_146_var; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_132: ')' | ',' (')' | '**') +// _tmp_133: ')' | ',' (')' | '**') static void * -_tmp_132_rule(Parser *p) +_tmp_133_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ')' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 8)) + (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // ',' (')' | '**') - void *_tmp_146_var; - Token * literal; + Token * _literal; + void *_tmp_147_var; if ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_146_var = _tmp_146_rule(p)) + (_tmp_147_var = _tmp_147_rule(p)) // ')' | '**' ) { - res = _PyPegen_dummy_name(p, literal, _tmp_146_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_147_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_133: ':' | ',' (':' | '**') +// _tmp_134: ':' | ',' (':' | '**') static void * -_tmp_133_rule(Parser *p) +_tmp_134_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ':' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // ',' (':' | '**') - void *_tmp_147_var; - Token * literal; + Token * _literal; + void *_tmp_148_var; if ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_147_var = _tmp_147_rule(p)) + (_tmp_148_var = _tmp_148_rule(p)) // ':' | '**' ) { - res = _PyPegen_dummy_name(p, literal, _tmp_147_var); + _res = _PyPegen_dummy_name(p, _literal, _tmp_148_var); goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_134: star_targets '=' +// _tmp_135: star_targets '=' static void * -_tmp_134_rule(Parser *p) +_tmp_135_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // star_targets '=' - Token * literal; + Token * _literal; expr_ty z; if ( - (z = star_targets_rule(p)) + (z = star_targets_rule(p)) // star_targets && - (literal = _PyPegen_expect_token(p, 22)) + (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_135: '.' | '...' +// _tmp_136: '.' | '...' static void * -_tmp_135_rule(Parser *p) +_tmp_136_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '.' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '...' 
- Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 52)) + (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_136: '.' | '...' +// _tmp_137: '.' | '...' static void * -_tmp_136_rule(Parser *p) +_tmp_137_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '.' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 23)) + (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '...' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 52)) + (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_137: '@' named_expression NEWLINE +// _tmp_138: '@' named_expression NEWLINE static void * -_tmp_137_rule(Parser *p) +_tmp_138_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // '@' named_expression NEWLINE + Token * _literal; expr_ty f; - Token * literal; Token * newline_var; if ( - (literal = _PyPegen_expect_token(p, 49)) + (_literal = _PyPegen_expect_token(p, 49)) // token='@' && - (f = named_expression_rule(p)) + (f = named_expression_rule(p)) // named_expression && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - res = f; - if (res == NULL && PyErr_Occurred()) { + _res = f; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_138: ',' star_expression +// _tmp_139: ',' star_expression static void * -_tmp_138_rule(Parser *p) +_tmp_139_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' star_expression + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = star_expression_rule(p)) + (c = star_expression_rule(p)) // star_expression ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_139: ',' expression +// _tmp_140: ',' expression static void * -_tmp_139_rule(Parser *p) +_tmp_140_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' expression + Token * _literal; expr_ty c; - Token * literal; if ( - (literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = expression_rule(p)) + (c = expression_rule(p)) // expression ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - 
p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_140: 'or' conjunction +// _tmp_141: 'or' conjunction static void * -_tmp_140_rule(Parser *p) +_tmp_141_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'or' conjunction + Token * _keyword; expr_ty c; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 532)) + (_keyword = _PyPegen_expect_token(p, 532)) // token='or' && - (c = conjunction_rule(p)) + (c = conjunction_rule(p)) // conjunction ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_141: 'and' inversion +// _tmp_142: 'and' inversion static void * -_tmp_141_rule(Parser *p) +_tmp_142_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'and' inversion + Token * _keyword; expr_ty c; - Token * keyword; if ( - (keyword = _PyPegen_expect_token(p, 533)) + (_keyword = _PyPegen_expect_token(p, 533)) // token='and' && - (c = inversion_rule(p)) + (c = inversion_rule(p)) // inversion ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_142: 'if' disjunction +// _tmp_143: 'if' disjunction static void * -_tmp_142_rule(Parser *p) +_tmp_143_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'if' disjunction - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (z = disjunction_rule(p)) + (z = disjunction_rule(p)) // disjunction ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_143: 'if' disjunction +// _tmp_144: 'if' disjunction static void * -_tmp_143_rule(Parser *p) +_tmp_144_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // 'if' disjunction - Token * keyword; + Token * _keyword; expr_ty z; if ( - (keyword = _PyPegen_expect_token(p, 510)) + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' && - (z = disjunction_rule(p)) + (z = disjunction_rule(p)) // disjunction ) { - res = z; - if (res == NULL && PyErr_Occurred()) { + _res = z; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_144: ',' star_target +// _tmp_145: ',' star_target static void * -_tmp_144_rule(Parser *p) +_tmp_145_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ',' star_target + Token * _literal; expr_ty c; - Token * literal; if ( - 
(literal = _PyPegen_expect_token(p, 12)) + (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (c = star_target_rule(p)) + (c = star_target_rule(p)) // star_target ) { - res = c; - if (res == NULL && PyErr_Occurred()) { + _res = c; + if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; return NULL; } goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _loop1_145: param_with_default +// _loop1_146: param_with_default static asdl_seq * -_loop1_145_rule(Parser *p) +_loop1_146_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void *res = NULL; - int mark = p->mark; - int start_mark = p->mark; - void **children = PyMem_Malloc(sizeof(void *)); - if (!children) { + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "Parser out of memory"); return NULL; } - ssize_t children_capacity = 1; - ssize_t n = 0; + ssize_t _children_capacity = 1; + ssize_t _n = 0; { // param_with_default NameDefaultPair* param_with_default_var; while ( - (param_with_default_var = param_with_default_rule(p)) + (param_with_default_var = param_with_default_rule(p)) // param_with_default ) { - res = param_with_default_var; - if (n == children_capacity) { - children_capacity *= 2; - children = PyMem_Realloc(children, children_capacity*sizeof(void *)); - if (!children) { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + _children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_children) { PyErr_Format(PyExc_MemoryError, "realloc None"); return NULL; } } - children[n++] = res; - mark = p->mark; + _children[_n++] = _res; + _mark = p->mark; } - p->mark = mark; + p->mark = _mark; } - if (n == 0) { - PyMem_Free(children); + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); return NULL; } - asdl_seq *seq = _Py_asdl_seq_new(n, p->arena); - if (!seq) { - PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_145"); - PyMem_Free(children); + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyErr_Format(PyExc_MemoryError, "asdl_seq_new _loop1_146"); + PyMem_Free(_children); return NULL; } - for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]); - PyMem_Free(children); - _PyPegen_insert_memo(p, start_mark, _loop1_145_type, seq); - return seq; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_146_type, _seq); + return _seq; } -// _tmp_146: ')' | '**' +// _tmp_147: ')' | '**' static void * -_tmp_146_rule(Parser *p) +_tmp_147_rule(Parser *p) { if (p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ')' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 8)) + (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } -// _tmp_147: ':' | '**' +// _tmp_148: ':' | '**' static void * -_tmp_147_rule(Parser *p) +_tmp_148_rule(Parser *p) { if 
(p->error_indicator) { return NULL; } - void * res = NULL; - int mark = p->mark; + void * _res = NULL; + int _mark = p->mark; { // ':' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 11)) + (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } { // '**' - Token * literal; + Token * _literal; if ( - (literal = _PyPegen_expect_token(p, 35)) + (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - res = literal; + _res = _literal; goto done; } - p->mark = mark; + p->mark = _mark; } - res = NULL; + _res = NULL; done: - return res; + return _res; } void * diff --git a/Parser/pegen/parse_string.c b/Parser/pegen/parse_string.c index d96303dc183fa7..ca4b733c153b57 100644 --- a/Parser/pegen/parse_string.c +++ b/Parser/pegen/parse_string.c @@ -12,7 +12,7 @@ // file (like "_PyPegen_raise_syntax_error"). static int -warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char) +warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char, Token *t) { PyObject *msg = PyUnicode_FromFormat("invalid escape sequence \\%c", first_invalid_escape_char); @@ -20,11 +20,16 @@ warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char) return -1; } if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg, p->tok->filename, - p->tok->lineno, NULL, NULL) < 0) { + t->lineno, NULL, NULL) < 0) { if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) { /* Replace the DeprecationWarning exception with a SyntaxError to get a more accurate error report */ PyErr_Clear(); + + /* This is needed, in order for the SyntaxError to point to the token t, + since _PyPegen_raise_error uses p->tokens[p->fill - 1] for the + error location, if p->known_err_token is not set. */ + p->known_err_token = t; RAISE_SYNTAX_ERROR("invalid escape sequence \\%c", first_invalid_escape_char); } Py_DECREF(msg); @@ -47,7 +52,7 @@ decode_utf8(const char **sPtr, const char *end) } static PyObject * -decode_unicode_with_escapes(Parser *parser, const char *s, size_t len) +decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t) { PyObject *v, *u; char *buf; @@ -110,7 +115,7 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len) v = _PyUnicode_DecodeUnicodeEscape(s, len, NULL, &first_invalid_escape); if (v != NULL && first_invalid_escape != NULL) { - if (warn_invalid_escape_sequence(parser, *first_invalid_escape) < 0) { + if (warn_invalid_escape_sequence(parser, *first_invalid_escape, t) < 0) { /* We have not decref u before because first_invalid_escape points inside u. */ Py_XDECREF(u); @@ -123,7 +128,7 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len) } static PyObject * -decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len) +decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len, Token *t) { const char *first_invalid_escape; PyObject *result = _PyBytes_DecodeEscape(s, len, NULL, &first_invalid_escape); @@ -132,7 +137,7 @@ decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len) } if (first_invalid_escape != NULL) { - if (warn_invalid_escape_sequence(p, *first_invalid_escape) < 0) { + if (warn_invalid_escape_sequence(p, *first_invalid_escape, t) < 0) { Py_DECREF(result); return NULL; } @@ -146,9 +151,14 @@ decode_bytes_with_escapes(Parser *p, const char *s, Py_ssize_t len) If the string is an f-string, set *fstr and *fstrlen to the unparsed string object. 
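Passing the Token into warn_invalid_escape_sequence() means both the DeprecationWarning and the SyntaxError it can be upgraded to are reported at the line of the offending string literal rather than wherever the tokenizer currently is. A minimal embedding sketch of how this can be observed; the two-line source and the expected lineno of 2 are illustrative assumptions, not taken from the patch:

    #include <Python.h>

    int main(void)
    {
        Py_Initialize();
        /* Turn the invalid-escape DeprecationWarning into an error, then compile
           a two-line module whose bad escape ("\p") sits on line 2 and print
           where the resulting SyntaxError points. */
        PyRun_SimpleString(
            "import warnings\n"
            "warnings.simplefilter('error', DeprecationWarning)\n"
            "try:\n"
            "    compile('x = 1\\ns = \"\\\\p\"\\n', '<example>', 'exec')\n"
            "except SyntaxError as err:\n"
            "    print(err.lineno)  # expected: 2, the line of the string token\n");
        Py_Finalize();
        return 0;
    }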
Return 0 if no errors occurred. */ int -_PyPegen_parsestr(Parser *p, const char *s, int *bytesmode, int *rawmode, PyObject **result, - const char **fstr, Py_ssize_t *fstrlen) +_PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result, + const char **fstr, Py_ssize_t *fstrlen, Token *t) { + const char *s = PyBytes_AsString(t->bytes); + if (s == NULL) { + return -1; + } + size_t len; int quote = Py_CHARMASK(*s); int fmode = 0; @@ -245,7 +255,7 @@ _PyPegen_parsestr(Parser *p, const char *s, int *bytesmode, int *rawmode, PyObje *result = PyBytes_FromStringAndSize(s, len); } else { - *result = decode_bytes_with_escapes(p, s, len); + *result = decode_bytes_with_escapes(p, s, len, t); } } else { @@ -253,7 +263,7 @@ _PyPegen_parsestr(Parser *p, const char *s, int *bytesmode, int *rawmode, PyObje *result = PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL); } else { - *result = decode_unicode_with_escapes(p, s, len); + *result = decode_unicode_with_escapes(p, s, len, t); } } return *result == NULL ? -1 : 0; @@ -637,7 +647,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, */ static int fstring_find_literal(Parser *p, const char **str, const char *end, int raw, - PyObject **literal, int recurse_lvl) + PyObject **literal, int recurse_lvl, Token *t) { /* Get any literal string. It ends when we hit an un-doubled left brace (which isn't part of a unicode name escape such as @@ -660,7 +670,7 @@ fstring_find_literal(Parser *p, const char **str, const char *end, int raw, } break; } - if (ch == '{' && warn_invalid_escape_sequence(p, ch) < 0) { + if (ch == '{' && warn_invalid_escape_sequence(p, ch, t) < 0) { return -1; } } @@ -704,7 +714,7 @@ fstring_find_literal(Parser *p, const char **str, const char *end, int raw, NULL, NULL); else *literal = decode_unicode_with_escapes(p, literal_start, - s - literal_start); + s - literal_start, t); if (!*literal) return -1; } @@ -1041,7 +1051,7 @@ fstring_find_literal_and_expr(Parser *p, const char **str, const char *end, int assert(*literal == NULL && *expression == NULL); /* Get any literal string. 
*/ - result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl); + result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl, t); if (result < 0) goto error; diff --git a/Parser/pegen/parse_string.h b/Parser/pegen/parse_string.h index 4f2aa94fc19b05..cd85bd57d0a383 100644 --- a/Parser/pegen/parse_string.h +++ b/Parser/pegen/parse_string.h @@ -34,8 +34,8 @@ typedef struct { } FstringParser; void _PyPegen_FstringParser_Init(FstringParser *); -int _PyPegen_parsestr(Parser *, const char *, int *, int *, PyObject **, - const char **, Py_ssize_t *); +int _PyPegen_parsestr(Parser *, int *, int *, PyObject **, + const char **, Py_ssize_t *, Token *); int _PyPegen_FstringParser_ConcatFstring(Parser *, FstringParser *, const char **, const char *, int, int, Token *, Token *, Token *); diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index c311593af70f58..7f3e4561de6055 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -300,24 +300,6 @@ raise_tokenizer_init_error(PyObject *filename) Py_XDECREF(tuple); } -static inline PyObject * -get_error_line(char *buffer, int is_file) -{ - const char *newline; - if (is_file) { - newline = strrchr(buffer, '\n'); - } else { - newline = strchr(buffer, '\n'); - } - - if (newline) { - return PyUnicode_DecodeUTF8(buffer, newline - buffer, "replace"); - } - else { - return PyUnicode_DecodeUTF8(buffer, strlen(buffer), "replace"); - } -} - static int tokenizer_error(Parser *p) { @@ -331,9 +313,6 @@ tokenizer_error(Parser *p) case E_TOKEN: msg = "invalid token"; break; - case E_IDENTIFIER: - msg = "invalid character in identifier"; - break; case E_EOFS: RAISE_SYNTAX_ERROR("EOF while scanning triple-quoted string literal"); return -1; @@ -377,48 +356,61 @@ tokenizer_error(Parser *p) } void * -_PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const char *errmsg, ...) +_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) +{ + Token *t = p->known_err_token != NULL ? 
p->known_err_token : p->tokens[p->fill - 1]; + int col_offset; + if (t->col_offset == -1) { + col_offset = Py_SAFE_DOWNCAST(p->tok->cur - p->tok->buf, + intptr_t, int); + } else { + col_offset = t->col_offset + 1; + } + + va_list va; + va_start(va, errmsg); + _PyPegen_raise_error_known_location(p, errtype, t->lineno, + col_offset, errmsg, va); + va_end(va); + + return NULL; +} + + +void * +_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, + int lineno, int col_offset, + const char *errmsg, va_list va) { PyObject *value = NULL; PyObject *errstr = NULL; - PyObject *loc = NULL; + PyObject *error_line = NULL; PyObject *tmp = NULL; - Token *t = p->tokens[p->fill - 1]; - Py_ssize_t col_number = !with_col_number; - va_list va; p->error_indicator = 1; - va_start(va, errmsg); errstr = PyUnicode_FromFormatV(errmsg, va); - va_end(va); if (!errstr) { goto error; } if (p->start_rule == Py_file_input) { - loc = PyErr_ProgramTextObject(p->tok->filename, t->lineno); - } - - if (!loc) { - loc = get_error_line(p->tok->buf, p->start_rule == Py_file_input); + error_line = PyErr_ProgramTextObject(p->tok->filename, lineno); } - if (loc && with_col_number) { - int col_offset; - if (t->col_offset == -1) { - col_offset = Py_SAFE_DOWNCAST(p->tok->cur - p->tok->buf, - intptr_t, int); - } else { - col_offset = t->col_offset + 1; + if (!error_line) { + Py_ssize_t size = p->tok->inp - p->tok->buf; + if (size && p->tok->buf[size-1] == '\n') { + size--; + } + error_line = PyUnicode_DecodeUTF8(p->tok->buf, size, "replace"); + if (!error_line) { + goto error; } - col_number = byte_offset_to_character_offset(loc, col_offset); - } - else if (!loc) { - Py_INCREF(Py_None); - loc = Py_None; } - tmp = Py_BuildValue("(OiiN)", p->tok->filename, t->lineno, col_number, loc); + int col_number = byte_offset_to_character_offset(error_line, col_offset); + + tmp = Py_BuildValue("(OiiN)", p->tok->filename, lineno, col_number, error_line); if (!tmp) { goto error; } @@ -435,7 +427,7 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const ch error: Py_XDECREF(errstr); - Py_XDECREF(loc); + Py_XDECREF(error_line); return NULL; } @@ -1053,6 +1045,7 @@ _PyPegen_Parser_New(struct tok_state *tok, int start_rule, int flags, p->starting_col_offset = 0; p->flags = flags; p->feature_version = feature_version; + p->known_err_token = NULL; return p; } @@ -1972,12 +1965,7 @@ _PyPegen_concatenate_strings(Parser *p, asdl_seq *strings) const char *fstr; Py_ssize_t fstrlen = -1; - char *this_str = PyBytes_AsString(t->bytes); - if (!this_str) { - goto error; - } - - if (_PyPegen_parsestr(p, this_str, &this_bytesmode, &this_rawmode, &s, &fstr, &fstrlen) != 0) { + if (_PyPegen_parsestr(p, &this_bytesmode, &this_rawmode, &s, &fstr, &fstrlen, t) != 0) { goto error; } @@ -2066,3 +2054,49 @@ _PyPegen_make_module(Parser *p, asdl_seq *a) { } return Module(a, type_ignores, p->arena); } + +// Error reporting helpers + +expr_ty +_PyPegen_get_invalid_target(expr_ty e) +{ + if (e == NULL) { + return NULL; + } + +#define VISIT_CONTAINER(CONTAINER, TYPE) do { \ + Py_ssize_t len = asdl_seq_LEN(CONTAINER->v.TYPE.elts);\ + for (Py_ssize_t i = 0; i < len; i++) {\ + expr_ty other = asdl_seq_GET(CONTAINER->v.TYPE.elts, i);\ + expr_ty child = _PyPegen_get_invalid_target(other);\ + if (child != NULL) {\ + return child;\ + }\ + }\ + } while (0) + + // We only need to visit List and Tuple nodes recursively as those + // are the only ones that can contain valid names in targets when + // they are parsed as expressions. 
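Splitting out _PyPegen_raise_error_known_location() lets a grammar action anchor a SyntaxError on the coordinates of a particular AST node instead of on the last token the parser consumed. A hypothetical action built from the helpers declared in the pegen.h hunk below; the action name and the message are illustrative only, and the sketch assumes it is compiled inside the CPython tree where Parser/pegen/pegen.h is available:

    #include "Python.h"
    #include "pegen.h"

    /* Report the first sub-expression that cannot be an assignment target,
       pointing the SyntaxError at that node's own line and column. */
    static void *
    invalid_target_action(Parser *p, expr_ty e)
    {
        expr_ty invalid = _PyPegen_get_invalid_target(e);
        if (invalid != NULL) {
            return RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
                invalid, "cannot assign to %s", _PyPegen_get_expr_name(invalid));
        }
        return NULL;
    }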
Any other kind of expression + // that is a container (like Sets or Dicts) is directly invalid and + // we don't need to visit it recursively. + + switch (e->kind) { + case List_kind: { + VISIT_CONTAINER(e, List); + return NULL; + } + case Tuple_kind: { + VISIT_CONTAINER(e, Tuple); + return NULL; + } + case Starred_kind: + return _PyPegen_get_invalid_target(e->v.Starred.value); + case Name_kind: + case Subscript_kind: + case Attribute_kind: + return NULL; + default: + return e; + } +} \ No newline at end of file diff --git a/Parser/pegen/pegen.h b/Parser/pegen/pegen.h index cbe6f197ac7423..b9d4c048bb52b0 100644 --- a/Parser/pegen/pegen.h +++ b/Parser/pegen/pegen.h @@ -71,6 +71,7 @@ typedef struct { int flags; int feature_version; growable_comment_array type_ignore_comments; + Token *known_err_token; } Parser; typedef struct { @@ -126,15 +127,32 @@ expr_ty _PyPegen_name_token(Parser *p); expr_ty _PyPegen_number_token(Parser *p); void *_PyPegen_string_token(Parser *p); const char *_PyPegen_get_expr_name(expr_ty); -void *_PyPegen_raise_error(Parser *p, PyObject *errtype, int with_col_number, const char *errmsg, ...); +void *_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...); +void *_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, + int lineno, int col_offset, + const char *errmsg, va_list va); void *_PyPegen_dummy_name(Parser *p, ...); +Py_LOCAL_INLINE(void *) +RAISE_ERROR_KNOWN_LOCATION(Parser *p, PyObject *errtype, int lineno, + int col_offset, const char *errmsg, ...) +{ + va_list va; + va_start(va, errmsg); + _PyPegen_raise_error_known_location(p, errtype, lineno, col_offset + 1, + errmsg, va); + va_end(va); + return NULL; +} + + #define UNUSED(expr) do { (void)(expr); } while (0) #define EXTRA_EXPR(head, tail) head->lineno, head->col_offset, tail->end_lineno, tail->end_col_offset, p->arena -#define EXTRA start_lineno, start_col_offset, end_lineno, end_col_offset, p->arena -#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 1, msg, ##__VA_ARGS__) -#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, 1, msg, ##__VA_ARGS__) -#define RAISE_SYNTAX_ERROR_NO_COL_OFFSET(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, 0, msg, ##__VA_ARGS__) +#define EXTRA _start_lineno, _start_col_offset, _end_lineno, _end_col_offset, p->arena +#define RAISE_SYNTAX_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_SyntaxError, msg, ##__VA_ARGS__) +#define RAISE_INDENTATION_ERROR(msg, ...) _PyPegen_raise_error(p, PyExc_IndentationError, msg, ##__VA_ARGS__) +#define RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, msg, ...) 
\ + RAISE_ERROR_KNOWN_LOCATION(p, PyExc_SyntaxError, a->lineno, a->col_offset, msg, ##__VA_ARGS__) Py_LOCAL_INLINE(void *) CHECK_CALL(Parser *p, void *result) @@ -242,6 +260,10 @@ void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); int _PyPegen_check_barry_as_flufl(Parser *); mod_ty _PyPegen_make_module(Parser *, asdl_seq *); +// Error reporting helpers + +expr_ty _PyPegen_get_invalid_target(expr_ty e); + void *_PyPegen_parse(Parser *); #endif diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 0f2b6af5e50adf..b81fa118f216eb 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -1101,25 +1101,53 @@ static int verify_identifier(struct tok_state *tok) { PyObject *s; - int result; if (tok->decoding_erred) return 0; s = PyUnicode_DecodeUTF8(tok->start, tok->cur - tok->start, NULL); if (s == NULL) { if (PyErr_ExceptionMatches(PyExc_UnicodeDecodeError)) { - PyErr_Clear(); - tok->done = E_IDENTIFIER; - } else { + tok->done = E_DECODE; + } + else { tok->done = E_ERROR; } return 0; } - result = PyUnicode_IsIdentifier(s); - Py_DECREF(s); - if (result == 0) { - tok->done = E_IDENTIFIER; + Py_ssize_t invalid = _PyUnicode_ScanIdentifier(s); + if (invalid < 0) { + Py_DECREF(s); + tok->done = E_ERROR; + return 0; } - return result; + assert(PyUnicode_GET_LENGTH(s) > 0); + if (invalid < PyUnicode_GET_LENGTH(s)) { + Py_UCS4 ch = PyUnicode_READ_CHAR(s, invalid); + if (invalid + 1 < PyUnicode_GET_LENGTH(s)) { + /* Determine the offset in UTF-8 encoded input */ + Py_SETREF(s, PyUnicode_Substring(s, 0, invalid + 1)); + if (s != NULL) { + Py_SETREF(s, PyUnicode_AsUTF8String(s)); + } + if (s == NULL) { + tok->done = E_ERROR; + return 0; + } + tok->cur = (char *)tok->start + PyBytes_GET_SIZE(s); + } + Py_DECREF(s); + // PyUnicode_FromFormatV() does not support %X + char hex[9]; + snprintf(hex, sizeof(hex), "%04X", ch); + if (Py_UNICODE_ISPRINTABLE(ch)) { + syntaxerror(tok, "invalid character '%c' (U+%s)", ch, hex); + } + else { + syntaxerror(tok, "invalid non-printable character U+%s", hex); + } + return 0; + } + Py_DECREF(s); + return 1; } static int diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 80f91646fd62e4..f34b1450c66ef1 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -1294,11 +1294,9 @@ static PyObject* ast2obj_object(void *o) Py_INCREF((PyObject*)o); return (PyObject*)o; } -#define ast2obj_singleton ast2obj_object #define ast2obj_constant ast2obj_object #define ast2obj_identifier ast2obj_object #define ast2obj_string ast2obj_object -#define ast2obj_bytes ast2obj_object static PyObject* ast2obj_int(long b) { @@ -2077,7 +2075,7 @@ Expression(expr_ty body, PyArena *arena) mod_ty p; if (!body) { PyErr_SetString(PyExc_ValueError, - "field body is required for Expression"); + "field 'body' is required for Expression"); return NULL; } p = (mod_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2094,7 +2092,7 @@ FunctionType(asdl_seq * argtypes, expr_ty returns, PyArena *arena) mod_ty p; if (!returns) { PyErr_SetString(PyExc_ValueError, - "field returns is required for FunctionType"); + "field 'returns' is required for FunctionType"); return NULL; } p = (mod_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2114,12 +2112,12 @@ FunctionDef(identifier name, arguments_ty args, asdl_seq * body, asdl_seq * stmt_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for FunctionDef"); + "field 'name' is required for FunctionDef"); return NULL; } if (!args) { PyErr_SetString(PyExc_ValueError, - "field args is required for FunctionDef"); + "field 'args' is required 
for FunctionDef"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2148,12 +2146,12 @@ AsyncFunctionDef(identifier name, arguments_ty args, asdl_seq * body, asdl_seq stmt_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for AsyncFunctionDef"); + "field 'name' is required for AsyncFunctionDef"); return NULL; } if (!args) { PyErr_SetString(PyExc_ValueError, - "field args is required for AsyncFunctionDef"); + "field 'args' is required for AsyncFunctionDef"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2181,7 +2179,7 @@ ClassDef(identifier name, asdl_seq * bases, asdl_seq * keywords, asdl_seq * stmt_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for ClassDef"); + "field 'name' is required for ClassDef"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2241,7 +2239,7 @@ Assign(asdl_seq * targets, expr_ty value, string type_comment, int lineno, int stmt_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Assign"); + "field 'value' is required for Assign"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2265,17 +2263,17 @@ AugAssign(expr_ty target, operator_ty op, expr_ty value, int lineno, int stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for AugAssign"); + "field 'target' is required for AugAssign"); return NULL; } if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for AugAssign"); + "field 'op' is required for AugAssign"); return NULL; } if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for AugAssign"); + "field 'value' is required for AugAssign"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2300,12 +2298,12 @@ AnnAssign(expr_ty target, expr_ty annotation, expr_ty value, int simple, int stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for AnnAssign"); + "field 'target' is required for AnnAssign"); return NULL; } if (!annotation) { PyErr_SetString(PyExc_ValueError, - "field annotation is required for AnnAssign"); + "field 'annotation' is required for AnnAssign"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2331,12 +2329,12 @@ For(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, string stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for For"); + "field 'target' is required for For"); return NULL; } if (!iter) { PyErr_SetString(PyExc_ValueError, - "field iter is required for For"); + "field 'iter' is required for For"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2363,12 +2361,12 @@ AsyncFor(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, stmt_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for AsyncFor"); + "field 'target' is required for AsyncFor"); return NULL; } if (!iter) { PyErr_SetString(PyExc_ValueError, - "field iter is required for AsyncFor"); + "field 'iter' is required for AsyncFor"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2394,7 +2392,7 @@ While(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno, int stmt_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for While"); + "field 'test' is required for While"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2418,7 +2416,7 @@ If(expr_ty test, asdl_seq * body, asdl_seq * orelse, int 
lineno, int stmt_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for If"); + "field 'test' is required for If"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2519,7 +2517,7 @@ Assert(expr_ty test, expr_ty msg, int lineno, int col_offset, int end_lineno, stmt_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for Assert"); + "field 'test' is required for Assert"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2612,7 +2610,7 @@ Expr(expr_ty value, int lineno, int col_offset, int end_lineno, int stmt_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Expr"); + "field 'value' is required for Expr"); return NULL; } p = (stmt_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2682,7 +2680,7 @@ BoolOp(boolop_ty op, asdl_seq * values, int lineno, int col_offset, int expr_ty p; if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for BoolOp"); + "field 'op' is required for BoolOp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2705,12 +2703,12 @@ NamedExpr(expr_ty target, expr_ty value, int lineno, int col_offset, int expr_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for NamedExpr"); + "field 'target' is required for NamedExpr"); return NULL; } if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for NamedExpr"); + "field 'value' is required for NamedExpr"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2733,17 +2731,17 @@ BinOp(expr_ty left, operator_ty op, expr_ty right, int lineno, int col_offset, expr_ty p; if (!left) { PyErr_SetString(PyExc_ValueError, - "field left is required for BinOp"); + "field 'left' is required for BinOp"); return NULL; } if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for BinOp"); + "field 'op' is required for BinOp"); return NULL; } if (!right) { PyErr_SetString(PyExc_ValueError, - "field right is required for BinOp"); + "field 'right' is required for BinOp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2767,12 +2765,12 @@ UnaryOp(unaryop_ty op, expr_ty operand, int lineno, int col_offset, int expr_ty p; if (!op) { PyErr_SetString(PyExc_ValueError, - "field op is required for UnaryOp"); + "field 'op' is required for UnaryOp"); return NULL; } if (!operand) { PyErr_SetString(PyExc_ValueError, - "field operand is required for UnaryOp"); + "field 'operand' is required for UnaryOp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2795,12 +2793,12 @@ Lambda(arguments_ty args, expr_ty body, int lineno, int col_offset, int expr_ty p; if (!args) { PyErr_SetString(PyExc_ValueError, - "field args is required for Lambda"); + "field 'args' is required for Lambda"); return NULL; } if (!body) { PyErr_SetString(PyExc_ValueError, - "field body is required for Lambda"); + "field 'body' is required for Lambda"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2823,17 +2821,17 @@ IfExp(expr_ty test, expr_ty body, expr_ty orelse, int lineno, int col_offset, expr_ty p; if (!test) { PyErr_SetString(PyExc_ValueError, - "field test is required for IfExp"); + "field 'test' is required for IfExp"); return NULL; } if (!body) { PyErr_SetString(PyExc_ValueError, - "field body is required for IfExp"); + "field 'body' is required for IfExp"); return NULL; } if (!orelse) { PyErr_SetString(PyExc_ValueError, - "field orelse is required for IfExp"); + "field 'orelse' is required 
for IfExp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2892,7 +2890,7 @@ ListComp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, int expr_ty p; if (!elt) { PyErr_SetString(PyExc_ValueError, - "field elt is required for ListComp"); + "field 'elt' is required for ListComp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2915,7 +2913,7 @@ SetComp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, int expr_ty p; if (!elt) { PyErr_SetString(PyExc_ValueError, - "field elt is required for SetComp"); + "field 'elt' is required for SetComp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2938,12 +2936,12 @@ DictComp(expr_ty key, expr_ty value, asdl_seq * generators, int lineno, int expr_ty p; if (!key) { PyErr_SetString(PyExc_ValueError, - "field key is required for DictComp"); + "field 'key' is required for DictComp"); return NULL; } if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for DictComp"); + "field 'value' is required for DictComp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2967,7 +2965,7 @@ GeneratorExp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, expr_ty p; if (!elt) { PyErr_SetString(PyExc_ValueError, - "field elt is required for GeneratorExp"); + "field 'elt' is required for GeneratorExp"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -2990,7 +2988,7 @@ Await(expr_ty value, int lineno, int col_offset, int end_lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Await"); + "field 'value' is required for Await"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3029,7 +3027,7 @@ YieldFrom(expr_ty value, int lineno, int col_offset, int end_lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for YieldFrom"); + "field 'value' is required for YieldFrom"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3051,7 +3049,7 @@ Compare(expr_ty left, asdl_int_seq * ops, asdl_seq * comparators, int lineno, expr_ty p; if (!left) { PyErr_SetString(PyExc_ValueError, - "field left is required for Compare"); + "field 'left' is required for Compare"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3075,7 +3073,7 @@ Call(expr_ty func, asdl_seq * args, asdl_seq * keywords, int lineno, int expr_ty p; if (!func) { PyErr_SetString(PyExc_ValueError, - "field func is required for Call"); + "field 'func' is required for Call"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3100,7 +3098,7 @@ FormattedValue(expr_ty value, int conversion, expr_ty format_spec, int lineno, expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for FormattedValue"); + "field 'value' is required for FormattedValue"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3141,7 +3139,7 @@ Constant(constant value, string kind, int lineno, int col_offset, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Constant"); + "field 'value' is required for Constant"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3164,17 +3162,17 @@ Attribute(expr_ty value, identifier attr, expr_context_ty ctx, int lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Attribute"); + "field 'value' is required for Attribute"); return NULL; } if (!attr) { 
PyErr_SetString(PyExc_ValueError, - "field attr is required for Attribute"); + "field 'attr' is required for Attribute"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Attribute"); + "field 'ctx' is required for Attribute"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3198,17 +3196,17 @@ Subscript(expr_ty value, expr_ty slice, expr_context_ty ctx, int lineno, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Subscript"); + "field 'value' is required for Subscript"); return NULL; } if (!slice) { PyErr_SetString(PyExc_ValueError, - "field slice is required for Subscript"); + "field 'slice' is required for Subscript"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Subscript"); + "field 'ctx' is required for Subscript"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3232,12 +3230,12 @@ Starred(expr_ty value, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for Starred"); + "field 'value' is required for Starred"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Starred"); + "field 'ctx' is required for Starred"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3260,12 +3258,12 @@ Name(identifier id, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!id) { PyErr_SetString(PyExc_ValueError, - "field id is required for Name"); + "field 'id' is required for Name"); return NULL; } if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Name"); + "field 'ctx' is required for Name"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3288,7 +3286,7 @@ List(asdl_seq * elts, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for List"); + "field 'ctx' is required for List"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3311,7 +3309,7 @@ Tuple(asdl_seq * elts, expr_context_ty ctx, int lineno, int col_offset, int expr_ty p; if (!ctx) { PyErr_SetString(PyExc_ValueError, - "field ctx is required for Tuple"); + "field 'ctx' is required for Tuple"); return NULL; } p = (expr_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3353,12 +3351,12 @@ comprehension(expr_ty target, expr_ty iter, asdl_seq * ifs, int is_async, comprehension_ty p; if (!target) { PyErr_SetString(PyExc_ValueError, - "field target is required for comprehension"); + "field 'target' is required for comprehension"); return NULL; } if (!iter) { PyErr_SetString(PyExc_ValueError, - "field iter is required for comprehension"); + "field 'iter' is required for comprehension"); return NULL; } p = (comprehension_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3416,7 +3414,7 @@ arg(identifier arg, expr_ty annotation, string type_comment, int lineno, int arg_ty p; if (!arg) { PyErr_SetString(PyExc_ValueError, - "field arg is required for arg"); + "field 'arg' is required for arg"); return NULL; } p = (arg_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3439,7 +3437,7 @@ keyword(identifier arg, expr_ty value, int lineno, int col_offset, int keyword_ty p; if (!value) { PyErr_SetString(PyExc_ValueError, - "field value is required for keyword"); + "field 'value' is required for keyword"); return NULL; } p = (keyword_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3460,7 +3458,7 @@ alias(identifier name, 
identifier asname, PyArena *arena) alias_ty p; if (!name) { PyErr_SetString(PyExc_ValueError, - "field name is required for alias"); + "field 'name' is required for alias"); return NULL; } p = (alias_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3477,7 +3475,7 @@ withitem(expr_ty context_expr, expr_ty optional_vars, PyArena *arena) withitem_ty p; if (!context_expr) { PyErr_SetString(PyExc_ValueError, - "field context_expr is required for withitem"); + "field 'context_expr' is required for withitem"); return NULL; } p = (withitem_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -3494,7 +3492,7 @@ TypeIgnore(int lineno, string tag, PyArena *arena) type_ignore_ty p; if (!tag) { PyErr_SetString(PyExc_ValueError, - "field tag is required for TypeIgnore"); + "field 'tag' is required for TypeIgnore"); return NULL; } p = (type_ignore_ty)PyArena_Malloc(arena, sizeof(*p)); @@ -4602,11 +4600,8 @@ PyObject* ast2obj_expr_context(expr_context_ty o) case Del: Py_INCREF(astmodulestate_global->Del_singleton); return astmodulestate_global->Del_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown expr_context found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_boolop(boolop_ty o) { @@ -4617,11 +4612,8 @@ PyObject* ast2obj_boolop(boolop_ty o) case Or: Py_INCREF(astmodulestate_global->Or_singleton); return astmodulestate_global->Or_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown boolop found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_operator(operator_ty o) { @@ -4665,11 +4657,8 @@ PyObject* ast2obj_operator(operator_ty o) case FloorDiv: Py_INCREF(astmodulestate_global->FloorDiv_singleton); return astmodulestate_global->FloorDiv_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown operator found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_unaryop(unaryop_ty o) { @@ -4686,11 +4675,8 @@ PyObject* ast2obj_unaryop(unaryop_ty o) case USub: Py_INCREF(astmodulestate_global->USub_singleton); return astmodulestate_global->USub_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown unaryop found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_cmpop(cmpop_ty o) { @@ -4725,11 +4711,8 @@ PyObject* ast2obj_cmpop(cmpop_ty o) case NotIn: Py_INCREF(astmodulestate_global->NotIn_singleton); return astmodulestate_global->NotIn_singleton; - default: - /* should never happen, but just in case ... */ - PyErr_Format(PyExc_SystemError, "unknown cmpop found"); - return NULL; } + Py_UNREACHABLE(); } PyObject* ast2obj_comprehension(void* _o) diff --git a/Python/ast.c b/Python/ast.c index 1a4a3110e69559..2d20ca62aa8378 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -3164,10 +3164,7 @@ ast_for_expr_stmt(struct compiling *c, const node *n) expr1 = ast_for_testlist(c, ch); if (!expr1) return NULL; - if(!set_context(c, expr1, Store, ch)) - return NULL; - /* set_context checks that most expressions are not the left side. - Augmented assignments can only have a name, a subscript, or an + /* Augmented assignments can only have a name, a subscript, or an attribute on the left, though, so we have to explicitly check for those. 
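With the reordered checks in ast_for_expr_stmt(), an invalid augmented-assignment target is rejected before set_context() runs, and the message added in the continuation of this hunk just below names the offending expression kind. A small embedding sketch that triggers the error; whether the text comes from this ast.c path or from the PEG parser depends on which parser is selected at runtime, so the wording in the comment is only the expected shape:

    #include <Python.h>

    int main(void)
    {
        Py_Initialize();
        /* A list display is not a valid augmented-assignment target, so this
           fails to compile; with the new message the error reads roughly
           "'list' is an illegal expression for augmented assignment". */
        PyObject *code = Py_CompileString("[] += 1", "<example>", Py_file_input);
        if (code == NULL) {
            PyErr_Print();
        }
        Py_XDECREF(code);
        Py_Finalize();
        return 0;
    }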
*/ switch (expr1->kind) { @@ -3176,10 +3173,16 @@ ast_for_expr_stmt(struct compiling *c, const node *n) case Subscript_kind: break; default: - ast_error(c, ch, "illegal expression for augmented assignment"); + ast_error(c, ch, "'%s' is an illegal expression for augmented assignment", + get_expr_name(expr1)); return NULL; } + /* set_context checks that most expressions are not the left side. */ + if(!set_context(c, expr1, Store, ch)) { + return NULL; + } + ch = CHILD(n, 2); if (TYPE(ch) == testlist) expr2 = ast_for_testlist(c, ch); diff --git a/Python/ceval.c b/Python/ceval.c index e15d7e0b4603d2..43ea1c760b17e9 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -147,7 +147,7 @@ COMPUTE_EVAL_BREAKER(PyInterpreterState *interp, struct _ceval_state *ceval2) { _Py_atomic_store_relaxed(&ceval2->eval_breaker, - _Py_atomic_load_relaxed(&ceval->gil_drop_request) + _Py_atomic_load_relaxed(&ceval2->gil_drop_request) | (_Py_atomic_load_relaxed(&ceval->signals_pending) && _Py_ThreadCanHandleSignals(interp)) | (_Py_atomic_load_relaxed(&ceval2->pending.calls_to_do) @@ -159,9 +159,8 @@ COMPUTE_EVAL_BREAKER(PyInterpreterState *interp, static inline void SET_GIL_DROP_REQUEST(PyInterpreterState *interp) { - struct _ceval_runtime_state *ceval = &interp->runtime->ceval; struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->gil_drop_request, 1); + _Py_atomic_store_relaxed(&ceval2->gil_drop_request, 1); _Py_atomic_store_relaxed(&ceval2->eval_breaker, 1); } @@ -171,7 +170,7 @@ RESET_GIL_DROP_REQUEST(PyInterpreterState *interp) { struct _ceval_runtime_state *ceval = &interp->runtime->ceval; struct _ceval_state *ceval2 = &interp->ceval; - _Py_atomic_store_relaxed(&ceval->gil_drop_request, 0); + _Py_atomic_store_relaxed(&ceval2->gil_drop_request, 0); COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -251,6 +250,21 @@ ensure_tstate_not_null(const char *func, PyThreadState *tstate) } +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +int +_PyEval_ThreadsInitialized(PyInterpreterState *interp) +{ + return gil_created(&interp->ceval.gil); +} + +int +PyEval_ThreadsInitialized(void) +{ + // Fatal error if there is no current interpreter + PyInterpreterState *interp = PyInterpreterState_Get(); + return _PyEval_ThreadsInitialized(interp); +} +#else int _PyEval_ThreadsInitialized(_PyRuntimeState *runtime) { @@ -263,18 +277,25 @@ PyEval_ThreadsInitialized(void) _PyRuntimeState *runtime = &_PyRuntime; return _PyEval_ThreadsInitialized(runtime); } +#endif PyStatus _PyEval_InitGIL(PyThreadState *tstate) { +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS if (!_Py_IsMainInterpreter(tstate)) { /* Currently, the GIL is shared by all interpreters, and only the main interpreter is responsible to create and destroy it. */ return _PyStatus_OK(); } +#endif +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; +#else struct _gil_runtime_state *gil = &tstate->interp->runtime->ceval.gil; +#endif assert(!gil_created(gil)); PyThread_init_thread(); @@ -289,14 +310,20 @@ _PyEval_InitGIL(PyThreadState *tstate) void _PyEval_FiniGIL(PyThreadState *tstate) { +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS if (!_Py_IsMainInterpreter(tstate)) { /* Currently, the GIL is shared by all interpreters, and only the main interpreter is responsible to create and destroy it. 
*/ return; } +#endif +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; +#else struct _gil_runtime_state *gil = &tstate->interp->runtime->ceval.gil; +#endif if (!gil_created(gil)) { /* First Py_InitializeFromConfig() call: the GIL doesn't exist yet: do nothing. */ @@ -360,14 +387,17 @@ PyEval_ReleaseLock(void) /* This function must succeed when the current thread state is NULL. We therefore avoid PyThreadState_Get() which dumps a fatal error in debug mode. */ - drop_gil(&runtime->ceval, tstate); + struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; + drop_gil(ceval, ceval2, tstate); } void _PyEval_ReleaseLock(PyThreadState *tstate) { struct _ceval_runtime_state *ceval = &tstate->interp->runtime->ceval; - drop_gil(ceval, tstate); + struct _ceval_state *ceval2 = &tstate->interp->ceval; + drop_gil(ceval, ceval2, tstate); } void @@ -378,9 +408,13 @@ PyEval_AcquireThread(PyThreadState *tstate) take_gil(tstate); struct _gilstate_runtime_state *gilstate = &tstate->interp->runtime->gilstate; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + (void)_PyThreadState_Swap(gilstate, tstate); +#else if (_PyThreadState_Swap(gilstate, tstate) != NULL) { Py_FatalError("non-NULL old thread state"); } +#endif } void @@ -393,7 +427,9 @@ PyEval_ReleaseThread(PyThreadState *tstate) if (new_tstate != tstate) { Py_FatalError("wrong thread state"); } - drop_gil(&runtime->ceval, tstate); + struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; + drop_gil(ceval, ceval2, tstate); } #ifdef HAVE_FORK @@ -405,13 +441,18 @@ PyEval_ReleaseThread(PyThreadState *tstate) void _PyEval_ReInitThreads(_PyRuntimeState *runtime) { + PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); + ensure_tstate_not_null(__func__, tstate); + +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; +#else struct _gil_runtime_state *gil = &runtime->ceval.gil; +#endif if (!gil_created(gil)) { return; } recreate_gil(gil); - PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); - ensure_tstate_not_null(__func__, tstate); take_gil(tstate); @@ -439,13 +480,22 @@ PyThreadState * PyEval_SaveThread(void) { _PyRuntimeState *runtime = &_PyRuntime; - struct _ceval_runtime_state *ceval = &runtime->ceval; - +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyThreadState *old_tstate = _PyThreadState_GET(); + PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, old_tstate); +#else PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, NULL); +#endif ensure_tstate_not_null(__func__, tstate); + struct _ceval_runtime_state *ceval = &runtime->ceval; + struct _ceval_state *ceval2 = &tstate->interp->ceval; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + assert(gil_created(&ceval2->gil)); +#else assert(gil_created(&ceval->gil)); - drop_gil(ceval, tstate); +#endif + drop_gil(ceval, ceval2, tstate); return tstate; } @@ -702,14 +752,17 @@ int _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT; void _PyEval_InitRuntimeState(struct _ceval_runtime_state *ceval) { - ceval->recursion_limit = Py_DEFAULT_RECURSION_LIMIT; _Py_CheckRecursionLimit = Py_DEFAULT_RECURSION_LIMIT; +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS _gil_initialize(&ceval->gil); +#endif } int _PyEval_InitState(struct _ceval_state *ceval) { + ceval->recursion_limit = Py_DEFAULT_RECURSION_LIMIT; + struct _pending_calls *pending = &ceval->pending; 
assert(pending->lock == NULL); @@ -717,6 +770,11 @@ _PyEval_InitState(struct _ceval_state *ceval) if (pending->lock == NULL) { return -1; } + +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + _gil_initialize(&ceval->gil); +#endif + return 0; } @@ -733,16 +791,18 @@ _PyEval_FiniState(struct _ceval_state *ceval) int Py_GetRecursionLimit(void) { - struct _ceval_runtime_state *ceval = &_PyRuntime.ceval; - return ceval->recursion_limit; + PyThreadState *tstate = _PyThreadState_GET(); + return tstate->interp->ceval.recursion_limit; } void Py_SetRecursionLimit(int new_limit) { - struct _ceval_runtime_state *ceval = &_PyRuntime.ceval; - ceval->recursion_limit = new_limit; - _Py_CheckRecursionLimit = new_limit; + PyThreadState *tstate = _PyThreadState_GET(); + tstate->interp->ceval.recursion_limit = new_limit; + if (_Py_IsMainInterpreter(tstate)) { + _Py_CheckRecursionLimit = new_limit; + } } /* The function _Py_EnterRecursiveCall() only calls _Py_CheckRecursiveCall() @@ -753,8 +813,7 @@ Py_SetRecursionLimit(int new_limit) int _Py_CheckRecursiveCall(PyThreadState *tstate, const char *where) { - _PyRuntimeState *runtime = tstate->interp->runtime; - int recursion_limit = runtime->ceval.recursion_limit; + int recursion_limit = tstate->interp->ceval.recursion_limit; #ifdef USE_STACKCHECK tstate->stackcheck_counter = 0; @@ -763,8 +822,10 @@ _Py_CheckRecursiveCall(PyThreadState *tstate, const char *where) _PyErr_SetString(tstate, PyExc_MemoryError, "Stack overflow"); return -1; } - /* Needed for ABI backwards-compatibility (see bpo-31857) */ - _Py_CheckRecursionLimit = recursion_limit; + if (_Py_IsMainInterpreter(tstate)) { + /* Needed for ABI backwards-compatibility (see bpo-31857) */ + _Py_CheckRecursionLimit = recursion_limit; + } #endif if (tstate->recursion_critical) /* Somebody asked that we don't check for recursion. */ @@ -847,20 +908,24 @@ eval_frame_handle_pending(PyThreadState *tstate) } /* GIL drop request */ - if (_Py_atomic_load_relaxed(&ceval->gil_drop_request)) { + if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request)) { /* Give another thread a chance */ if (_PyThreadState_Swap(&runtime->gilstate, NULL) != tstate) { Py_FatalError("tstate mix-up"); } - drop_gil(ceval, tstate); + drop_gil(ceval, ceval2, tstate); /* Other threads may run now */ take_gil(tstate); +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + (void)_PyThreadState_Swap(&runtime->gilstate, tstate); +#else if (_PyThreadState_Swap(&runtime->gilstate, tstate) != NULL) { Py_FatalError("orphan tstate"); } +#endif } /* Check for asynchronous exception. */ @@ -1331,7 +1396,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, PyFrameObject *f, int throwflag) /* Do periodic things. Doing this every time through the loop would add too much overhead, so we do it only every Nth instruction. We also do it if - ``pendingcalls_to_do'' is set, i.e. when an asynchronous + ``pending.calls_to_do'' is set, i.e. when an asynchronous event needs attention (e.g. a signal handler or async I/O handler); see Py_AddPendingCall() and Py_MakePendingCalls() above. 
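Because recursion_limit now lives in the per-interpreter _ceval_state, Py_GetRecursionLimit() and Py_SetRecursionLimit() act on the interpreter of the calling thread, and the legacy _Py_CheckRecursionLimit global is only kept in sync for the main interpreter. A minimal sketch using only public API:

    #include <Python.h>

    int main(void)
    {
        Py_Initialize();

        /* Reads tstate->interp->ceval.recursion_limit of the current interpreter. */
        int old_limit = Py_GetRecursionLimit();

        /* Updates only this interpreter; an interpreter created later starts
           from Py_DEFAULT_RECURSION_LIMIT again. */
        Py_SetRecursionLimit(old_limit * 2);
        printf("recursion limit: %d -> %d\n", old_limit, Py_GetRecursionLimit());

        Py_SetRecursionLimit(old_limit);
        Py_Finalize();
        return 0;
    }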
*/ @@ -4349,7 +4414,7 @@ special_lookup(PyThreadState *tstate, PyObject *o, _Py_Identifier *id) PyObject *res; res = _PyObject_LookupSpecial(o, id); if (res == NULL && !_PyErr_Occurred(tstate)) { - _PyErr_SetObject(tstate, PyExc_AttributeError, id->object); + _PyErr_SetObject(tstate, PyExc_AttributeError, _PyUnicode_FromId(id)); return NULL; } return res; @@ -4989,7 +5054,7 @@ trace_call_function(PyThreadState *tstate, PyObject *kwnames) { PyObject *x; - if (PyCFunction_Check(func)) { + if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) { C_TRACE(x, PyObject_Vectorcall(func, args, nargs, kwnames)); return x; } @@ -5050,7 +5115,7 @@ do_call_core(PyThreadState *tstate, PyObject *func, PyObject *callargs, PyObject { PyObject *result; - if (PyCFunction_Check(func)) { + if (PyCFunction_CheckExact(func) || PyCMethod_CheckExact(func)) { C_TRACE(result, PyObject_Call(func, callargs, kwdict)); return result; } diff --git a/Python/ceval_gil.h b/Python/ceval_gil.h index a025a9fad1248e..56944b89237fb4 100644 --- a/Python/ceval_gil.h +++ b/Python/ceval_gil.h @@ -141,9 +141,14 @@ static void recreate_gil(struct _gil_runtime_state *gil) } static void -drop_gil(struct _ceval_runtime_state *ceval, PyThreadState *tstate) +drop_gil(struct _ceval_runtime_state *ceval, struct _ceval_state *ceval2, + PyThreadState *tstate) { +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &ceval2->gil; +#else struct _gil_runtime_state *gil = &ceval->gil; +#endif if (!_Py_atomic_load_relaxed(&gil->locked)) { Py_FatalError("drop_gil: GIL is not locked"); } @@ -163,7 +168,7 @@ drop_gil(struct _ceval_runtime_state *ceval, PyThreadState *tstate) MUTEX_UNLOCK(gil->mutex); #ifdef FORCE_SWITCHING - if (_Py_atomic_load_relaxed(&ceval->gil_drop_request) && tstate != NULL) { + if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request) && tstate != NULL) { MUTEX_LOCK(gil->switch_mutex); /* Not switched yet => wait */ if (((PyThreadState*)_Py_atomic_load_relaxed(&gil->last_holder)) == tstate) @@ -226,7 +231,12 @@ take_gil(PyThreadState *tstate) assert(is_tstate_valid(tstate)); PyInterpreterState *interp = tstate->interp; struct _ceval_runtime_state *ceval = &interp->runtime->ceval; + struct _ceval_state *ceval2 = &interp->ceval; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + struct _gil_runtime_state *gil = &ceval2->gil; +#else struct _gil_runtime_state *gil = &ceval->gil; +#endif /* Check that _PyEval_InitThreads() was called to create the lock */ assert(gil_created(gil)); @@ -289,12 +299,12 @@ take_gil(PyThreadState *tstate) in take_gil() while the main thread called wait_for_thread_shutdown() from Py_Finalize(). */ MUTEX_UNLOCK(gil->mutex); - drop_gil(ceval, tstate); + drop_gil(ceval, ceval2, tstate); PyThread_exit_thread(); } assert(is_tstate_valid(tstate)); - if (_Py_atomic_load_relaxed(&ceval->gil_drop_request)) { + if (_Py_atomic_load_relaxed(&ceval2->gil_drop_request)) { RESET_GIL_DROP_REQUEST(interp); } else { @@ -303,7 +313,6 @@ take_gil(PyThreadState *tstate) handle signals. Note: RESET_GIL_DROP_REQUEST() calls COMPUTE_EVAL_BREAKER(). 
*/ - struct _ceval_state *ceval2 = &interp->ceval; COMPUTE_EVAL_BREAKER(interp, ceval, ceval2); } @@ -319,10 +328,22 @@ take_gil(PyThreadState *tstate) void _PyEval_SetSwitchInterval(unsigned long microseconds) { - _PyRuntime.ceval.gil.interval = microseconds; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyInterpreterState *interp = PyInterpreterState_Get(); + struct _gil_runtime_state *gil = &interp->ceval.gil; +#else + struct _gil_runtime_state *gil = &_PyRuntime.ceval.gil; +#endif + gil->interval = microseconds; } unsigned long _PyEval_GetSwitchInterval() { - return _PyRuntime.ceval.gil.interval; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyInterpreterState *interp = PyInterpreterState_Get(); + struct _gil_runtime_state *gil = &interp->ceval.gil; +#else + struct _gil_runtime_state *gil = &_PyRuntime.ceval.gil; +#endif + return gil->interval; } diff --git a/Python/errors.c b/Python/errors.c index 9e53d050416ff1..f856a798eed1e5 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -107,7 +107,8 @@ _PyErr_SetObject(PyThreadState *tstate, PyObject *exception, PyObject *value) if (exception != NULL && !PyExceptionClass_Check(exception)) { _PyErr_Format(tstate, PyExc_SystemError, - "exception %R not a BaseException subclass", + "_PyErr_SetObject: " + "exception %R is not a BaseException subclass", exception); return; } @@ -484,6 +485,15 @@ _PyErr_ChainExceptions(PyObject *exc, PyObject *val, PyObject *tb) return; PyThreadState *tstate = _PyThreadState_GET(); + + if (!PyExceptionClass_Check(exc)) { + _PyErr_Format(tstate, PyExc_SystemError, + "_PyErr_ChainExceptions: " + "exception %R is not a BaseException subclass", + exc); + return; + } + if (_PyErr_Occurred(tstate)) { PyObject *exc2, *val2, *tb2; _PyErr_Fetch(tstate, &exc2, &val2, &tb2); diff --git a/Python/getopt.c b/Python/getopt.c index 708d9ce496287c..2e3891aae2d16a 100644 --- a/Python/getopt.c +++ b/Python/getopt.c @@ -101,7 +101,9 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) if (option == L'-') { // Parse long option. 
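The extra checks in errors.c surface through the public API as well: PyErr_SetObject() forwards to _PyErr_SetObject(), so handing it something that is not a BaseException subclass now produces a SystemError whose message names the C function. A short sketch; the wording in the comment is the expected shape, not quoted from a test run:

    #include <Python.h>

    int main(void)
    {
        Py_Initialize();
        /* 42 is not an exception class, so this sets a SystemError roughly like
           "_PyErr_SetObject: exception 42 is not a BaseException subclass". */
        PyObject *not_an_exception = PyLong_FromLong(42);
        PyErr_SetObject(not_an_exception, NULL);
        PyErr_Print();
        Py_XDECREF(not_an_exception);
        Py_Finalize();
        return 0;
    }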
if (*opt_ptr == L'\0') { - fprintf(stderr, "expected long option\n"); + if (_PyOS_opterr) { + fprintf(stderr, "expected long option\n"); + } return -1; } *longindex = 0; @@ -111,7 +113,9 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) break; } if (!opt->name) { - fprintf(stderr, "unknown option %ls\n", argv[_PyOS_optind - 1]); + if (_PyOS_opterr) { + fprintf(stderr, "unknown option %ls\n", argv[_PyOS_optind - 1]); + } return '_'; } opt_ptr = L""; @@ -119,8 +123,10 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) return opt->val; } if (_PyOS_optind >= argc) { - fprintf(stderr, "Argument expected for the %ls options\n", - argv[_PyOS_optind - 1]); + if (_PyOS_opterr) { + fprintf(stderr, "Argument expected for the %ls options\n", + argv[_PyOS_optind - 1]); + } return '_'; } _PyOS_optarg = argv[_PyOS_optind++]; @@ -128,14 +134,16 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) } if (option == 'J') { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "-J is reserved for Jython\n"); + } return '_'; } if ((ptr = wcschr(SHORT_OPTS, option)) == NULL) { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "Unknown option: -%c\n", (char)option); + } return '_'; } @@ -147,9 +155,10 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) else { if (_PyOS_optind >= argc) { - if (_PyOS_opterr) + if (_PyOS_opterr) { fprintf(stderr, "Argument expected for the -%c option\n", (char)option); + } return '_'; } diff --git a/Python/hashtable.c b/Python/hashtable.c new file mode 100644 index 00000000000000..b92e8ca08c7e1c --- /dev/null +++ b/Python/hashtable.c @@ -0,0 +1,417 @@ +/* The implementation of the hash table (_Py_hashtable_t) is based on the + cfuhash project: + http://sourceforge.net/projects/libcfu/ + + Copyright of cfuhash: + ---------------------------------- + Creation date: 2005-06-24 21:22:40 + Authors: Don + Change log: + + Copyright (c) 2005 Don Owens + All rights reserved. + + This code is released under the BSD license: + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of the author nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + OF THE POSSIBILITY OF SUCH DAMAGE. 
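By default the new table is keyed by raw pointers: _Py_hashtable_hash_ptr() hashes the address and _Py_hashtable_compare_direct() compares addresses, with PyMem_Malloc/PyMem_Free as the allocator. A usage sketch against the functions defined below; this is internal API, so it only builds inside the CPython tree, and the _Py_hashtable_destroy() cleanup call is assumed from pycore_hashtable.h (its definition falls outside this excerpt):

    #include "Python.h"
    #include "pycore_hashtable.h"

    int main(void)
    {
        Py_Initialize();

        /* Pointer keys: identity hash, direct pointer comparison, no key/value
           destructors, default (PyMem) allocator. */
        _Py_hashtable_t *ht = _Py_hashtable_new_full(_Py_hashtable_hash_ptr,
                                                     _Py_hashtable_compare_direct,
                                                     NULL, NULL, NULL);
        if (ht == NULL) {
            Py_Finalize();
            return 1;
        }

        static int key;  /* any stable address can serve as a key */
        if (_Py_hashtable_set(ht, &key, (void *)"hello") < 0) {  /* 0 on success */
            Py_Finalize();
            return 1;
        }

        const char *value = _Py_hashtable_get(ht, &key);  /* NULL if absent */
        printf("%s\n", value != NULL ? value : "(missing)");

        /* _Py_hashtable_steal() unlinks the entry, frees it, and returns its value. */
        value = _Py_hashtable_steal(ht, &key);

        _Py_hashtable_destroy(ht);  /* declared in pycore_hashtable.h */
        Py_Finalize();
        return 0;
    }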
+ ---------------------------------- +*/ + +#include "Python.h" +#include "pycore_hashtable.h" + +#define HASHTABLE_MIN_SIZE 16 +#define HASHTABLE_HIGH 0.50 +#define HASHTABLE_LOW 0.10 +#define HASHTABLE_REHASH_FACTOR 2.0 / (HASHTABLE_LOW + HASHTABLE_HIGH) + +#define BUCKETS_HEAD(SLIST) \ + ((_Py_hashtable_entry_t *)_Py_SLIST_HEAD(&(SLIST))) +#define TABLE_HEAD(HT, BUCKET) \ + ((_Py_hashtable_entry_t *)_Py_SLIST_HEAD(&(HT)->buckets[BUCKET])) +#define ENTRY_NEXT(ENTRY) \ + ((_Py_hashtable_entry_t *)_Py_SLIST_ITEM_NEXT(ENTRY)) + +/* Forward declaration */ +static int hashtable_rehash(_Py_hashtable_t *ht); + +static void +_Py_slist_init(_Py_slist_t *list) +{ + list->head = NULL; +} + + +static void +_Py_slist_prepend(_Py_slist_t *list, _Py_slist_item_t *item) +{ + item->next = list->head; + list->head = item; +} + + +static void +_Py_slist_remove(_Py_slist_t *list, _Py_slist_item_t *previous, + _Py_slist_item_t *item) +{ + if (previous != NULL) + previous->next = item->next; + else + list->head = item->next; +} + + +Py_uhash_t +_Py_hashtable_hash_ptr(const void *key) +{ + return (Py_uhash_t)_Py_HashPointerRaw(key); +} + + +int +_Py_hashtable_compare_direct(const void *key1, const void *key2) +{ + return (key1 == key2); +} + + +/* makes sure the real size of the buckets array is a power of 2 */ +static size_t +round_size(size_t s) +{ + size_t i; + if (s < HASHTABLE_MIN_SIZE) + return HASHTABLE_MIN_SIZE; + i = 1; + while (i < s) + i <<= 1; + return i; +} + + +size_t +_Py_hashtable_size(const _Py_hashtable_t *ht) +{ + size_t size = sizeof(_Py_hashtable_t); + /* buckets */ + size += ht->nbuckets * sizeof(_Py_hashtable_entry_t *); + /* entries */ + size += ht->nentries * sizeof(_Py_hashtable_entry_t); + return size; +} + + +_Py_hashtable_entry_t * +_Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) +{ + Py_uhash_t key_hash = ht->hash_func(key); + size_t index = key_hash & (ht->nbuckets - 1); + _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + while (1) { + if (entry == NULL) { + return NULL; + } + if (entry->key_hash == key_hash && ht->compare_func(key, entry->key)) { + break; + } + entry = ENTRY_NEXT(entry); + } + return entry; +} + + +// Specialized for: +// hash_func == _Py_hashtable_hash_ptr +// compare_func == _Py_hashtable_compare_direct +static _Py_hashtable_entry_t * +_Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) +{ + Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); + size_t index = key_hash & (ht->nbuckets - 1); + _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + while (1) { + if (entry == NULL) { + return NULL; + } + // Compare directly keys (ignore entry->key_hash) + if (entry->key == key) { + break; + } + entry = ENTRY_NEXT(entry); + } + return entry; +} + + +void* +_Py_hashtable_steal(_Py_hashtable_t *ht, const void *key) +{ + Py_uhash_t key_hash = ht->hash_func(key); + size_t index = key_hash & (ht->nbuckets - 1); + + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, index); + _Py_hashtable_entry_t *previous = NULL; + while (1) { + if (entry == NULL) { + // not found + return NULL; + } + if (entry->key_hash == key_hash && ht->compare_func(key, entry->key)) { + break; + } + previous = entry; + entry = ENTRY_NEXT(entry); + } + + _Py_slist_remove(&ht->buckets[index], (_Py_slist_item_t *)previous, + (_Py_slist_item_t *)entry); + ht->nentries--; + + void *value = entry->value; + ht->alloc.free(entry); + + if ((float)ht->nentries / (float)ht->nbuckets < HASHTABLE_LOW) { + // Ignore failure: error cannot be reported to the 
caller + hashtable_rehash(ht); + } + return value; +} + + +int +_Py_hashtable_set(_Py_hashtable_t *ht, const void *key, void *value) +{ + _Py_hashtable_entry_t *entry; + +#ifndef NDEBUG + /* Don't write the assertion on a single line because it is interesting + to know the duplicated entry if the assertion failed. The entry can + be read using a debugger. */ + entry = ht->get_entry_func(ht, key); + assert(entry == NULL); +#endif + + + entry = ht->alloc.malloc(sizeof(_Py_hashtable_entry_t)); + if (entry == NULL) { + /* memory allocation failed */ + return -1; + } + + entry->key_hash = ht->hash_func(key); + entry->key = (void *)key; + entry->value = value; + + ht->nentries++; + if ((float)ht->nentries / (float)ht->nbuckets > HASHTABLE_HIGH) { + if (hashtable_rehash(ht) < 0) { + ht->nentries--; + ht->alloc.free(entry); + return -1; + } + } + + size_t index = entry->key_hash & (ht->nbuckets - 1); + _Py_slist_prepend(&ht->buckets[index], (_Py_slist_item_t*)entry); + return 0; +} + + +void* +_Py_hashtable_get(_Py_hashtable_t *ht, const void *key) +{ + _Py_hashtable_entry_t *entry = ht->get_entry_func(ht, key); + if (entry != NULL) { + return entry->value; + } + else { + return NULL; + } +} + + +int +_Py_hashtable_foreach(_Py_hashtable_t *ht, + _Py_hashtable_foreach_func func, + void *user_data) +{ + for (size_t hv = 0; hv < ht->nbuckets; hv++) { + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, hv); + while (entry != NULL) { + int res = func(ht, entry->key, entry->value, user_data); + if (res) { + return res; + } + entry = ENTRY_NEXT(entry); + } + } + return 0; +} + + +static int +hashtable_rehash(_Py_hashtable_t *ht) +{ + size_t new_size = round_size((size_t)(ht->nentries * HASHTABLE_REHASH_FACTOR)); + if (new_size == ht->nbuckets) { + return 0; + } + + size_t buckets_size = new_size * sizeof(ht->buckets[0]); + _Py_slist_t *new_buckets = ht->alloc.malloc(buckets_size); + if (new_buckets == NULL) { + /* memory allocation failed */ + return -1; + } + memset(new_buckets, 0, buckets_size); + + for (size_t bucket = 0; bucket < ht->nbuckets; bucket++) { + _Py_hashtable_entry_t *entry = BUCKETS_HEAD(ht->buckets[bucket]); + while (entry != NULL) { + assert(ht->hash_func(entry->key) == entry->key_hash); + _Py_hashtable_entry_t *next = ENTRY_NEXT(entry); + size_t entry_index = entry->key_hash & (new_size - 1); + + _Py_slist_prepend(&new_buckets[entry_index], (_Py_slist_item_t*)entry); + + entry = next; + } + } + + ht->alloc.free(ht->buckets); + ht->nbuckets = new_size; + ht->buckets = new_buckets; + return 0; +} + + +_Py_hashtable_t * +_Py_hashtable_new_full(_Py_hashtable_hash_func hash_func, + _Py_hashtable_compare_func compare_func, + _Py_hashtable_destroy_func key_destroy_func, + _Py_hashtable_destroy_func value_destroy_func, + _Py_hashtable_allocator_t *allocator) +{ + _Py_hashtable_allocator_t alloc; + if (allocator == NULL) { + alloc.malloc = PyMem_Malloc; + alloc.free = PyMem_Free; + } + else { + alloc = *allocator; + } + + _Py_hashtable_t *ht = (_Py_hashtable_t *)alloc.malloc(sizeof(_Py_hashtable_t)); + if (ht == NULL) { + return ht; + } + + ht->nbuckets = HASHTABLE_MIN_SIZE; + ht->nentries = 0; + + size_t buckets_size = ht->nbuckets * sizeof(ht->buckets[0]); + ht->buckets = alloc.malloc(buckets_size); + if (ht->buckets == NULL) { + alloc.free(ht); + return NULL; + } + memset(ht->buckets, 0, buckets_size); + + ht->get_entry_func = _Py_hashtable_get_entry_generic; + ht->hash_func = hash_func; + ht->compare_func = compare_func; + ht->key_destroy_func = key_destroy_func; + ht->value_destroy_func = 
value_destroy_func; + ht->alloc = alloc; + if (ht->hash_func == _Py_hashtable_hash_ptr + && ht->compare_func == _Py_hashtable_compare_direct) + { + ht->get_entry_func = _Py_hashtable_get_entry_ptr; + } + return ht; +} + + +_Py_hashtable_t * +_Py_hashtable_new(_Py_hashtable_hash_func hash_func, + _Py_hashtable_compare_func compare_func) +{ + return _Py_hashtable_new_full(hash_func, compare_func, + NULL, NULL, NULL); +} + + +static void +_Py_hashtable_destroy_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry) +{ + if (ht->key_destroy_func) { + ht->key_destroy_func(entry->key); + } + if (ht->value_destroy_func) { + ht->value_destroy_func(entry->value); + } + ht->alloc.free(entry); +} + + +void +_Py_hashtable_clear(_Py_hashtable_t *ht) +{ + for (size_t i=0; i < ht->nbuckets; i++) { + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i); + while (entry != NULL) { + _Py_hashtable_entry_t *next = ENTRY_NEXT(entry); + _Py_hashtable_destroy_entry(ht, entry); + entry = next; + } + _Py_slist_init(&ht->buckets[i]); + } + ht->nentries = 0; + // Ignore failure: clear function is not expected to fail + // because of a memory allocation failure. + (void)hashtable_rehash(ht); +} + + +void +_Py_hashtable_destroy(_Py_hashtable_t *ht) +{ + for (size_t i = 0; i < ht->nbuckets; i++) { + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, i); + while (entry) { + _Py_hashtable_entry_t *entry_next = ENTRY_NEXT(entry); + _Py_hashtable_destroy_entry(ht, entry); + entry = entry_next; + } + } + + ht->alloc.free(ht->buckets); + ht->alloc.free(ht); +} diff --git a/Python/marshal.c b/Python/marshal.c index b4429aea502d3f..a0f6b9812601be 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -12,7 +12,7 @@ #include "longintrepr.h" #include "code.h" #include "marshal.h" -#include "../Modules/hashtable.h" +#include "pycore_hashtable.h" /*[clinic input] module marshal @@ -302,17 +302,17 @@ w_ref(PyObject *v, char *flag, WFILE *p) if (Py_REFCNT(v) == 1) return 0; - entry = _Py_HASHTABLE_GET_ENTRY(p->hashtable, v); + entry = _Py_hashtable_get_entry(p->hashtable, v); if (entry != NULL) { /* write the reference index to the stream */ - _Py_HASHTABLE_ENTRY_READ_DATA(p->hashtable, entry, w); + w = (int)(uintptr_t)entry->value; /* we don't store "long" indices in the dict */ assert(0 <= w && w <= 0x7fffffff); w_byte(TYPE_REF, p); w_long(w, p); return 1; } else { - size_t s = p->hashtable->entries; + size_t s = p->hashtable->nentries; /* we don't support long indices */ if (s >= 0x7fffffff) { PyErr_SetString(PyExc_ValueError, "too many objects"); @@ -320,7 +320,7 @@ w_ref(PyObject *v, char *flag, WFILE *p) } w = (int)s; Py_INCREF(v); - if (_Py_HASHTABLE_SET(p->hashtable, v, w) < 0) { + if (_Py_hashtable_set(p->hashtable, v, (void *)(uintptr_t)w) < 0) { Py_DECREF(v); goto err; } @@ -545,13 +545,20 @@ w_complex_object(PyObject *v, char flag, WFILE *p) } } +static void +w_decref_entry(void *key) +{ + PyObject *entry_key = (PyObject *)key; + Py_XDECREF(entry_key); +} + static int w_init_refs(WFILE *wf, int version) { if (version >= 3) { - wf->hashtable = _Py_hashtable_new(sizeof(PyObject *), sizeof(int), - _Py_hashtable_hash_ptr, - _Py_hashtable_compare_direct); + wf->hashtable = _Py_hashtable_new_full(_Py_hashtable_hash_ptr, + _Py_hashtable_compare_direct, + w_decref_entry, NULL, NULL); if (wf->hashtable == NULL) { PyErr_NoMemory(); return -1; @@ -560,22 +567,10 @@ w_init_refs(WFILE *wf, int version) return 0; } -static int -w_decref_entry(_Py_hashtable_t *ht, _Py_hashtable_entry_t *entry, - void *Py_UNUSED(data)) -{ - PyObject 
*entry_key; - - _Py_HASHTABLE_ENTRY_READ_KEY(ht, entry, entry_key); - Py_XDECREF(entry_key); - return 0; -} - static void w_clear_refs(WFILE *wf) { if (wf->hashtable != NULL) { - _Py_hashtable_foreach(wf->hashtable, w_decref_entry, NULL); _Py_hashtable_destroy(wf->hashtable); } } diff --git a/Python/preconfig.c b/Python/preconfig.c index 262738fa57da56..fd94d7dda1c298 100644 --- a/Python/preconfig.c +++ b/Python/preconfig.c @@ -291,7 +291,17 @@ _PyPreConfig_InitCompatConfig(PyPreConfig *config) config->coerce_c_locale_warn = 0; config->dev_mode = -1; +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + /* bpo-40512: pymalloc is not compatible with subinterpreters, + force usage of libc malloc() which is thread-safe. */ +#ifdef Py_DEBUG + config->allocator = PYMEM_ALLOCATOR_MALLOC_DEBUG; +#else + config->allocator = PYMEM_ALLOCATOR_MALLOC; +#endif +#else config->allocator = PYMEM_ALLOCATOR_NOT_SET; +#endif #ifdef MS_WINDOWS config->legacy_windows_fs_encoding = -1; #endif diff --git a/Python/pyhash.c b/Python/pyhash.c index a6f42e71cf643c..3843079fbbce14 100644 --- a/Python/pyhash.c +++ b/Python/pyhash.c @@ -129,16 +129,22 @@ _Py_HashDouble(double v) } Py_hash_t -_Py_HashPointer(const void *p) +_Py_HashPointerRaw(const void *p) { - Py_hash_t x; size_t y = (size_t)p; /* bottom 3 or 4 bits are likely to be 0; rotate y by 4 to avoid excessive hash collisions for dicts and sets */ y = (y >> 4) | (y << (8 * SIZEOF_VOID_P - 4)); - x = (Py_hash_t)y; - if (x == -1) + return (Py_hash_t)y; +} + +Py_hash_t +_Py_HashPointer(const void *p) +{ + Py_hash_t x = _Py_HashPointerRaw(p); + if (x == -1) { x = -2; + } return x; } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 5726a559cfcb73..da66a82ada70a8 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1561,9 +1561,13 @@ new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) /* Copy the current interpreter config into the new interpreter */ const PyConfig *config; +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS if (save_tstate != NULL) { config = _PyInterpreterState_GetConfig(save_tstate->interp); - } else { + } + else +#endif + { /* No current thread state, copy from the main interpreter */ PyInterpreterState *main_interp = PyInterpreterState_Main(); config = _PyInterpreterState_GetConfig(main_interp); @@ -1575,19 +1579,19 @@ new_interpreter(PyThreadState **tstate_p, int isolated_subinterpreter) } interp->config._isolated_interpreter = isolated_subinterpreter; - status = pycore_interp_init(tstate); + status = init_interp_create_gil(tstate); if (_PyStatus_EXCEPTION(status)) { goto error; } - status = init_interp_main(tstate); + status = pycore_interp_init(tstate); if (_PyStatus_EXCEPTION(status)) { goto error; } - status = init_interp_create_gil(tstate); + status = init_interp_main(tstate); if (_PyStatus_EXCEPTION(status)) { - return status; + goto error; } *tstate_p = tstate; diff --git a/Python/pystate.c b/Python/pystate.c index dd95750027241b..119fe31a84ba12 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -956,6 +956,14 @@ _PyThreadState_DeleteExcept(_PyRuntimeState *runtime, PyThreadState *tstate) } +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS +PyThreadState* +_PyThreadState_GetTSS(void) { + return PyThread_tss_get(&_PyRuntime.gilstate.autoTSSkey); +} +#endif + + PyThreadState * _PyThreadState_UncheckedGet(void) { @@ -975,7 +983,11 @@ PyThreadState_Get(void) PyThreadState * _PyThreadState_Swap(struct _gilstate_runtime_state *gilstate, PyThreadState *newts) { +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + 
PyThreadState *oldts = _PyThreadState_GetTSS(); +#else PyThreadState *oldts = _PyRuntimeGILState_GetThreadState(gilstate); +#endif _PyRuntimeGILState_SetThreadState(gilstate, newts); /* It should not be possible for more than one thread state @@ -993,6 +1005,9 @@ _PyThreadState_Swap(struct _gilstate_runtime_state *gilstate, PyThreadState *new Py_FatalError("Invalid thread state for this thread"); errno = err; } +#endif +#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + PyThread_tss_set(&gilstate->autoTSSkey, newts); #endif return oldts; } @@ -1363,7 +1378,9 @@ PyGILState_Ensure(void) /* Ensure that _PyEval_InitThreads() and _PyGILState_Init() have been called by Py_Initialize() */ +#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS assert(_PyEval_ThreadsInitialized(runtime)); +#endif assert(gilstate->autoInterpreterState); PyThreadState *tcur = (PyThreadState *)PyThread_tss_get(&gilstate->autoTSSkey); diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 1b79a33c814da1..160f44d38e2e19 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -554,37 +554,65 @@ parse_syntax_error(PyObject *err, PyObject **message, PyObject **filename, static void print_error_text(PyObject *f, int offset, PyObject *text_obj) { - const char *text; - const char *nl; - - text = PyUnicode_AsUTF8(text_obj); + /* Convert text to a char pointer; return if error */ + const char *text = PyUnicode_AsUTF8(text_obj); if (text == NULL) return; - if (offset >= 0) { - if (offset > 0 && (size_t)offset == strlen(text) && text[offset - 1] == '\n') - offset--; - for (;;) { - nl = strchr(text, '\n'); - if (nl == NULL || nl-text >= offset) - break; - offset -= (int)(nl+1-text); - text = nl+1; + /* Convert offset from 1-based to 0-based */ + offset--; + + /* Strip leading whitespace from text, adjusting offset as we go */ + while (*text == ' ' || *text == '\t' || *text == '\f') { + text++; + offset--; + } + + /* Calculate text length excluding trailing newline */ + Py_ssize_t len = strlen(text); + if (len > 0 && text[len-1] == '\n') { + len--; + } + + /* Clip offset to at most len */ + if (offset > len) { + offset = len; + } + + /* Skip past newlines embedded in text */ + for (;;) { + const char *nl = strchr(text, '\n'); + if (nl == NULL) { + break; } - while (*text == ' ' || *text == '\t' || *text == '\f') { - text++; - offset--; + Py_ssize_t inl = nl - text; + if (inl >= (Py_ssize_t)offset) { + break; } + inl += 1; + text += inl; + len -= inl; + offset -= (int)inl; } + + /* Print text */ PyFile_WriteString(" ", f); PyFile_WriteString(text, f); - if (*text == '\0' || text[strlen(text)-1] != '\n') + + /* Make sure there's a newline at the end */ + if (text[len] != '\n') { PyFile_WriteString("\n", f); - if (offset == -1) + } + + /* Don't print caret if it points to the left of the text */ + if (offset < 0) return; + + /* Write caret line */ PyFile_WriteString(" ", f); - while (--offset > 0) + while (--offset >= 0) { PyFile_WriteString(" ", f); + } PyFile_WriteString("^\n", f); } @@ -1603,9 +1631,6 @@ err_input(perrdetail *err) msg = "unexpected character after line continuation character"; break; - case E_IDENTIFIER: - msg = "invalid character in identifier"; - break; case E_BADSINGLE: msg = "multiple statements found while compiling a single statement"; break; diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index 382e29a28ab48e..b07ffdd928f154 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -657,9 +657,14 @@ def output_templates(self, f): if not p.is_optional(): min_pos = i + 
requires_defining_class = any( + isinstance(p.converter, defining_class_converter) + for p in parameters) + meth_o = (len(parameters) == 1 and parameters[0].is_positional_only() and not converters[0].is_optional() and + not requires_defining_class and not new_or_init) # we have to set these things before we're done: @@ -717,6 +722,11 @@ def output_templates(self, f): {c_basename}({self_type}{self_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) """) + parser_prototype_def_class = normalize_snippet(""" + static PyObject * + {c_basename}({self_type}{self_name}, PyTypeObject *{defining_class_name}, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) + """) + # parser_body_fields remembers the fields passed in to the # previous call to parser_body. this is used for an awful hack. parser_body_fields = () @@ -824,7 +834,7 @@ def parser_body(prototype, *fields, declarations=''): parser_definition = parser_body(parser_prototype, ' {option_group_parsing}') - elif pos_only == len(parameters): + elif not requires_defining_class and pos_only == len(parameters): if not new_or_init: # positional-only, but no option groups # we only need one call to _PyArg_ParseStack @@ -891,7 +901,7 @@ def parser_body(prototype, *fields, declarations=''): parser_prototype = parser_prototype_fastcall_keywords argname_fmt = 'args[%d]' declarations = normalize_snippet(""" - static const char * const _keywords[] = {{{keywords}, NULL}}; + static const char * const _keywords[] = {{{keywords} NULL}}; static _PyArg_Parser _parser = {{NULL, _keywords, "{name}", 0}}; PyObject *argsbuf[%s]; """ % len(converters)) @@ -909,7 +919,7 @@ def parser_body(prototype, *fields, declarations=''): parser_prototype = parser_prototype_keyword argname_fmt = 'fastargs[%d]' declarations = normalize_snippet(""" - static const char * const _keywords[] = {{{keywords}, NULL}}; + static const char * const _keywords[] = {{{keywords} NULL}}; static _PyArg_Parser _parser = {{NULL, _keywords, "{name}", 0}}; PyObject *argsbuf[%s]; PyObject * const *fastargs; @@ -923,6 +933,9 @@ def parser_body(prototype, *fields, declarations=''): goto exit; }} """ % (min_pos, max_pos, min_kw_only), indent=4)] + if requires_defining_class: + flags = 'METH_METHOD|' + flags + parser_prototype = parser_prototype_def_class add_label = None for i, p in enumerate(parameters): @@ -983,11 +996,11 @@ def parser_body(prototype, *fields, declarations=''): parser_code.append("%s:" % add_label) else: declarations = ( - 'static const char * const _keywords[] = {{{keywords}, NULL}};\n' + 'static const char * const _keywords[] = {{{keywords} NULL}};\n' 'static _PyArg_Parser _parser = {{"{format_units}:{name}", _keywords, 0}};') if not new_or_init: parser_code = [normalize_snippet(""" - if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser{parse_arguments_comma} {parse_arguments})) {{ goto exit; }} @@ -1021,6 +1034,9 @@ def parser_body(prototype, *fields, declarations=''): if parses_keywords: assert parses_positional + if requires_defining_class: + raise ValueError("Slot methods cannot access their defining class.") + if not parses_keywords: fields.insert(0, normalize_snippet(""" if ({self_type_check}!_PyArg_NoKeywords("{name}", kwargs)) {{ @@ -1289,7 +1305,8 @@ def render_function(self, clinic, f): template_dict['docstring'] = self.docstring_for_c_string(f) template_dict['self_name'] = template_dict['self_type'] = template_dict['self_type_check'] = '' - 
f_self.converter.set_template_dict(template_dict) + for converter in converters: + converter.set_template_dict(template_dict) f.return_converter.render(f, data) template_dict['impl_return_type'] = f.return_converter.type @@ -1297,9 +1314,13 @@ def render_function(self, clinic, f): template_dict['declarations'] = format_escape("\n".join(data.declarations)) template_dict['initializers'] = "\n\n".join(data.initializers) template_dict['modifications'] = '\n\n'.join(data.modifications) - template_dict['keywords'] = '"' + '", "'.join(data.keywords) + '"' + template_dict['keywords'] = ' '.join('"' + k + '",' for k in data.keywords) template_dict['format_units'] = ''.join(data.format_units) template_dict['parse_arguments'] = ', '.join(data.parse_arguments) + if data.parse_arguments: + template_dict['parse_arguments_comma'] = ','; + else: + template_dict['parse_arguments_comma'] = ''; template_dict['impl_parameters'] = ", ".join(data.impl_parameters) template_dict['impl_arguments'] = ", ".join(data.impl_arguments) template_dict['return_conversion'] = format_escape("".join(data.return_conversion).rstrip()) @@ -2678,6 +2699,10 @@ def parse_arg(self, argname, displayname): """.format(argname=argname, paramname=self.name, cast=cast) return None + def set_template_dict(self, template_dict): + pass + + type_checks = { '&PyLong_Type': ('PyLong_Check', 'int'), '&PyTuple_Type': ('PyTuple_Check', 'tuple'), @@ -2730,6 +2755,25 @@ def parse_arg(self, argname, displayname): """.format(argname=argname, paramname=self.name) return super().parse_arg(argname, displayname) +class defining_class_converter(CConverter): + """ + A special-case converter: + this is the default converter used for the defining class. + """ + type = 'PyTypeObject *' + format_unit = '' + show_in_signature = False + + def converter_init(self, *, type=None): + self.specified_type = type + + def render(self, parameter, data): + self._render_self(parameter, data) + + def set_template_dict(self, template_dict): + template_dict['defining_class_name'] = self.name + + class char_converter(CConverter): type = 'char' default_type = (bytes, bytearray) @@ -4508,6 +4552,19 @@ def bad_node(self, node): else: fail("A 'self' parameter, if specified, must be the very first thing in the parameter block.") + if isinstance(converter, defining_class_converter): + _lp = len(self.function.parameters) + if _lp == 1: + if (self.parameter_state != self.ps_required): + fail("A 'defining_class' parameter cannot be marked optional.") + if value is not unspecified: + fail("A 'defining_class' parameter cannot have a default value.") + if self.group: + fail("A 'defining_class' parameter cannot be in an optional group.") + else: + fail("A 'defining_class' parameter, if specified, must either be the first thing in the parameter block, or come just after 'self'.") + + p = Parameter(parameter_name, kind, function=self.function, converter=converter, default=value, group=self.group) if parameter_name in self.function.parameters: diff --git a/Tools/msi/lib/lib_files.wxs b/Tools/msi/lib/lib_files.wxs index b462372512f6de..95541599b9bb29 100644 --- a/Tools/msi/lib/lib_files.wxs +++ b/Tools/msi/lib/lib_files.wxs @@ -1,6 +1,6 @@  - + diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index 40004e7875278d..6c9aa3f2ba7866 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -1,5 +1,5 @@ import ast -from dataclasses import dataclass, field +from dataclasses import dataclass import re from 
typing import Any, Dict, IO, Optional, List, Text, Tuple, Set from enum import Enum @@ -68,6 +68,7 @@ class FunctionCall: return_type: Optional[str] = None nodetype: Optional[NodeTypes] = None force_true: bool = False + comment: Optional[str] = None def __str__(self) -> str: parts = [] @@ -78,6 +79,8 @@ def __str__(self) -> str: parts.append(", 1") if self.assigned_variable: parts = ["(", self.assigned_variable, " = ", *parts, ")"] + if self.comment: + parts.append(f" // {self.comment}") return "".join(parts) @@ -98,11 +101,12 @@ def keyword_helper(self, keyword: str) -> FunctionCall: if keyword not in self.keyword_cache: self.keyword_cache[keyword] = self.gen.keyword_type() return FunctionCall( - assigned_variable="keyword", + assigned_variable="_keyword", function="_PyPegen_expect_token", arguments=["p", self.keyword_cache[keyword]], return_type="Token *", nodetype=NodeTypes.KEYWORD, + comment=f"token='{keyword}'", ) def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: @@ -115,6 +119,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: arguments=["p"], nodetype=BASE_NODETYPES[name], return_type="expr_ty", + comment=name, ) return FunctionCall( assigned_variable=f"{name.lower()}_var", @@ -122,6 +127,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: arguments=["p", name], nodetype=NodeTypes.GENERIC_TOKEN, return_type="Token *", + comment=f"token='{name}'", ) type = None @@ -134,6 +140,7 @@ def visit_NameLeaf(self, node: NameLeaf) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type=type, + comment=f"{node}", ) def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: @@ -144,11 +151,12 @@ def visit_StringLeaf(self, node: StringLeaf) -> FunctionCall: assert val in self.exact_tokens, f"{node.value} is not a known literal" type = self.exact_tokens[val] return FunctionCall( - assigned_variable="literal", + assigned_variable="_literal", function=f"_PyPegen_expect_token", arguments=["p", type], nodetype=NodeTypes.GENERIC_TOKEN, return_type="Token *", + comment=f"token='{val}'", ) def visit_Rhs(self, node: Rhs) -> FunctionCall: @@ -167,7 +175,10 @@ def can_we_inline(node: Rhs) -> int: else: name = self.gen.name_node(node) self.cache[node] = FunctionCall( - assigned_variable=f"{name}_var", function=f"{name}_rule", arguments=["p"], + assigned_variable=f"{name}_var", + function=f"{name}_rule", + arguments=["p"], + comment=f"{node}", ) return self.cache[node] @@ -190,6 +201,7 @@ def lookahead_call_helper(self, node: Lookahead, positive: int) -> FunctionCall: function=f"_PyPegen_lookahead_with_int", arguments=[positive, call.function, *call.arguments], return_type="int", + comment=f"token={node.node}", ) else: return FunctionCall( @@ -207,10 +219,11 @@ def visit_NegativeLookahead(self, node: NegativeLookahead) -> FunctionCall: def visit_Opt(self, node: Opt) -> FunctionCall: call = self.visit(node.node) return FunctionCall( - assigned_variable="opt_var", + assigned_variable="_opt_var", function=call.function, arguments=call.arguments, force_true=True, + comment=f"{node}", ) def visit_Repeat0(self, node: Repeat0) -> FunctionCall: @@ -222,6 +235,7 @@ def visit_Repeat0(self, node: Repeat0) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type="asdl_seq *", + comment=f"{node}", ) return self.cache[node] @@ -234,6 +248,7 @@ def visit_Repeat1(self, node: Repeat1) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type="asdl_seq *", + comment=f"{node}", ) return self.cache[node] @@ -246,6 +261,7 @@ def visit_Gather(self, node: 
Gather) -> FunctionCall: function=f"{name}_rule", arguments=["p"], return_type="asdl_seq *", + comment=f"{node}", ) return self.cache[node] @@ -254,7 +270,7 @@ def visit_Group(self, node: Group) -> FunctionCall: def visit_Cut(self, node: Cut) -> FunctionCall: return FunctionCall( - assigned_variable="cut_var", + assigned_variable="_cut_var", return_type="int", function="1", nodetype=NodeTypes.CUT_OPERATOR, @@ -404,46 +420,46 @@ def _set_up_token_start_metadata_extraction(self) -> None: self.print("p->error_indicator = 1;") self.print("return NULL;") self.print("}") - self.print("int start_lineno = p->tokens[mark]->lineno;") - self.print("UNUSED(start_lineno); // Only used by EXTRA macro") - self.print("int start_col_offset = p->tokens[mark]->col_offset;") - self.print("UNUSED(start_col_offset); // Only used by EXTRA macro") + self.print("int _start_lineno = p->tokens[_mark]->lineno;") + self.print("UNUSED(_start_lineno); // Only used by EXTRA macro") + self.print("int _start_col_offset = p->tokens[_mark]->col_offset;") + self.print("UNUSED(_start_col_offset); // Only used by EXTRA macro") def _set_up_token_end_metadata_extraction(self) -> None: - self.print("Token *token = _PyPegen_get_last_nonnwhitespace_token(p);") - self.print("if (token == NULL) {") + self.print("Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);") + self.print("if (_token == NULL) {") with self.indent(): self.print("return NULL;") self.print("}") - self.print(f"int end_lineno = token->end_lineno;") - self.print("UNUSED(end_lineno); // Only used by EXTRA macro") - self.print(f"int end_col_offset = token->end_col_offset;") - self.print("UNUSED(end_col_offset); // Only used by EXTRA macro") + self.print("int _end_lineno = _token->end_lineno;") + self.print("UNUSED(_end_lineno); // Only used by EXTRA macro") + self.print("int _end_col_offset = _token->end_col_offset;") + self.print("UNUSED(_end_col_offset); // Only used by EXTRA macro") def _set_up_rule_memoization(self, node: Rule, result_type: str) -> None: self.print("{") with self.indent(): - self.print(f"{result_type} res = NULL;") - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &res))") + self.print(f"{result_type} _res = NULL;") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") with self.indent(): - self.print("return res;") - self.print("int mark = p->mark;") - self.print("int resmark = p->mark;") + self.print("return _res;") + self.print("int _mark = p->mark;") + self.print("int _resmark = p->mark;") self.print("while (1) {") with self.indent(): self.call_with_errorcheck_return( - f"_PyPegen_update_memo(p, mark, {node.name}_type, res)", "res" + f"_PyPegen_update_memo(p, _mark, {node.name}_type, _res)", "_res" ) - self.print("p->mark = mark;") - self.print(f"void *raw = {node.name}_raw(p);") - self.print("if (raw == NULL || p->mark <= resmark)") + self.print("p->mark = _mark;") + self.print(f"void *_raw = {node.name}_raw(p);") + self.print("if (_raw == NULL || p->mark <= _resmark)") with self.indent(): self.print("break;") - self.print("resmark = p->mark;") - self.print("res = raw;") + self.print(f"_resmark = p->mark;") + self.print("_res = _raw;") self.print("}") - self.print("p->mark = resmark;") - self.print("return res;") + self.print(f"p->mark = _resmark;") + self.print("return _res;") self.print("}") self.print(f"static {result_type}") self.print(f"{node.name}_raw(Parser *p)") @@ -459,12 +475,12 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N with self.indent(): self.print("return 
NULL;") self.print("}") - self.print(f"{result_type} res = NULL;") + self.print(f"{result_type} _res = NULL;") if memoize: - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &res))") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") with self.indent(): - self.print("return res;") - self.print("int mark = p->mark;") + self.print("return _res;") + self.print("int _mark = p->mark;") if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): self._set_up_token_start_metadata_extraction() self.visit( @@ -474,13 +490,13 @@ def _handle_default_rule_body(self, node: Rule, rhs: Rhs, result_type: str) -> N rulename=node.name if memoize else None, ) if self.debug: - self.print(f'fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') - self.print("res = NULL;") + self.print('fprintf(stderr, "Fail at %d: {node.name}\\n", p->mark);') + self.print("_res = NULL;") self.print(" done:") with self.indent(): if memoize: - self.print(f"_PyPegen_insert_memo(p, mark, {node.name}_type, res);") - self.print("return res;") + self.print(f"_PyPegen_insert_memo(p, _mark, {node.name}_type, _res);") + self.print("return _res;") def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: memoize = self._should_memoize(node) @@ -491,17 +507,17 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: with self.indent(): self.print("return NULL;") self.print("}") - self.print(f"void *res = NULL;") + self.print("void *_res = NULL;") if memoize: - self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &res))") + self.print(f"if (_PyPegen_is_memoized(p, {node.name}_type, &_res))") with self.indent(): - self.print("return res;") - self.print("int mark = p->mark;") - self.print("int start_mark = p->mark;") - self.print("void **children = PyMem_Malloc(sizeof(void *));") - self.out_of_memory_return(f"!children", "NULL") - self.print("ssize_t children_capacity = 1;") - self.print("ssize_t n = 0;") + self.print("return _res;") + self.print("int _mark = p->mark;") + self.print("int _start_mark = p->mark;") + self.print("void **_children = PyMem_Malloc(sizeof(void *));") + self.out_of_memory_return(f"!_children", "NULL") + self.print("ssize_t _children_capacity = 1;") + self.print("ssize_t _n = 0;") if any(alt.action and "EXTRA" in alt.action for alt in rhs.alts): self._set_up_token_start_metadata_extraction() self.visit( @@ -511,23 +527,23 @@ def _handle_loop_rule_body(self, node: Rule, rhs: Rhs) -> None: rulename=node.name if memoize else None, ) if is_repeat1: - self.print("if (n == 0) {") + self.print("if (_n == 0 || p->error_indicator) {") with self.indent(): - self.print("PyMem_Free(children);") + self.print("PyMem_Free(_children);") self.print("return NULL;") self.print("}") - self.print("asdl_seq *seq = _Py_asdl_seq_new(n, p->arena);") + self.print("asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena);") self.out_of_memory_return( - f"!seq", + "!_seq", "NULL", message=f"asdl_seq_new {node.name}", - cleanup_code="PyMem_Free(children);", + cleanup_code="PyMem_Free(_children);", ) - self.print("for (int i = 0; i < n; i++) asdl_seq_SET(seq, i, children[i]);") - self.print("PyMem_Free(children);") + self.print("for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]);") + self.print("PyMem_Free(_children);") if node.name: - self.print(f"_PyPegen_insert_memo(p, start_mark, {node.name}_type, seq);") - self.print("return seq;") + self.print(f"_PyPegen_insert_memo(p, _start_mark, {node.name}_type, _seq);") + self.print("return _seq;") def visit_Rule(self, node: Rule) -> 
None: is_loop = node.is_loop() @@ -585,9 +601,9 @@ def join_conditions(self, keyword: str, node: Any) -> None: self.print(")") def emit_action(self, node: Alt, cleanup_code: Optional[str] = None) -> None: - self.print(f"res = {node.action};") + self.print(f"_res = {node.action};") - self.print("if (res == NULL && PyErr_Occurred()) {") + self.print("if (_res == NULL && PyErr_Occurred()) {") with self.indent(): self.print("p->error_indicator = 1;") if cleanup_code: @@ -597,7 +613,7 @@ def emit_action(self, node: Alt, cleanup_code: Optional[str] = None) -> None: if self.debug: self.print( - f'fprintf(stderr, "Hit with action [%d-%d]: %s\\n", mark, p->mark, "{node}");' + f'fprintf(stderr, "Hit with action [%d-%d]: %s\\n", _mark, p->mark, "{node}");' ) def emit_default_action(self, is_gather: bool, node: Alt) -> None: @@ -605,7 +621,7 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: if is_gather: assert len(self.local_variable_names) == 2 self.print( - f"res = _PyPegen_seq_insert_in_front(p, " + f"_res = _PyPegen_seq_insert_in_front(p, " f"{self.local_variable_names[0]}, {self.local_variable_names[1]});" ) else: @@ -614,17 +630,17 @@ def emit_default_action(self, is_gather: bool, node: Alt) -> None: f'fprintf(stderr, "Hit without action [%d:%d]: %s\\n", mark, p->mark, "{node}");' ) self.print( - f"res = _PyPegen_dummy_name(p, {', '.join(self.local_variable_names)});" + f"_res = _PyPegen_dummy_name(p, {', '.join(self.local_variable_names)});" ) else: if self.debug: self.print( f'fprintf(stderr, "Hit with default action [%d:%d]: %s\\n", mark, p->mark, "{node}");' ) - self.print(f"res = {self.local_variable_names[0]};") + self.print(f"_res = {self.local_variable_names[0]};") def emit_dummy_action(self) -> None: - self.print(f"res = _PyPegen_dummy_name(p);") + self.print("_res = _PyPegen_dummy_name(p);") def handle_alt_normal(self, node: Alt, is_gather: bool) -> None: self.join_conditions(keyword="if", node=node) @@ -657,20 +673,22 @@ def handle_alt_loop(self, node: Alt, is_gather: bool, rulename: Optional[str]) - if self.skip_actions: self.emit_dummy_action() elif node.action: - self.emit_action(node, cleanup_code="PyMem_Free(children);") + self.emit_action(node, cleanup_code="PyMem_Free(_children);") else: self.emit_default_action(is_gather, node) # Add the result of rule to the temporary buffer of children. This buffer # will populate later an asdl_seq with all elements to return. 
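To make the renaming easier to follow, here is roughly the shape of the C code this generator emits for a repeated (loop) rule once the temporaries carry the underscore prefix. It is a hand-written simplification rather than real generator output: thing and thing_rule() are placeholder names, the header include is assumed, and memoization and the detailed error paths are trimmed.

#include "pegen.h"   /* assumed: Parser, asdl_seq, _Py_asdl_seq_new(), asdl_seq_SET() */

static void *thing_rule(Parser *p);    /* placeholder for another generated rule */

/* thing*  (a zero-or-more repetition), schematically: */
static asdl_seq *
_loop0_thing_rule(Parser *p)
{
    void *_res = NULL;
    int _mark = p->mark;
    void **_children = PyMem_Malloc(sizeof(void *));
    if (!_children) {
        return NULL;                       /* real output raises MemoryError here */
    }
    ssize_t _children_capacity = 1;
    ssize_t _n = 0;
    void *thing_var;
    while ((thing_var = thing_rule(p))) {  // thing
        _res = thing_var;
        if (_n == _children_capacity) {    /* grow the temporary buffer */
            _children_capacity *= 2;
            _children = PyMem_Realloc(_children,
                                      _children_capacity * sizeof(void *));
            if (!_children) {
                return NULL;
            }
        }
        _children[_n++] = _res;            /* collect this repetition */
        _mark = p->mark;
    }
    p->mark = _mark;                       /* back up over the failed attempt */
    asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena);
    if (!_seq) {
        PyMem_Free(_children);
        return NULL;
    }
    for (int i = 0; i < _n; i++) {
        asdl_seq_SET(_seq, i, _children[i]);
    }
    PyMem_Free(_children);
    return _seq;
}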
- self.print("if (n == children_capacity) {") + self.print("if (_n == _children_capacity) {") with self.indent(): - self.print("children_capacity *= 2;") - self.print("children = PyMem_Realloc(children, children_capacity*sizeof(void *));") - self.out_of_memory_return(f"!children", "NULL", message=f"realloc {rulename}") + self.print("_children_capacity *= 2;") + self.print( + "_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));" + ) + self.out_of_memory_return(f"!_children", "NULL", message=f"realloc {rulename}") self.print("}") - self.print(f"children[n++] = res;") - self.print("mark = p->mark;") + self.print("_children[_n++] = _res;") + self.print("_mark = p->mark;") self.print("}") def visit_Alt( @@ -685,11 +703,11 @@ def visit_Alt( var_type = "void *" else: var_type += " " - if v == "cut_var": + if v == "_cut_var": v += " = 0" # cut_var must be initialized self.print(f"{var_type}{v};") - if v == "opt_var": - self.print("UNUSED(opt_var); // Silence compiler warnings") + if v == "_opt_var": + self.print("UNUSED(_opt_var); // Silence compiler warnings") with self.local_variable_context(): if is_loop: @@ -697,9 +715,9 @@ def visit_Alt( else: self.handle_alt_normal(node, is_gather) - self.print("p->mark = mark;") - if "cut_var" in vars: - self.print("if (cut_var) return NULL;") + self.print("p->mark = _mark;") + if "_cut_var" in vars: + self.print("if (_cut_var) return NULL;") self.print("}") def collect_vars(self, node: Alt) -> Dict[Optional[str], Optional[str]]: diff --git a/Tools/peg_generator/pegen/parser_generator.py b/Tools/peg_generator/pegen/parser_generator.py index 03452510b9669b..689022b12da203 100644 --- a/Tools/peg_generator/pegen/parser_generator.py +++ b/Tools/peg_generator/pegen/parser_generator.py @@ -27,6 +27,11 @@ def visit_NameLeaf(self, node: NameLeaf) -> None: # TODO: Add line/col info to (leaf) nodes raise GrammarError(f"Dangling reference to rule {node.value!r}") + def visit_NamedItem(self, node: NameLeaf) -> None: + if node.name and node.name.startswith("_"): + raise GrammarError(f"Variable names cannot start with underscore: '{node.name}'") + self.visit(node.item) + class ParserGenerator: @@ -36,6 +41,7 @@ def __init__(self, grammar: Grammar, tokens: Dict[int, str], file: Optional[IO[T self.grammar = grammar self.tokens = tokens self.rules = grammar.rules + self.validate_rule_names() if "trailer" not in grammar.metas and "start" not in self.rules: raise GrammarError("Grammar without a trailer must have a 'start' rule") checker = RuleCheckingVisitor(self.rules, self.tokens) @@ -51,6 +57,11 @@ def __init__(self, grammar: Grammar, tokens: Dict[int, str], file: Optional[IO[T self.all_rules: Dict[str, Rule] = {} # Rules + temporal rules self._local_variable_stack: List[List[str]] = [] + def validate_rule_names(self): + for rule in self.rules: + if rule.startswith("_"): + raise GrammarError(f"Rule names cannot start with underscore: '{rule}'") + @contextlib.contextmanager def local_variable_context(self) -> Iterator[None]: self._local_variable_stack.append([]) diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 05d6d7de296db9..7aa28bd2157fb3 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -41,13 +41,13 @@ log = logging.getLogger("multissl") OPENSSL_OLD_VERSIONS = [ - "1.0.2", ] OPENSSL_RECENT_VERSIONS = [ - "1.0.2t", + "1.0.2u", "1.1.0l", - "1.1.1f", + "1.1.1g", + # "3.0.0-alpha2" ] LIBRESSL_OLD_VERSIONS = [ @@ -143,6 +143,23 @@ help="Keep original sources for debugging." 
) +OPENSSL_FIPS_CNF = """\ +openssl_conf = openssl_init + +.include {self.install_dir}/ssl/fipsinstall.cnf +# .include {self.install_dir}/ssl/openssl.cnf + +[openssl_init] +providers = provider_sect + +[provider_sect] +fips = fips_sect +default = default_sect + +[default_sect] +activate = 1 +""" + class AbstractBuilder(object): library = None @@ -291,9 +308,13 @@ def _make_install(self): ["make", "-j1", self.install_target], cwd=self.build_dir ) + self._post_install() if not self.args.keep_sources: shutil.rmtree(self.build_dir) + def _post_install(self): + pass + def install(self): log.info(self.openssl_cli) if not self.has_openssl or self.args.force: @@ -365,6 +386,40 @@ class BuildOpenSSL(AbstractBuilder): # only install software, skip docs install_target = 'install_sw' + def _post_install(self): + if self.version.startswith("3.0"): + self._post_install_300() + + def _post_install_300(self): + # create ssl/ subdir with example configs + self._subprocess_call( + ["make", "-j1", "install_ssldirs"], + cwd=self.build_dir + ) + # Install FIPS module + # https://wiki.openssl.org/index.php/OpenSSL_3.0#Completing_the_installation_of_the_FIPS_Module + fipsinstall_cnf = os.path.join( + self.install_dir, "ssl", "fipsinstall.cnf" + ) + openssl_fips_cnf = os.path.join( + self.install_dir, "ssl", "openssl-fips.cnf" + ) + fips_mod = os.path.join(self.lib_dir, "ossl-modules/fips.so") + self._subprocess_call( + [ + self.openssl_cli, "fipsinstall", + "-out", fipsinstall_cnf, + "-module", fips_mod, + "-provider_name", "fips", + "-mac_name", "HMAC", + "-macopt", "digest:SHA256", + "-macopt", "hexkey:00", + "-section_name", "fips_sect" + ] + ) + with open(openssl_fips_cnf, "w") as f: + f.write(OPENSSL_FIPS_CNF.format(self=self)) + class BuildLibreSSL(AbstractBuilder): library = "LibreSSL" diff --git a/configure b/configure index a8a35d0defc6b3..26e9aa9fe454e2 100755 --- a/configure +++ b/configure @@ -845,6 +845,7 @@ with_computed_gotos with_ensurepip with_openssl with_ssl_default_suites +with_experimental_isolated_subinterpreters ' ac_precious_vars='build_alias host_alias @@ -1575,6 +1576,9 @@ Optional Packages: leave OpenSSL's defaults untouched, STRING: use a custom string, PROTOCOL_SSLv2 ignores the setting, see Doc/library/ssl.rst + --with-experimental-isolated-subinterpreters + better isolate subinterpreters, experimental build + mode (default is no) Some influential environment variables: MACHDEP name for machine-dependent library files @@ -17489,6 +17493,30 @@ $as_echo "#define PY_SSL_DEFAULT_CIPHERS 1" >>confdefs.h fi +# --with-experimental-isolated-subinterpreters + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-experimental-isolated-subinterpreters" >&5 +$as_echo_n "checking for --with-experimental-isolated-subinterpreters... " >&6; } + +# Check whether --with-experimental-isolated-subinterpreters was given. 
+if test "${with_experimental_isolated_subinterpreters+set}" = set; then : + withval=$with_experimental_isolated_subinterpreters; +if test "$withval" != no +then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; }; + $as_echo "#define EXPERIMENTAL_ISOLATED_SUBINTERPRETERS 1" >>confdefs.h + +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; }; +fi +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + # generate output files ac_config_files="$ac_config_files Makefile.pre Misc/python.pc Misc/python-embed.pc Misc/python-config.sh" diff --git a/configure.ac b/configure.ac index f996051efc719e..acb6d4bfa8da10 100644 --- a/configure.ac +++ b/configure.ac @@ -5717,6 +5717,23 @@ AC_MSG_RESULT(python) AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1) ]) +# --with-experimental-isolated-subinterpreters +AH_TEMPLATE(EXPERIMENTAL_ISOLATED_SUBINTERPRETERS, + [Better isolate subinterpreters, experimental build mode.]) +AC_MSG_CHECKING(for --with-experimental-isolated-subinterpreters) +AC_ARG_WITH(experimental-isolated-subinterpreters, + AS_HELP_STRING([--with-experimental-isolated-subinterpreters], + [better isolate subinterpreters, experimental build mode (default is no)]), +[ +if test "$withval" != no +then + AC_MSG_RESULT(yes); + AC_DEFINE(EXPERIMENTAL_ISOLATED_SUBINTERPRETERS) +else + AC_MSG_RESULT(no); +fi], +[AC_MSG_RESULT(no)]) + # generate output files AC_CONFIG_FILES(Makefile.pre Misc/python.pc Misc/python-embed.pc Misc/python-config.sh) diff --git a/pyconfig.h.in b/pyconfig.h.in index 75ac368aadafec..c06c4958726c0f 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -38,6 +38,9 @@ /* Define if --enable-ipv6 is specified */ #undef ENABLE_IPV6 +/* Better isolate subinterpreters, experimental build mode. */ +#undef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS + /* Define to 1 if your system stores words within floats with the most significant word first */ #undef FLOAT_WORDS_BIGENDIAN